From 7698ab7bfc83305da7b3d7e0df3b5a52aa7e1266 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Tue, 17 Nov 2015 13:40:10 +0100 Subject: [PATCH 001/320] Fix typos in query dsl docs. When passing the example json snippets through the query parser while working on #14249 some of the examples could not be parsed. This PR fixes those examples. Relates to #14249 --- docs/reference/query-dsl/bool-query.asciidoc | 2 +- .../query-dsl/geo-bounding-box-query.asciidoc | 8 ++++---- docs/reference/query-dsl/has-child-query.asciidoc | 10 +++++----- docs/reference/query-dsl/mlt-query.asciidoc | 1 - 4 files changed, 10 insertions(+), 11 deletions(-) diff --git a/docs/reference/query-dsl/bool-query.asciidoc b/docs/reference/query-dsl/bool-query.asciidoc index 8f2fdb0c99ee..17bf74df1e12 100644 --- a/docs/reference/query-dsl/bool-query.asciidoc +++ b/docs/reference/query-dsl/bool-query.asciidoc @@ -51,7 +51,7 @@ final `_score` for each document. }, "filter": { "term" : { "tag" : "tech" } - } + }, "must_not" : { "range" : { "age" : { "from" : 10, "to" : 20 } diff --git a/docs/reference/query-dsl/geo-bounding-box-query.asciidoc b/docs/reference/query-dsl/geo-bounding-box-query.asciidoc index c52bcb93e7da..f751e83fc805 100644 --- a/docs/reference/query-dsl/geo-bounding-box-query.asciidoc +++ b/docs/reference/query-dsl/geo-bounding-box-query.asciidoc @@ -188,10 +188,10 @@ values separately. 
"filter" : { "geo_bounding_box" : { "pin.location" : { - "top" : -74.1, - "left" : 40.73, - "bottom" : -71.12, - "right" : 40.01 + "top" : 40.73, + "left" : -74.1, + "bottom" : 40.01, + "right" : -71.12 } } } diff --git a/docs/reference/query-dsl/has-child-query.asciidoc b/docs/reference/query-dsl/has-child-query.asciidoc index 24951bbe9302..f65434242ea7 100644 --- a/docs/reference/query-dsl/has-child-query.asciidoc +++ b/docs/reference/query-dsl/has-child-query.asciidoc @@ -9,7 +9,7 @@ an example: -------------------------------------------------- { "has_child" : { - "type" : "blog_tag", + "child_type" : "blog_tag", "query" : { "term" : { "tag" : "something" @@ -34,8 +34,8 @@ inside the `has_child` query: -------------------------------------------------- { "has_child" : { - "type" : "blog_tag", - "score_mode" : "sum", + "child_type" : "blog_tag", + "score_mode" : "min", "query" : { "term" : { "tag" : "something" @@ -56,8 +56,8 @@ a match: -------------------------------------------------- { "has_child" : { - "type" : "blog_tag", - "score_mode" : "sum", + "child_type" : "blog_tag", + "score_mode" : "min", "min_children": 2, <1> "max_children": 10, <1> "query" : { diff --git a/docs/reference/query-dsl/mlt-query.asciidoc b/docs/reference/query-dsl/mlt-query.asciidoc index ee4b695c2ffb..ce2d34144ee9 100644 --- a/docs/reference/query-dsl/mlt-query.asciidoc +++ b/docs/reference/query-dsl/mlt-query.asciidoc @@ -73,7 +73,6 @@ present in the index, the syntax is similar to < Date: Thu, 19 Nov 2015 14:47:01 +0100 Subject: [PATCH 002/320] Revert back to type instead of child_type... for has child queries. 
Relates to #14249 --- docs/reference/query-dsl/has-child-query.asciidoc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/reference/query-dsl/has-child-query.asciidoc b/docs/reference/query-dsl/has-child-query.asciidoc index f65434242ea7..5ffdb4a2b8d2 100644 --- a/docs/reference/query-dsl/has-child-query.asciidoc +++ b/docs/reference/query-dsl/has-child-query.asciidoc @@ -9,7 +9,7 @@ an example: -------------------------------------------------- { "has_child" : { - "child_type" : "blog_tag", + "type" : "blog_tag", "query" : { "term" : { "tag" : "something" @@ -34,7 +34,7 @@ inside the `has_child` query: -------------------------------------------------- { "has_child" : { - "child_type" : "blog_tag", + "type" : "blog_tag", "score_mode" : "min", "query" : { "term" : { @@ -56,7 +56,7 @@ a match: -------------------------------------------------- { "has_child" : { - "child_type" : "blog_tag", + "type" : "blog_tag", "score_mode" : "min", "min_children": 2, <1> "max_children": 10, <1> From f623c88d9943057e7ab0b0abe9e2cfab4276c89a Mon Sep 17 00:00:00 2001 From: Rhommel Lamas Date: Wed, 2 Dec 2015 16:37:30 +1100 Subject: [PATCH 003/320] Enable ES_INCLUDE --- distribution/deb/src/main/packaging/init.d/elasticsearch | 1 + 1 file changed, 1 insertion(+) diff --git a/distribution/deb/src/main/packaging/init.d/elasticsearch b/distribution/deb/src/main/packaging/init.d/elasticsearch index 9f1f1479a51d..b6cef82f5f87 100755 --- a/distribution/deb/src/main/packaging/init.d/elasticsearch +++ b/distribution/deb/src/main/packaging/init.d/elasticsearch @@ -107,6 +107,7 @@ export ES_DIRECT_SIZE export ES_JAVA_OPTS export ES_GC_LOG_FILE export JAVA_HOME +export ES_INCLUDE # Check DAEMON exists test -x $DAEMON || exit 0 From c00c0fa020a2ab3391a38312aba898db517f9f47 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 26 Jan 2016 09:59:11 +0100 Subject: [PATCH 004/320] Initial refactoring for phrase suggester Adding initial serialization 
methods (readFrom, writeTo) to the PhraseSuggestionBuilder, also adding the base test framework for serialiazation testing, equals and hashCode. Moving SuggestionBuilder out of the global SuggestBuilder for better readability. --- .../action/suggest/SuggestRequest.java | 5 +- .../action/suggest/SuggestRequestBuilder.java | 4 +- .../common/io/stream/StreamInput.java | 16 + .../common/io/stream/StreamOutput.java | 17 + .../search/suggest/SuggestBuilder.java | 131 +------- .../search/suggest/SuggestionBuilder.java | 299 ++++++++++++++++++ .../CompletionSuggestionBuilder.java | 47 ++- .../phrase/PhraseSuggestionBuilder.java | 176 ++++++++++- .../suggest/term/TermSuggestionBuilder.java | 39 ++- .../AbstractSuggestionBuilderTestCase.java | 186 +++++++++++ .../suggest/CompletionSuggestSearchIT.java | 3 +- .../ContextCompletionSuggestSearchIT.java | 2 +- .../suggest/CustomSuggesterSearchIT.java | 61 +++- .../phrase/PhraseSuggestionBuilderTests.java | 114 +++++++ .../messy/tests/SuggestSearchTests.java | 74 ++--- 15 files changed, 982 insertions(+), 192 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java create mode 100644 core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java create mode 100644 core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java diff --git a/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java b/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java index 0d1c4932d483..5dcb39fa14bb 100644 --- a/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java +++ b/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.search.suggest.SuggestBuilder; +import 
org.elasticsearch.search.suggest.SuggestionBuilder; import java.io.IOException; import java.util.Arrays; @@ -99,10 +100,10 @@ public final class SuggestRequest extends BroadcastRequest { } /** - * set a new source using a {@link org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder} + * set a new source using a {@link org.elasticsearch.search.suggest.SuggestionBuilder} * for completion suggestion lookup */ - public SuggestRequest suggest(SuggestBuilder.SuggestionBuilder suggestionBuilder) { + public SuggestRequest suggest(SuggestionBuilder suggestionBuilder) { return suggest(suggestionBuilder.buildAsBytes(Requests.CONTENT_TYPE)); } diff --git a/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequestBuilder.java index 06a2b00c648d..d9f957aa2b15 100644 --- a/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequestBuilder.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.search.suggest.SuggestBuilder; -import org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder; +import org.elasticsearch.search.suggest.SuggestionBuilder; import java.io.IOException; @@ -45,7 +45,7 @@ public class SuggestRequestBuilder extends BroadcastOperationRequestBuilder SuggestRequestBuilder addSuggestion(SuggestionBuilder suggestion) { + public SuggestRequestBuilder addSuggestion(SuggestionBuilder suggestion) { suggest.addSuggestion(suggestion); return this; } diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index 02e937dbd838..3f44b61f2126 100644 --- 
a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.text.Text; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.search.rescore.RescoreBuilder; +import org.elasticsearch.search.suggest.SuggestionBuilder; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -281,6 +282,14 @@ public abstract class StreamInput extends InputStream { return null; } + @Nullable + public Float readOptionalFloat() throws IOException { + if (readBoolean()) { + return readFloat(); + } + return null; + } + @Nullable public Integer readOptionalVInt() throws IOException { if (readBoolean()) { @@ -683,6 +692,13 @@ public abstract class StreamInput extends InputStream { return readNamedWriteable(RescoreBuilder.class); } + /** + * Reads a {@link SuggestionBuilder} from the current stream + */ + public SuggestionBuilder readSuggestion() throws IOException { + return readNamedWriteable(SuggestionBuilder.class); + } + /** * Reads a {@link org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder} from the current stream */ diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index 0863717a5ab9..5e0af597b09b 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.text.Text; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.search.rescore.RescoreBuilder; +import org.elasticsearch.search.suggest.SuggestionBuilder; import org.joda.time.ReadableInstant; import java.io.EOFException; @@ 
-230,6 +231,15 @@ public abstract class StreamOutput extends OutputStream { } } + public void writeOptionalFloat(@Nullable Float floatValue) throws IOException { + if (floatValue == null) { + writeBoolean(false); + } else { + writeBoolean(true); + writeFloat(floatValue); + } + } + public void writeOptionalText(@Nullable Text text) throws IOException { if (text == null) { writeInt(-1); @@ -684,4 +694,11 @@ public abstract class StreamOutput extends OutputStream { public void writeRescorer(RescoreBuilder rescorer) throws IOException { writeNamedWriteable(rescorer); } + + /** + * Writes a {@link SuggestionBuilder} to the current stream + */ + public void writeSuggestion(SuggestionBuilder suggestion) throws IOException { + writeNamedWriteable(suggestion); + } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java index 5621e03e7def..92661b21f18e 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java @@ -42,14 +42,14 @@ public class SuggestBuilder extends ToXContentToBytes { public SuggestBuilder() { this.name = null; } - + public SuggestBuilder(String name) { this.name = name; } - + /** * Sets the text to provide suggestions for. The suggest text is a required option that needs - * to be set either via this setter or via the {@link org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder#setText(String)} method. + * to be set either via this setter or via the {@link org.elasticsearch.search.suggest.SuggestionBuilder#text(String)} method. *

* The suggest text gets analyzed by the suggest analyzer or the suggest field search analyzer. * For each analyzed token, suggested terms are suggested if possible. @@ -67,7 +67,7 @@ public class SuggestBuilder extends ToXContentToBytes { suggestions.add(suggestion); return this; } - + /** * Returns all suggestions with the defined names. */ @@ -82,7 +82,7 @@ public class SuggestBuilder extends ToXContentToBytes { } else { builder.startObject(name); } - + if (globalText != null) { builder.field("text", globalText); } @@ -92,125 +92,4 @@ public class SuggestBuilder extends ToXContentToBytes { builder.endObject(); return builder; } - - public static abstract class SuggestionBuilder extends ToXContentToBytes { - - private String name; - private String suggester; - private String text; - private String prefix; - private String regex; - private String field; - private String analyzer; - private Integer size; - private Integer shardSize; - - public SuggestionBuilder(String name, String suggester) { - this.name = name; - this.suggester = suggester; - } - - /** - * Same as in {@link SuggestBuilder#setText(String)}, but in the suggestion scope. 
- */ - @SuppressWarnings("unchecked") - public T text(String text) { - this.text = text; - return (T) this; - } - - protected void setPrefix(String prefix) { - this.prefix = prefix; - } - - protected void setRegex(String regex) { - this.regex = regex; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(name); - if (text != null) { - builder.field("text", text); - } - if (prefix != null) { - builder.field("prefix", prefix); - } - if (regex != null) { - builder.field("regex", regex); - } - builder.startObject(suggester); - if (analyzer != null) { - builder.field("analyzer", analyzer); - } - if (field != null) { - builder.field("field", field); - } - if (size != null) { - builder.field("size", size); - } - if (shardSize != null) { - builder.field("shard_size", shardSize); - } - - builder = innerToXContent(builder, params); - builder.endObject(); - builder.endObject(); - return builder; - } - - protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException; - - /** - * Sets from what field to fetch the candidate suggestions from. This is an - * required option and needs to be set via this setter or - * {@link org.elasticsearch.search.suggest.term.TermSuggestionBuilder#field(String)} - * method - */ - @SuppressWarnings("unchecked") - public T field(String field) { - this.field = field; - return (T)this; - } - - /** - * Sets the analyzer to analyse to suggest text with. Defaults to the search - * analyzer of the suggest field. - */ - @SuppressWarnings("unchecked") - public T analyzer(String analyzer) { - this.analyzer = analyzer; - return (T)this; - } - - /** - * Sets the maximum suggestions to be returned per suggest text term. 
- */ - @SuppressWarnings("unchecked") - public T size(int size) { - if (size <= 0) { - throw new IllegalArgumentException("Size must be positive"); - } - this.size = size; - return (T)this; - } - - /** - * Sets the maximum number of suggested term to be retrieved from each - * individual shard. During the reduce phase the only the top N suggestions - * are returned based on the size option. Defaults to the - * size option. - *

- * Setting this to a value higher than the `size` can be useful in order to - * get a more accurate document frequency for suggested terms. Due to the - * fact that terms are partitioned amongst shards, the shard level document - * frequencies of suggestions may not be precise. Increasing this will make - * these document frequencies more precise. - */ - @SuppressWarnings("unchecked") - public T shardSize(Integer shardSize) { - this.shardSize = shardSize; - return (T)this; - } - } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java new file mode 100644 index 000000000000..7705f2201d1c --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java @@ -0,0 +1,299 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.suggest; + +import org.elasticsearch.action.support.ToXContentToBytes; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +/** + * Base class for the different suggestion implementations. + */ +public abstract class SuggestionBuilder> extends ToXContentToBytes implements NamedWriteable { + + protected final String name; + // TODO this seems mandatory and should be constructor arg + protected String fieldname; + protected String text; + protected String prefix; + protected String regex; + protected String analyzer; + protected Integer size; + protected Integer shardSize; + + protected static final ParseField TEXT_FIELD = new ParseField("text"); + protected static final ParseField PREFIX_FIELD = new ParseField("prefix"); + protected static final ParseField REGEX_FIELD = new ParseField("regex"); + protected static final ParseField FIELDNAME_FIELD = new ParseField("field"); + protected static final ParseField ANALYZER_FIELD = new ParseField("analyzer"); + protected static final ParseField SIZE_FIELD = new ParseField("size"); + protected static final ParseField SHARDSIZE_FIELD = new ParseField("shard_size"); + + public SuggestionBuilder(String name) { + this.name = name; + } + + /** + * get the name for this suggestion + */ + public String name() { + return this.name; + } + + /** + * Same as in {@link SuggestBuilder#setText(String)}, but in the suggestion scope. 
+ */ + @SuppressWarnings("unchecked") + public T text(String text) { + this.text = text; + return (T) this; + } + + /** + * get the text for this suggestion + */ + public String text() { + return this.text; + } + + @SuppressWarnings("unchecked") + protected T prefix(String prefix) { + this.prefix = prefix; + return (T) this; + } + + /** + * get the prefix for this suggestion + */ + public String prefix() { + return this.prefix; + } + + @SuppressWarnings("unchecked") + protected T regex(String regex) { + this.regex = regex; + return (T) this; + } + + /** + * get the regex for this suggestion + */ + public String regex() { + return this.regex; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(name); + if (text != null) { + builder.field(TEXT_FIELD.getPreferredName(), text); + } + if (prefix != null) { + builder.field(PREFIX_FIELD.getPreferredName(), prefix); + } + if (regex != null) { + builder.field(REGEX_FIELD.getPreferredName(), regex); + } + builder.startObject(getSuggesterName()); + if (analyzer != null) { + builder.field(ANALYZER_FIELD.getPreferredName(), analyzer); + } + if (fieldname != null) { + builder.field(FIELDNAME_FIELD.getPreferredName(), fieldname); + } + if (size != null) { + builder.field(SIZE_FIELD.getPreferredName(), size); + } + if (shardSize != null) { + builder.field(SHARDSIZE_FIELD.getPreferredName(), shardSize); + } + + builder = innerToXContent(builder, params); + builder.endObject(); + builder.endObject(); + return builder; + } + + private String getSuggesterName() { + //default impl returns the same as writeable name, but we keep the distinction between the two just to make sure + return getWriteableName(); + } + + protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException; + + /** + * Sets from what field to fetch the candidate suggestions from. 
This is an + * required option and needs to be set via this setter or + * {@link org.elasticsearch.search.suggest.term.TermSuggestionBuilder#field(String)} + * method + */ + @SuppressWarnings("unchecked") + public T field(String field) { + this.fieldname = field; + return (T)this; + } + + /** + * get the {@link #field()} parameter + */ + public String field() { + return this.fieldname; + } + + /** + * Sets the analyzer to analyse to suggest text with. Defaults to the search + * analyzer of the suggest field. + */ + @SuppressWarnings("unchecked") + public T analyzer(String analyzer) { + this.analyzer = analyzer; + return (T)this; + } + + /** + * get the {@link #analyzer()} parameter + */ + public String analyzer() { + return this.analyzer; + } + + /** + * Sets the maximum suggestions to be returned per suggest text term. + */ + @SuppressWarnings("unchecked") + public T size(int size) { + if (size <= 0) { + throw new IllegalArgumentException("Size must be positive"); + } + this.size = size; + return (T)this; + } + + /** + * get the {@link #size()} parameter + */ + public Integer size() { + return this.size; + } + + /** + * Sets the maximum number of suggested term to be retrieved from each + * individual shard. During the reduce phase the only the top N suggestions + * are returned based on the size option. Defaults to the + * size option. + *

+ * Setting this to a value higher than the `size` can be useful in order to + * get a more accurate document frequency for suggested terms. Due to the + * fact that terms are partitioned amongst shards, the shard level document + * frequencies of suggestions may not be precise. Increasing this will make + * these document frequencies more precise. + */ + @SuppressWarnings("unchecked") + public T shardSize(Integer shardSize) { + this.shardSize = shardSize; + return (T)this; + } + + /** + * get the {@link #shardSize()} parameter + */ + public Integer shardSize() { + return this.shardSize; + } + + + @Override + public final T readFrom(StreamInput in) throws IOException { + String name = in.readString(); + T suggestionBuilder = doReadFrom(in, name); + suggestionBuilder.fieldname = in.readOptionalString(); + suggestionBuilder.text = in.readOptionalString(); + suggestionBuilder.prefix = in.readOptionalString(); + suggestionBuilder.regex = in.readOptionalString(); + suggestionBuilder.analyzer = in.readOptionalString(); + suggestionBuilder.size = in.readOptionalVInt(); + suggestionBuilder.shardSize = in.readOptionalVInt(); + return suggestionBuilder; + } + + /** + * Subclass should return a new instance, reading itself from the input string + * @param in the input string to read from + * @param name the name of the suggestion (read from stream by {@link SuggestionBuilder} + */ + protected abstract T doReadFrom(StreamInput in, String name) throws IOException; + + @Override + public final void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + doWriteTo(out); + out.writeOptionalString(fieldname); + out.writeOptionalString(text); + out.writeOptionalString(prefix); + out.writeOptionalString(regex); + out.writeOptionalString(analyzer); + out.writeOptionalVInt(size); + out.writeOptionalVInt(shardSize); + } + + protected abstract void doWriteTo(StreamOutput out) throws IOException; + + @Override + public final boolean equals(Object obj) { + if (this == 
obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + @SuppressWarnings("unchecked") + T other = (T) obj; + return Objects.equals(name, other.name()) && + Objects.equals(text, other.text()) && + Objects.equals(prefix, other.prefix()) && + Objects.equals(regex, other.regex()) && + Objects.equals(fieldname, other.field()) && + Objects.equals(analyzer, other.analyzer()) && + Objects.equals(size, other.size()) && + Objects.equals(shardSize, other.shardSize()) && + doEquals(other); + } + + /** + * Indicates whether some other {@link SuggestionBuilder} of the same type is "equal to" this one. + */ + protected abstract boolean doEquals(T other); + + @Override + public final int hashCode() { + return Objects.hash(name, text, prefix, regex, fieldname, analyzer, size, shardSize, doHashCode()); + } + + /** + * HashCode for the subclass of {@link SuggestionBuilder} to implement. + */ + protected abstract int doHashCode(); +} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 9cf78ea66776..1b515e754093 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -22,11 +22,13 @@ import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.index.query.RegexpFlag; -import org.elasticsearch.search.suggest.SuggestBuilder; +import org.elasticsearch.search.suggest.SuggestionBuilder; import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext; import org.elasticsearch.search.suggest.completion.context.GeoQueryContext; @@ -45,7 +47,7 @@ import java.util.Set; * are created at index-time and so must be defined in the mapping with the type "completion" before * indexing. */ -public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilder { +public class CompletionSuggestionBuilder extends SuggestionBuilder { final static String SUGGESTION_NAME = "completion"; static final ParseField PAYLOAD_FIELD = new ParseField("payload"); @@ -56,7 +58,7 @@ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilde private final Set payloadFields = new HashSet<>(); public CompletionSuggestionBuilder(String name) { - super(name, SUGGESTION_NAME); + super(name); } /** @@ -255,8 +257,9 @@ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilde * Sets the prefix to provide completions for. * The prefix gets analyzed by the suggest analyzer. 
*/ + @Override public CompletionSuggestionBuilder prefix(String prefix) { - super.setPrefix(prefix); + super.prefix(prefix); return this; } @@ -264,7 +267,7 @@ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilde * Same as {@link #prefix(String)} with fuzziness of fuzziness */ public CompletionSuggestionBuilder prefix(String prefix, Fuzziness fuzziness) { - super.setPrefix(prefix); + super.prefix(prefix); this.fuzzyOptionsBuilder = new FuzzyOptionsBuilder().setFuzziness(fuzziness); return this; } @@ -274,7 +277,7 @@ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilde * see {@link FuzzyOptionsBuilder} */ public CompletionSuggestionBuilder prefix(String prefix, FuzzyOptionsBuilder fuzzyOptionsBuilder) { - super.setPrefix(prefix); + super.prefix(prefix); this.fuzzyOptionsBuilder = fuzzyOptionsBuilder; return this; } @@ -282,8 +285,9 @@ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilde /** * Sets a regular expression pattern for prefixes to provide completions for. 
*/ + @Override public CompletionSuggestionBuilder regex(String regex) { - super.setRegex(regex); + super.regex(regex); return this; } @@ -362,4 +366,33 @@ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilde } return builder; } + + @Override + public String getWriteableName() { + return SUGGESTION_NAME; + } + + @Override + public void doWriteTo(StreamOutput out) throws IOException { + // NORELEASE + throw new UnsupportedOperationException(); + } + + @Override + public CompletionSuggestionBuilder doReadFrom(StreamInput in, String name) throws IOException { + // NORELEASE + throw new UnsupportedOperationException(); + } + + @Override + protected boolean doEquals(CompletionSuggestionBuilder other) { + // NORELEASE + return false; + } + + @Override + protected int doHashCode() { + // NORELEASE + return 0; + } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java index 1055fbe83fce..46c9b0f99f64 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java @@ -18,10 +18,12 @@ */ package org.elasticsearch.search.suggest.phrase; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.script.Template; -import org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder; +import org.elasticsearch.search.suggest.SuggestionBuilder; import java.io.IOException; import java.util.ArrayList; @@ -29,12 +31,18 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; import java.util.Set; /** * Defines the actual suggest 
command for phrase suggestions ( phrase). */ public final class PhraseSuggestionBuilder extends SuggestionBuilder { + + static final String SUGGESTION_NAME = "phrase"; + + public static final PhraseSuggestionBuilder PROTOTYPE = new PhraseSuggestionBuilder("_na_"); + private Float maxErrors; private String separator; private Float realWordErrorLikelihood; @@ -51,7 +59,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder0.95 corresponding to 5% or @@ -100,6 +129,13 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder collateParams() { + return this.collateParams; + } + /** * Sets whether to prune suggestions after collation */ @@ -205,6 +290,13 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder> generators = new HashMap<>(); + } + + @Override + public PhraseSuggestionBuilder doReadFrom(StreamInput in, String name) throws IOException { + PhraseSuggestionBuilder builder = new PhraseSuggestionBuilder(name); + builder.maxErrors = in.readOptionalFloat(); + builder.realWordErrorLikelihood = in.readOptionalFloat(); + builder.confidence = in.readOptionalFloat(); + builder.gramSize = in.readOptionalVInt(); + // NORELEASE read model + builder.forceUnigrams = in.readOptionalBoolean(); + builder.tokenLimit = in.readOptionalVInt(); + builder.preTag = in.readOptionalString(); + builder.postTag = in.readOptionalString(); + builder.separator = in.readOptionalString(); + if (in.readBoolean()) { + builder.collateQuery = Template.readTemplate(in); + } + builder.collateParams = in.readMap(); + builder.collatePrune = in.readOptionalBoolean(); + // NORELEASE read Map> generators; + return builder; + } + + @Override + protected boolean doEquals(PhraseSuggestionBuilder other) { + return Objects.equals(maxErrors, other.maxErrors) && + Objects.equals(separator, other.separator) && + Objects.equals(realWordErrorLikelihood, other.realWordErrorLikelihood) && + Objects.equals(confidence, other.confidence) && + // NORELEASE 
Objects.equals(generator, other.generator) && + Objects.equals(gramSize, other.gramSize) && + // NORELEASE Objects.equals(model, other.model) && + Objects.equals(forceUnigrams, other.forceUnigrams) && + Objects.equals(tokenLimit, other.tokenLimit) && + Objects.equals(preTag, other.preTag) && + Objects.equals(postTag, other.postTag) && + Objects.equals(collateQuery, other.collateQuery) && + Objects.equals(collateParams, other.collateParams) && + Objects.equals(collatePrune, other.collatePrune); + } + + @Override + protected int doHashCode() { + return Objects.hash(maxErrors, separator, realWordErrorLikelihood, confidence, + /** NORELEASE generators, */ + gramSize, + /** NORELEASE model, */ + forceUnigrams, tokenLimit, preTag, postTag, + collateQuery, collateParams, collatePrune); + } + } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java index 03eb388f0032..e2a14c1a2b2d 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java @@ -17,8 +17,10 @@ * under the License. 
*/ package org.elasticsearch.search.suggest.term; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder; +import org.elasticsearch.search.suggest.SuggestionBuilder; import java.io.IOException; @@ -29,6 +31,8 @@ import java.io.IOException; */ public class TermSuggestionBuilder extends SuggestionBuilder { + static final String SUGGESTION_NAME = "term"; + private String suggestMode; private Float accuracy; private String sort; @@ -39,13 +43,13 @@ public class TermSuggestionBuilder extends SuggestionBuilder> extends ESTestCase { + + private static final int NUMBER_OF_TESTBUILDERS = 20; + private static NamedWriteableRegistry namedWriteableRegistry; + + /** + * setup for the whole base test class + */ + @BeforeClass + public static void init() { + namedWriteableRegistry = new NamedWriteableRegistry(); + namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, PhraseSuggestionBuilder.PROTOTYPE); + } + + @AfterClass + public static void afterClass() throws Exception { + namedWriteableRegistry = null; + } + + /** + * Test serialization and deserialization of the suggestion builder + */ + public void testSerialization() throws IOException { + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + SB original = randomTestBuilder(); + SB deserialized = serializedCopy(original); + assertEquals(deserialized, original); + assertEquals(deserialized.hashCode(), original.hashCode()); + assertNotSame(deserialized, original); + } + } + + /** + * returns a random suggestion builder, setting the common options randomly + */ + protected SB randomTestBuilder() { + SB randomSuggestion = randomSuggestionBuilder(); + maybeSet(randomSuggestion::text, randomAsciiOfLengthBetween(2, 20)); + maybeSet(randomSuggestion::prefix, randomAsciiOfLengthBetween(2, 20)); + 
maybeSet(randomSuggestion::regex, randomAsciiOfLengthBetween(2, 20)); + maybeSet(randomSuggestion::field, randomAsciiOfLengthBetween(2, 20)); + maybeSet(randomSuggestion::analyzer, randomAsciiOfLengthBetween(2, 20)); + maybeSet(randomSuggestion::size, randomIntBetween(1, 20)); + maybeSet(randomSuggestion::shardSize, randomInt(20)); + return randomSuggestion; + } + + /** + * create a randomized {@link SuggestBuilder} that is used in further tests + */ + protected abstract SB randomSuggestionBuilder(); + + /** + * Test equality and hashCode properties + */ + public void testEqualsAndHashcode() throws IOException { + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + SB firstBuilder = randomTestBuilder(); + assertFalse("suggestion builder is equal to null", firstBuilder.equals(null)); + assertFalse("suggestion builder is equal to incompatible type", firstBuilder.equals("")); + assertTrue("suggestion builder is not equal to self", firstBuilder.equals(firstBuilder)); + assertThat("same suggestion builder's hashcode returns different values if called multiple times", firstBuilder.hashCode(), + equalTo(firstBuilder.hashCode())); + assertThat("different suggestion builders should not be equal", mutate(firstBuilder), not(equalTo(firstBuilder))); + + SB secondBuilder = serializedCopy(firstBuilder); + assertTrue("suggestion builder is not equal to self", secondBuilder.equals(secondBuilder)); + assertTrue("suggestion builder is not equal to its copy", firstBuilder.equals(secondBuilder)); + assertTrue("equals is not symmetric", secondBuilder.equals(firstBuilder)); + assertThat("suggestion builder copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(firstBuilder.hashCode())); + + SB thirdBuilder = serializedCopy(secondBuilder); + assertTrue("suggestion builder is not equal to self", thirdBuilder.equals(thirdBuilder)); + assertTrue("suggestion builder is not equal to its copy", secondBuilder.equals(thirdBuilder)); + 
assertThat("suggestion builder copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(thirdBuilder.hashCode())); + assertTrue("equals is not transitive", firstBuilder.equals(thirdBuilder)); + assertThat("suggestion builder copy's hashcode is different from original hashcode", firstBuilder.hashCode(), equalTo(thirdBuilder.hashCode())); + assertTrue("equals is not symmetric", thirdBuilder.equals(secondBuilder)); + assertTrue("equals is not symmetric", thirdBuilder.equals(firstBuilder)); + } + } + + private SB mutate(SB firstBuilder) throws IOException { + SB mutation = serializedCopy(firstBuilder); + assertNotSame(mutation, firstBuilder); + if (randomBoolean()) { + // change one of the common SuggestionBuilder parameters + switch (randomIntBetween(0, 6)) { + case 0: + mutation.text(randomValueOtherThan(mutation.text(), () -> randomAsciiOfLengthBetween(2, 20))); + break; + case 1: + mutation.prefix(randomValueOtherThan(mutation.prefix(), () -> randomAsciiOfLengthBetween(2, 20))); + break; + case 2: + mutation.regex(randomValueOtherThan(mutation.regex(), () -> randomAsciiOfLengthBetween(2, 20))); + break; + case 3: + mutation.field(randomValueOtherThan(mutation.field(), () -> randomAsciiOfLengthBetween(2, 20))); + break; + case 4: + mutation.analyzer(randomValueOtherThan(mutation.analyzer(), () -> randomAsciiOfLengthBetween(2, 20))); + break; + case 5: + mutation.size(randomValueOtherThan(mutation.size(), () -> randomIntBetween(1, 20))); + break; + case 6: + mutation.shardSize(randomValueOtherThan(mutation.shardSize(), () -> randomIntBetween(1, 20))); + break; + } + } else { + mutateSpecificParameters(firstBuilder); + } + return mutation; + } + + /** + * take and input {@link SuggestBuilder} and return another one that is different in one aspect (to test non-equality) + */ + protected abstract void mutateSpecificParameters(SB firstBuilder) throws IOException; + + @SuppressWarnings("unchecked") + protected SB serializedCopy(SB original) 
throws IOException { + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.writeSuggestion(original);; + try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + return (SB) in.readSuggestion(); + } + } + } + + protected static void maybeSet(Consumer consumer, T value) { + if (randomBoolean()) { + consumer.accept(value); + } + } + + /** + * helper to get a random value in a certain range that's different from the input + */ + protected static T randomValueOtherThan(T input, Supplier randomSupplier) { + T randomValue = null; + do { + randomValue = randomSupplier.get(); + } while (randomValue.equals(input)); + return randomValue; + } + +} diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 1543433be324..271fa08487bf 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.suggest; import com.carrotsearch.hppc.ObjectLongHashMap; import com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.apache.lucene.analysis.TokenStreamToAutomaton; import org.apache.lucene.search.suggest.document.ContextSuggestField; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; @@ -907,7 +908,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { } - public void assertSuggestions(String suggestionName, SuggestBuilder.SuggestionBuilder suggestBuilder, String... suggestions) { + public void assertSuggestions(String suggestionName, SuggestionBuilder suggestBuilder, String... 
suggestions) { SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(suggestBuilder ).execute().actionGet(); assertSuggestions(suggestResponse, suggestionName, suggestions); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java index 18d6d9b99f98..d92bd865f590 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -632,7 +632,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { assertEquals("Hotel Amsterdam in Berlin", suggestResponse.getSuggest().getSuggestion(suggestionName).iterator().next().getOptions().iterator().next().getText().string()); } - public void assertSuggestions(String suggestionName, SuggestBuilder.SuggestionBuilder suggestBuilder, String... suggestions) { + public void assertSuggestions(String suggestionName, SuggestionBuilder suggestBuilder, String... 
suggestions) { SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(suggestBuilder ).execute().actionGet(); CompletionSuggestSearchIT.assertSuggestions(suggestResponse, suggestionName, suggestions); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java index 18b4fa50e7b4..80eb4d7b7d44 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java @@ -20,6 +20,8 @@ package org.elasticsearch.search.suggest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.plugins.Plugin; @@ -31,6 +33,7 @@ import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Locale; +import java.util.Objects; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.hasSize; @@ -59,16 +62,7 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase { String randomField = randomAsciiOfLength(10); String randomSuffix = randomAsciiOfLength(10); SuggestBuilder suggestBuilder = new SuggestBuilder(); - suggestBuilder.addSuggestion( - new SuggestBuilder.SuggestionBuilder("someName", "custom") { - @Override - protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { - builder.field("field", randomField); - builder.field("suffix", randomSuffix); - return builder; - } - }.text(randomText) - ); + suggestBuilder.addSuggestion(new CustomSuggestionBuilder("someName", randomField, 
randomSuffix).text(randomText)); SearchRequestBuilder searchRequestBuilder = client().prepareSearch("test").setTypes("test").setFrom(0).setSize(1) .suggest(suggestBuilder); @@ -83,4 +77,51 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase { assertThat(suggestions.get(1).getText().string(), is(String.format(Locale.ROOT, "%s-%s-%s-123", randomText, randomField, randomSuffix))); } + class CustomSuggestionBuilder extends SuggestionBuilder { + + private String randomField; + private String randomSuffix; + + public CustomSuggestionBuilder(String name, String randomField, String randomSuffix) { + super(name); + this.randomField = randomField; + this.randomSuffix = randomSuffix; + } + + @Override + protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("field", randomField); + builder.field("suffix", randomSuffix); + return builder; + } + + @Override + public String getWriteableName() { + return "custom"; + } + + @Override + public void doWriteTo(StreamOutput out) throws IOException { + out.writeString(randomField); + out.writeString(randomSuffix); + } + + @Override + public CustomSuggestionBuilder doReadFrom(StreamInput in, String name) throws IOException { + return new CustomSuggestionBuilder(in.readString(), in.readString(), in.readString()); + } + + @Override + protected boolean doEquals(CustomSuggestionBuilder other) { + return Objects.equals(randomField, other.randomField) && + Objects.equals(randomSuffix, other.randomSuffix); + } + + @Override + protected int doHashCode() { + return Objects.hash(randomField, randomSuffix); + } + + } + } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java new file mode 100644 index 000000000000..74f655b6aa18 --- /dev/null +++ 
b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java @@ -0,0 +1,114 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.suggest.phrase; + +import org.elasticsearch.script.Template; +import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestCase { + + @Override + protected PhraseSuggestionBuilder randomSuggestionBuilder() { + PhraseSuggestionBuilder testBuilder = new PhraseSuggestionBuilder(randomAsciiOfLength(10)); + maybeSet(testBuilder::maxErrors, randomFloat()); + maybeSet(testBuilder::separator, randomAsciiOfLengthBetween(1, 10)); + maybeSet(testBuilder::realWordErrorLikelihood, randomFloat()); + maybeSet(testBuilder::confidence, randomFloat()); + maybeSet(testBuilder::collatePrune, randomBoolean()); + maybeSet(testBuilder::collateQuery, randomAsciiOfLengthBetween(3, 20)); + if (randomBoolean()) { + // preTag, postTag + testBuilder.highlight(randomAsciiOfLengthBetween(3, 20), randomAsciiOfLengthBetween(3, 20)); + } + maybeSet(testBuilder::gramSize, randomIntBetween(1, 
5)); + maybeSet(testBuilder::forceUnigrams, randomBoolean()); + maybeSet(testBuilder::tokenLimit, randomInt(20)); + if (randomBoolean()) { + Map collateParams = new HashMap<>(); + collateParams.put(randomAsciiOfLength(5), randomAsciiOfLength(5)); + testBuilder.collateParams(collateParams ); + } + if (randomBoolean()) { + // NORELEASE add random model + } + + if (randomBoolean()) { + // NORELEASE add random generator + } + return testBuilder; + } + + @Override + protected void mutateSpecificParameters(PhraseSuggestionBuilder builder) throws IOException { + switch (randomIntBetween(0, 7)) { + case 0: + builder.maxErrors(randomValueOtherThan(builder.maxErrors(), () -> randomFloat())); + break; + case 1: + builder.realWordErrorLikelihood(randomValueOtherThan(builder.realWordErrorLikelihood(), () -> randomFloat())); + break; + case 2: + builder.confidence(randomValueOtherThan(builder.confidence(), () -> randomFloat())); + break; + case 3: + builder.gramSize(randomValueOtherThan(builder.gramSize(), () -> randomIntBetween(1, 5))); + break; + case 4: + builder.tokenLimit(randomValueOtherThan(builder.tokenLimit(), () -> randomInt(20))); + break; + case 5: + builder.separator(randomValueOtherThan(builder.separator(), () -> randomAsciiOfLengthBetween(1, 10))); + break; + case 6: + Template collateQuery = builder.collateQuery(); + if (collateQuery != null) { + builder.collateQuery(randomValueOtherThan(collateQuery.getScript(), () -> randomAsciiOfLengthBetween(3, 20))); + } else { + builder.collateQuery(randomAsciiOfLengthBetween(3, 20)); + } + break; + case 7: + builder.collatePrune(builder.collatePrune() == null ? 
randomBoolean() : !builder.collatePrune() ); + break; + case 8: + // preTag, postTag + String currentPre = builder.preTag(); + if (currentPre != null) { + // simply double both values + builder.highlight(builder.preTag() + builder.preTag(), builder.postTag() + builder.postTag()); + } else { + builder.highlight(randomAsciiOfLengthBetween(3, 20), randomAsciiOfLengthBetween(3, 20)); + } + break; + case 9: + builder.forceUnigrams(builder.forceUnigrams() == null ? randomBoolean() : ! builder.forceUnigrams()); + break; + case 10: + builder.collateParams().put(randomAsciiOfLength(5), randomAsciiOfLength(5)); + break; + // TODO mutate random Model && generator + } + } + +} diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java index 4fd83f9a8504..8511394a4270 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java @@ -20,43 +20,6 @@ package org.elasticsearch.messy.tests; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; -import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.ReduceSearchPhaseException; -import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.action.suggest.SuggestRequestBuilder; -import org.elasticsearch.action.suggest.SuggestResponse; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.plugins.Plugin; 
-import org.elasticsearch.script.mustache.MustachePlugin; -import org.elasticsearch.search.suggest.Suggest; -import org.elasticsearch.search.suggest.SuggestBuilder; -import org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.DirectCandidateGenerator; -import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; - -import java.io.IOException; -import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ExecutionException; - import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -76,6 +39,43 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.nullValue; +import java.io.IOException; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; +import org.elasticsearch.action.index.IndexRequestBuilder; +import 
org.elasticsearch.action.search.ReduceSearchPhaseException; +import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.action.suggest.SuggestRequestBuilder; +import org.elasticsearch.action.suggest.SuggestResponse; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.mustache.MustachePlugin; +import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.search.suggest.SuggestBuilder; +import org.elasticsearch.search.suggest.SuggestionBuilder; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.DirectCandidateGenerator; +import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; + /** * Integration tests for term and phrase suggestions. Many of these tests many requests that vary only slightly from one another. 
Where * possible these tests should declare for the first request, make the request, modify the configuration for the next request, make that From a9ba1e73e780aed0c5581ccdbcde7e603d8a183e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 29 Jan 2016 11:50:49 +0100 Subject: [PATCH 005/320] Add serialization of smoothing model to PhraseSuggestionBuilder and add tests --- .../common/io/stream/StreamInput.java | 8 +++++ .../common/io/stream/StreamOutput.java | 8 +++++ .../phrase/PhraseSuggestionBuilder.java | 27 ++++++++++----- .../AbstractSuggestionBuilderTestCase.java | 2 +- .../suggest/phrase/LaplaceModelTests.java | 5 +++ .../phrase/LinearInterpolationModelTests.java | 4 +++ .../phrase/PhraseSuggestionBuilderTests.java | 33 ++++++++++++++++++- .../phrase/StupidBackoffModelTests.java | 4 +++ 8 files changed, 81 insertions(+), 10 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index 3f44b61f2126..d00499798b5b 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -39,6 +39,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.suggest.SuggestionBuilder; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -706,6 +707,13 @@ public abstract class StreamInput extends InputStream { return readNamedWriteable(ScoreFunctionBuilder.class); } + /** + * Reads a {@link SmoothingModel} from the current stream + */ + public SmoothingModel readSmoothingModel() throws IOException { + return readNamedWriteable(SmoothingModel.class); + } + /** * Reads a list of 
objects */ diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index 5e0af597b09b..e903ddae1dc2 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -38,6 +38,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.suggest.SuggestionBuilder; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel; import org.joda.time.ReadableInstant; import java.io.EOFException; @@ -670,6 +671,13 @@ public abstract class StreamOutput extends OutputStream { writeNamedWriteable(scoreFunctionBuilder); } + /** + * Writes the given {@link SmoothingModel} to the stream + */ + public void writeSmoothingModel(SmoothingModel smoothinModel) throws IOException { + writeNamedWriteable(smoothinModel); + } + /** * Writes the given {@link GeoPoint} to the stream */ diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java index 09d108b6356e..a03f9109eda8 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java @@ -216,6 +216,13 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder> extends ESTestCase { private static final int NUMBER_OF_TESTBUILDERS = 20; - private static NamedWriteableRegistry namedWriteableRegistry; + protected static NamedWriteableRegistry namedWriteableRegistry; /** * setup for the whole base test class diff --git 
a/core/src/test/java/org/elasticsearch/search/suggest/phrase/LaplaceModelTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/LaplaceModelTests.java index 87ad654e0cdc..1a939018b8fb 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/LaplaceModelTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/LaplaceModelTests.java @@ -28,6 +28,11 @@ public class LaplaceModelTests extends SmoothingModelTestCase { @Override protected SmoothingModel createTestModel() { + return createRandomModel(); + } + + + static SmoothingModel createRandomModel() { return new Laplace(randomDoubleBetween(0.0, 10.0, false)); } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/LinearInterpolationModelTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/LinearInterpolationModelTests.java index 1112b7a5ed7d..7984395abcce 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/LinearInterpolationModelTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/LinearInterpolationModelTests.java @@ -28,6 +28,10 @@ public class LinearInterpolationModelTests extends SmoothingModelTestCase { @Override protected SmoothingModel createTestModel() { + return createRandomModel(); + } + + static LinearInterpolation createRandomModel() { double trigramLambda = randomDoubleBetween(0.0, 10.0, false); double bigramLambda = randomDoubleBetween(0.0, 10.0, false); double unigramLambda = randomDoubleBetween(0.0, 10.0, false); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java index 74f655b6aa18..63fb937f3d01 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java @@ -21,6 +21,11 @@ package 
org.elasticsearch.search.suggest.phrase; import org.elasticsearch.script.Template; import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.Laplace; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.LinearInterpolation; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.StupidBackoff; +import org.junit.BeforeClass; import java.io.IOException; import java.util.HashMap; @@ -28,6 +33,13 @@ import java.util.Map; public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestCase { + @BeforeClass + public static void initSmoothingModels() { + namedWriteableRegistry.registerPrototype(SmoothingModel.class, Laplace.PROTOTYPE); + namedWriteableRegistry.registerPrototype(SmoothingModel.class, LinearInterpolation.PROTOTYPE); + namedWriteableRegistry.registerPrototype(SmoothingModel.class, StupidBackoff.PROTOTYPE); + } + @Override protected PhraseSuggestionBuilder randomSuggestionBuilder() { PhraseSuggestionBuilder testBuilder = new PhraseSuggestionBuilder(randomAsciiOfLength(10)); @@ -50,7 +62,7 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC testBuilder.collateParams(collateParams ); } if (randomBoolean()) { - // NORELEASE add random model + randomSmoothingModel(); } if (randomBoolean()) { @@ -59,6 +71,22 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC return testBuilder; } + private static SmoothingModel randomSmoothingModel() { + SmoothingModel model = null; + switch (randomIntBetween(0,2)) { + case 0: + model = LaplaceModelTests.createRandomModel(); + break; + case 1: + model = StupidBackoffModelTests.createRandomModel(); + break; + case 2: + model = LinearInterpolationModelTests.createRandomModel(); + break; + } + return model; + } + @Override protected void 
mutateSpecificParameters(PhraseSuggestionBuilder builder) throws IOException { switch (randomIntBetween(0, 7)) { @@ -107,6 +135,9 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC case 10: builder.collateParams().put(randomAsciiOfLength(5), randomAsciiOfLength(5)); break; + case 11: + builder.smoothingModel(randomValueOtherThan(builder.smoothingModel(), PhraseSuggestionBuilderTests::randomSmoothingModel)); + break; // TODO mutate random Model && generator } } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/StupidBackoffModelTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/StupidBackoffModelTests.java index c3bd66d2a815..3a59c19b13ea 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/StupidBackoffModelTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/StupidBackoffModelTests.java @@ -28,6 +28,10 @@ public class StupidBackoffModelTests extends SmoothingModelTestCase { @Override protected SmoothingModel createTestModel() { + return createRandomModel(); + } + + static SmoothingModel createRandomModel() { return new StupidBackoff(randomDoubleBetween(0.0, 10.0, false)); } From d38a1b4d6193fabc24b35284d7196152383869ee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 1 Feb 2016 14:44:57 +0100 Subject: [PATCH 006/320] Add serialization of candidate generator to PhraseSuggestionBuilder and add tests --- .../DirectCandidateGeneratorBuilder.java | 3 +- .../phrase/PhraseSuggestionBuilder.java | 30 +++++++++++++++---- .../phrase/PhraseSuggestionBuilderTests.java | 18 +++++++---- 3 files changed, 37 insertions(+), 14 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java index 90ec2845b8ab..8cc834ef0d5e 100644 --- 
a/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -42,7 +41,7 @@ import java.util.Set; import java.util.function.Consumer; public final class DirectCandidateGeneratorBuilder - implements Writeable, CandidateGenerator { + implements CandidateGenerator { private static final String TYPE = "direct_generator"; static final DirectCandidateGeneratorBuilder PROTOTYPE = new DirectCandidateGeneratorBuilder("_na_"); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java index a03f9109eda8..06014feba0ee 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -768,7 +769,15 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder> generators = new 
HashMap<>(); + out.writeVInt(this.generators.size()); + for (Entry> entry : this.generators.entrySet()) { + out.writeString(entry.getKey()); + List generatorsList = entry.getValue(); + out.writeVInt(generatorsList.size()); + for (CandidateGenerator generator : generatorsList) { + generator.writeTo(out); + } + } } @Override @@ -791,7 +800,17 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder> generators; + int generatorsEntries = in.readVInt(); + for (int i = 0; i < generatorsEntries; i++) { + String type = in.readString(); + int numberOfGenerators = in.readVInt(); + List generatorsList = new ArrayList<>(numberOfGenerators); + for (int g = 0; g < numberOfGenerators; g++) { + DirectCandidateGeneratorBuilder generator = DirectCandidateGeneratorBuilder.PROTOTYPE.readFrom(in); + generatorsList.add(generator); + } + builder.generators.put(type, generatorsList); + } return builder; } @@ -801,7 +820,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder, ToXContent { String getType(); CandidateGenerator fromXContent(QueryParseContext parseContext) throws IOException; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java index 63fb937f3d01..71a202a6b21a 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java @@ -62,11 +62,13 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC testBuilder.collateParams(collateParams ); } if (randomBoolean()) { - randomSmoothingModel(); + testBuilder.smoothingModel(randomSmoothingModel()); } - if (randomBoolean()) { - // NORELEASE add random generator + int numGenerators = randomIntBetween(1, 5); + for (int i = 0; i < numGenerators; i++) { + 
testBuilder.addCandidateGenerator(DirectCandidateGeneratorTests.randomCandidateGenerator()); + } } return testBuilder; } @@ -89,7 +91,7 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC @Override protected void mutateSpecificParameters(PhraseSuggestionBuilder builder) throws IOException { - switch (randomIntBetween(0, 7)) { + switch (randomIntBetween(0, 12)) { case 0: builder.maxErrors(randomValueOtherThan(builder.maxErrors(), () -> randomFloat())); break; @@ -133,12 +135,16 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC builder.forceUnigrams(builder.forceUnigrams() == null ? randomBoolean() : ! builder.forceUnigrams()); break; case 10: - builder.collateParams().put(randomAsciiOfLength(5), randomAsciiOfLength(5)); + Map collateParams = builder.collateParams() == null ? new HashMap<>(1) : builder.collateParams(); + collateParams.put(randomAsciiOfLength(5), randomAsciiOfLength(5)); + builder.collateParams(collateParams); break; case 11: builder.smoothingModel(randomValueOtherThan(builder.smoothingModel(), PhraseSuggestionBuilderTests::randomSmoothingModel)); break; - // TODO mutate random Model && generator + case 12: + builder.addCandidateGenerator(DirectCandidateGeneratorTests.randomCandidateGenerator()); + break; } } From 694dd0c6c5afb167fac01c843212ad4b76468cc7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 1 Feb 2016 17:08:23 +0100 Subject: [PATCH 007/320] Adressing review comments --- .../java/org/elasticsearch/common/io/stream/StreamInput.java | 2 +- .../java/org/elasticsearch/common/io/stream/StreamOutput.java | 2 +- .../search/suggest/phrase/PhraseSuggestionBuilder.java | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index d00499798b5b..25faf5685616 100644 --- 
a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -710,7 +710,7 @@ public abstract class StreamInput extends InputStream { /** * Reads a {@link SmoothingModel} from the current stream */ - public SmoothingModel readSmoothingModel() throws IOException { + public SmoothingModel readPhraseSuggestionSmoothingModel() throws IOException { return readNamedWriteable(SmoothingModel.class); } diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index e903ddae1dc2..34a57f8e8985 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -674,7 +674,7 @@ public abstract class StreamOutput extends OutputStream { /** * Writes the given {@link SmoothingModel} to the stream */ - public void writeSmoothingModel(SmoothingModel smoothinModel) throws IOException { + public void writePhraseSuggestionSmoothingModel(SmoothingModel smoothinModel) throws IOException { writeNamedWriteable(smoothinModel); } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java index 06014feba0ee..f346df1f4420 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java @@ -754,7 +754,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder Date: Wed, 27 Jan 2016 16:05:47 -0500 Subject: [PATCH 008/320] Refactored the term suggestion builder for the query refactoring effort Added the term suggestion builder's serialization/deserialization and equals/hashCode methods. 
--- .../common/io/stream/StreamInput.java | 10 +- .../rest/action/search/RestSearchAction.java | 5 +- .../elasticsearch/search/SearchModule.java | 7 + .../suggest/DirectSpellcheckerSettings.java | 36 ++- .../search/suggest/SuggestBuilder.java | 70 +++-- .../search/suggest/SuggestUtils.java | 14 +- .../search/suggest/SuggestionBuilder.java | 4 +- .../CompletionSuggestionBuilder.java | 2 + .../suggest/term/TermSuggestionBuilder.java | 291 ++++++++++++++++-- .../stream/AbstractWriteableEnumTestCase.java | 74 +++++ .../AbstractSuggestionBuilderTestCase.java | 4 + .../search/suggest/term/SortByTests.java | 70 +++++ .../suggest/term/StringDistanceImplTests.java | 82 +++++ .../search/suggest/term/SuggestModeTests.java | 74 +++++ .../term/TermSuggestionBuilderTests.java | 248 +++++++++++++++ .../messy/tests/SuggestSearchTests.java | 36 ++- 16 files changed, 937 insertions(+), 90 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/common/io/stream/AbstractWriteableEnumTestCase.java create mode 100644 core/src/test/java/org/elasticsearch/search/suggest/term/SortByTests.java create mode 100644 core/src/test/java/org/elasticsearch/search/suggest/term/StringDistanceImplTests.java create mode 100644 core/src/test/java/org/elasticsearch/search/suggest/term/SuggestModeTests.java create mode 100644 core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index 25faf5685616..eae35d0fb23e 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -70,17 +70,9 @@ import static org.elasticsearch.ElasticsearchException.readStackTrace; public abstract class StreamInput extends InputStream { - private final NamedWriteableRegistry namedWriteableRegistry; - private Version version = 
Version.CURRENT; - protected StreamInput() { - this.namedWriteableRegistry = new NamedWriteableRegistry(); - } - - protected StreamInput(NamedWriteableRegistry namedWriteableRegistry) { - this.namedWriteableRegistry = namedWriteableRegistry; - } + protected StreamInput() { } public Version getVersion() { return this.version; diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index e58caea53208..3e91a1c12803 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -55,6 +55,7 @@ import static org.elasticsearch.common.unit.TimeValue.parseTimeValue; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion; +import static org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; /** * @@ -262,7 +263,9 @@ public class RestSearchAction extends BaseRestHandler { int suggestSize = request.paramAsInt("suggest_size", 5); String suggestMode = request.param("suggest_mode"); searchSourceBuilder.suggest(new SuggestBuilder().addSuggestion( - termSuggestion(suggestField).field(suggestField).text(suggestText).size(suggestSize).suggestMode(suggestMode))); + termSuggestion(suggestField).field(suggestField) + .text(suggestText).size(suggestSize) + .suggestMode(SuggestMode.fromString(suggestMode)))); modified = true; } return modified; diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index 739b97034bf3..1f8e926bf810 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -223,6 +223,10 @@ import 
org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.Suggesters; +import org.elasticsearch.search.suggest.SuggestionBuilder; +import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; +import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import java.util.ArrayList; import java.util.HashMap; @@ -365,6 +369,9 @@ public class SearchModule extends AbstractModule { protected void configureSuggesters() { suggesters.bind(binder()); + namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, TermSuggestionBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, PhraseSuggestionBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, CompletionSuggestionBuilder.PROTOTYPE); } protected void configureHighlighters() { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/DirectSpellcheckerSettings.java b/core/src/main/java/org/elasticsearch/search/suggest/DirectSpellcheckerSettings.java index 2b4687c8497b..a173781de8b8 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/DirectSpellcheckerSettings.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/DirectSpellcheckerSettings.java @@ -25,16 +25,30 @@ import org.apache.lucene.util.automaton.LevenshteinAutomata; public class DirectSpellcheckerSettings { - private SuggestMode suggestMode = SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX; - private float accuracy = 0.5f; - private Suggest.Suggestion.Sort sort = Suggest.Suggestion.Sort.SCORE; - private StringDistance stringDistance = DirectSpellChecker.INTERNAL_LEVENSHTEIN; - private int maxEdits = LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE; - private int maxInspections = 5; - private float maxTermFreq = 0.01f; - private int 
prefixLength = 1; - private int minWordLength = 4; - private float minDocFreq = 0f; + // NB: If this changes, make sure to change the default in TermBuilderSuggester + public static SuggestMode DEFAULT_SUGGEST_MODE = SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX; + public static float DEFAULT_ACCURACY = 0.5f; + // NB: If this changes, make sure to change the default in TermBuilderSuggester + public static Suggest.Suggestion.Sort DEFAULT_SORT = Suggest.Suggestion.Sort.SCORE; + // NB: If this changes, make sure to change the default in TermBuilderSuggester + public static StringDistance DEFAULT_STRING_DISTANCE = DirectSpellChecker.INTERNAL_LEVENSHTEIN; + public static int DEFAULT_MAX_EDITS = LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE; + public static int DEFAULT_MAX_INSPECTIONS = 5; + public static float DEFAULT_MAX_TERM_FREQ = 0.01f; + public static int DEFAULT_PREFIX_LENGTH = 1; + public static int DEFAULT_MIN_WORD_LENGTH = 4; + public static float DEFAULT_MIN_DOC_FREQ = 0f; + + private SuggestMode suggestMode = DEFAULT_SUGGEST_MODE; + private float accuracy = DEFAULT_ACCURACY; + private Suggest.Suggestion.Sort sort = DEFAULT_SORT; + private StringDistance stringDistance = DEFAULT_STRING_DISTANCE; + private int maxEdits = DEFAULT_MAX_EDITS; + private int maxInspections = DEFAULT_MAX_INSPECTIONS; + private float maxTermFreq = DEFAULT_MAX_TERM_FREQ; + private int prefixLength = DEFAULT_PREFIX_LENGTH; + private int minWordLength = DEFAULT_MIN_WORD_LENGTH; + private float minDocFreq = DEFAULT_MIN_DOC_FREQ; public SuggestMode suggestMode() { return suggestMode; @@ -116,4 +130,4 @@ public class DirectSpellcheckerSettings { this.minDocFreq = minDocFreq; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java index 92661b21f18e..8037646f1526 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java @@ -19,32 +19,30 @@ package org.elasticsearch.search.suggest; import org.elasticsearch.action.support.ToXContentToBytes; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; /** * Defines how to perform suggesting. This builders allows a number of global options to be specified and - * an arbitrary number of {@link org.elasticsearch.search.suggest.term.TermSuggestionBuilder} instances. + * an arbitrary number of {@link SuggestionBuilder} instances. *

- * Suggesting works by suggesting terms that appear in the suggest text that are similar compared to the terms in - * provided text. These spelling suggestions are based on several options described in this class. + * Suggesting works by suggesting terms/phrases that appear in the suggest text that are similar compared + * to the terms in provided text. These suggestions are based on several options described in this class. */ -public class SuggestBuilder extends ToXContentToBytes { +public class SuggestBuilder extends ToXContentToBytes implements Writeable { - private final String name; private String globalText; - private final List> suggestions = new ArrayList<>(); public SuggestBuilder() { - this.name = null; - } - - public SuggestBuilder(String name) { - this.name = name; } /** @@ -54,7 +52,7 @@ public class SuggestBuilder extends ToXContentToBytes { * The suggest text gets analyzed by the suggest analyzer or the suggest field search analyzer. * For each analyzed token, suggested terms are suggested if possible. 
*/ - public SuggestBuilder setText(String globalText) { + public SuggestBuilder setText(@Nullable String globalText) { this.globalText = globalText; return this; } @@ -77,12 +75,7 @@ public class SuggestBuilder extends ToXContentToBytes { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if(name == null) { - builder.startObject(); - } else { - builder.startObject(name); - } - + builder.startObject(); if (globalText != null) { builder.field("text", globalText); } @@ -92,4 +85,45 @@ public class SuggestBuilder extends ToXContentToBytes { builder.endObject(); return builder; } + + @Override + public SuggestBuilder readFrom(StreamInput in) throws IOException { + final SuggestBuilder builder = new SuggestBuilder(); + builder.globalText = in.readOptionalString(); + final int size = in.readVInt(); + for (int i = 0; i < size; i++) { + builder.suggestions.add(in.readSuggestion()); + } + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(globalText); + final int size = suggestions.size(); + out.writeVInt(size); + for (int i = 0; i < size; i++) { + out.writeSuggestion(suggestions.get(i)); + } + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + if (other == null || getClass() != other.getClass()) { + return false; + } + @SuppressWarnings("unchecked") + SuggestBuilder o = (SuggestBuilder)other; + return Objects.equals(globalText, o.globalText) && + Objects.equals(suggestions, o.suggestions); + } + + @Override + public int hashCode() { + return Objects.hash(globalText, suggestions); + } + } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java index 2509f792ecc9..b9f2e29321f2 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java +++ 
b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java @@ -226,22 +226,22 @@ public final class SuggestUtils { } else if (parseFieldMatcher.match(fieldName, Fields.SORT)) { suggestion.sort(SuggestUtils.resolveSort(parser.text())); } else if (parseFieldMatcher.match(fieldName, Fields.STRING_DISTANCE)) { - suggestion.stringDistance(SuggestUtils.resolveDistance(parser.text())); + suggestion.stringDistance(SuggestUtils.resolveDistance(parser.text())); } else if (parseFieldMatcher.match(fieldName, Fields.MAX_EDITS)) { - suggestion.maxEdits(parser.intValue()); + suggestion.maxEdits(parser.intValue()); if (suggestion.maxEdits() < 1 || suggestion.maxEdits() > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE) { throw new IllegalArgumentException("Illegal max_edits value " + suggestion.maxEdits()); } } else if (parseFieldMatcher.match(fieldName, Fields.MAX_INSPECTIONS)) { - suggestion.maxInspections(parser.intValue()); + suggestion.maxInspections(parser.intValue()); } else if (parseFieldMatcher.match(fieldName, Fields.MAX_TERM_FREQ)) { - suggestion.maxTermFreq(parser.floatValue()); + suggestion.maxTermFreq(parser.floatValue()); } else if (parseFieldMatcher.match(fieldName, Fields.PREFIX_LENGTH)) { - suggestion.prefixLength(parser.intValue()); + suggestion.prefixLength(parser.intValue()); } else if (parseFieldMatcher.match(fieldName, Fields.MIN_WORD_LENGTH)) { - suggestion.minQueryLength(parser.intValue()); + suggestion.minQueryLength(parser.intValue()); } else if (parseFieldMatcher.match(fieldName, Fields.MIN_DOC_FREQ)) { - suggestion.minDocFreq(parser.floatValue()); + suggestion.minDocFreq(parser.floatValue()); } else { return false; } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java index 7705f2201d1c..59304fdd578c 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java @@ -53,6 +53,7 @@ public abstract class SuggestionBuilder> extends protected static final ParseField SHARDSIZE_FIELD = new ParseField("shard_size"); public SuggestionBuilder(String name) { + Objects.requireNonNull(name, "Suggester 'name' cannot be null"); this.name = name; } @@ -296,4 +297,5 @@ public abstract class SuggestionBuilder> extends * HashCode for the subclass of {@link SuggestionBuilder} to implement. */ protected abstract int doHashCode(); -} \ No newline at end of file + +} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 1b515e754093..afa0760e7044 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -49,9 +49,11 @@ import java.util.Set; */ public class CompletionSuggestionBuilder extends SuggestionBuilder { + public static final CompletionSuggestionBuilder PROTOTYPE = new CompletionSuggestionBuilder("_na_"); // name doesn't matter final static String SUGGESTION_NAME = "completion"; static final ParseField PAYLOAD_FIELD = new ParseField("payload"); static final ParseField CONTEXTS_FIELD = new ParseField("contexts", "context"); + private FuzzyOptionsBuilder fuzzyOptionsBuilder; private RegexOptionsBuilder regexOptionsBuilder; private final Map> queryContexts = new HashMap<>(); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java index e2a14c1a2b2d..bd318e1a0134 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java @@ -16,13 +16,26 @@ * specific language governing permissions and limitations * under the License. */ + package org.elasticsearch.search.suggest.term; + import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.suggest.SuggestionBuilder; import java.io.IOException; +import java.util.Locale; +import java.util.Objects; + +import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_ACCURACY; +import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MAX_EDITS; +import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MAX_INSPECTIONS; +import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MAX_TERM_FREQ; +import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MIN_DOC_FREQ; +import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MIN_WORD_LENGTH; +import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_PREFIX_LENGTH; /** * Defines the actual suggest command. 
Each command uses the global options @@ -31,18 +44,19 @@ import java.io.IOException; */ public class TermSuggestionBuilder extends SuggestionBuilder { + public static final TermSuggestionBuilder PROTOTYPE = new TermSuggestionBuilder("_na_"); // name doesn't matter static final String SUGGESTION_NAME = "term"; - private String suggestMode; - private Float accuracy; - private String sort; - private String stringDistance; - private Integer maxEdits; - private Integer maxInspections; - private Float maxTermFreq; - private Integer prefixLength; - private Integer minWordLength; - private Float minDocFreq; + private SuggestMode suggestMode = SuggestMode.MISSING; + private Float accuracy = DEFAULT_ACCURACY; + private SortBy sort = SortBy.SCORE; + private StringDistanceImpl stringDistance = StringDistanceImpl.INTERNAL; + private Integer maxEdits = DEFAULT_MAX_EDITS; + private Integer maxInspections = DEFAULT_MAX_INSPECTIONS; + private Float maxTermFreq = DEFAULT_MAX_TERM_FREQ; + private Integer prefixLength = DEFAULT_PREFIX_LENGTH; + private Integer minWordLength = DEFAULT_MIN_WORD_LENGTH; + private Float minDocFreq = DEFAULT_MIN_DOC_FREQ; /** * @param name @@ -65,11 +79,19 @@ public class TermSuggestionBuilder extends SuggestionBuilder */ - public TermSuggestionBuilder suggestMode(String suggestMode) { + public TermSuggestionBuilder suggestMode(SuggestMode suggestMode) { + Objects.requireNonNull(suggestMode, "suggestMode must not be null"); this.suggestMode = suggestMode; return this; } + /** + * Get the suggest mode setting. + */ + public SuggestMode suggestMode() { + return suggestMode; + } + /** * s how similar the suggested terms at least need to be compared to the * original suggest text tokens. A value between 0 and 1 can be specified. 
@@ -78,11 +100,21 @@ public class TermSuggestionBuilder extends SuggestionBuilder * Default is 0.5 */ - public TermSuggestionBuilder setAccuracy(float accuracy) { + public TermSuggestionBuilder accuracy(float accuracy) { + if (accuracy < 0.0f || accuracy > 1.0f) { + throw new IllegalArgumentException("accuracy must be between 0 and 1"); + } this.accuracy = accuracy; return this; } + /** + * Get the accuracy setting. + */ + public Float accuracy() { + return accuracy; + } + /** * Sets how to sort the suggest terms per suggest text token. Two possible * values: @@ -90,19 +122,27 @@ public class TermSuggestionBuilder extends SuggestionBuilderscore - Sort should first be based on score, then * document frequency and then the term itself. *

  • frequency - Sort should first be based on document - * frequency, then scotr and then the term itself. + * frequency, then score and then the term itself. * *

    * What the score is depends on the suggester being used. */ - public TermSuggestionBuilder sort(String sort) { + public TermSuggestionBuilder sort(SortBy sort) { + Objects.requireNonNull(sort, "sort must not be null"); this.sort = sort; return this; } + /** + * Get the sort setting. + */ + public SortBy sort() { + return sort; + } + /** * Sets what string distance implementation to use for comparing how similar - * suggested terms are. Four possible values can be specified: + * suggested terms are. Five possible values can be specified: *

      *
    1. internal - This is the default and is based on * damerau_levenshtein, but highly optimized for comparing @@ -117,32 +157,60 @@ public class TermSuggestionBuilder extends SuggestionBuilder */ - public TermSuggestionBuilder stringDistance(String stringDistance) { + public TermSuggestionBuilder stringDistance(StringDistanceImpl stringDistance) { + Objects.requireNonNull(stringDistance, "stringDistance must not be null"); this.stringDistance = stringDistance; return this; } + /** + * Get the string distance implementation setting. + */ + public StringDistanceImpl stringDistance() { + return stringDistance; + } + /** * Sets the maximum edit distance candidate suggestions can have in order to * be considered as a suggestion. Can only be a value between 1 and 2. Any * other value result in an bad request error being thrown. Defaults to * 2. */ - public TermSuggestionBuilder maxEdits(Integer maxEdits) { + public TermSuggestionBuilder maxEdits(int maxEdits) { + if (maxEdits < 1 || maxEdits > 2) { + throw new IllegalArgumentException("maxEdits must be between 1 and 2"); + } this.maxEdits = maxEdits; return this; } + /** + * Get the maximum edit distance setting. + */ + public Integer maxEdits() { + return maxEdits; + } + /** * A factor that is used to multiply with the size in order to inspect more * candidate suggestions. Can improve accuracy at the cost of performance. * Defaults to 5. */ - public TermSuggestionBuilder maxInspections(Integer maxInspections) { + public TermSuggestionBuilder maxInspections(int maxInspections) { + if (maxInspections < 0) { + throw new IllegalArgumentException("maxInspections must be positive"); + } this.maxInspections = maxInspections; return this; } + /** + * Get the factor for inspecting more candidate suggestions setting. + */ + public Integer maxInspections() { + return maxInspections; + } + /** * Sets a maximum threshold in number of documents a suggest text token can * exist in order to be corrected. 
Can be a relative percentage number (e.g @@ -155,10 +223,23 @@ public class TermSuggestionBuilder extends SuggestionBuilder 1.0f && maxTermFreq != Math.floor(maxTermFreq)) { + throw new IllegalArgumentException("if maxTermFreq is greater than 1, it must not be a fraction"); + } this.maxTermFreq = maxTermFreq; return this; } + /** + * Get the maximum term frequency threshold setting. + */ + public Float maxTermFreq() { + return maxTermFreq; + } + /** * Sets the number of minimal prefix characters that must match in order be * a candidate suggestion. Defaults to 1. Increasing this number improves @@ -166,19 +247,39 @@ public class TermSuggestionBuilder extends SuggestionBuilder4. */ public TermSuggestionBuilder minWordLength(int minWordLength) { + if (minWordLength < 1) { + throw new IllegalArgumentException("minWordLength must be greater or equal to 1"); + } this.minWordLength = minWordLength; return this; } + /** + * Get the minimum length of a text term to be corrected setting. + */ + public Integer minWordLength() { + return minWordLength; + } + /** * Sets a minimal threshold in number of documents a suggested term should * appear in. This can be specified as an absolute number or as a relative @@ -187,10 +288,24 @@ public class TermSuggestionBuilder extends SuggestionBuilder 1.0f && minDocFreq != Math.floor(minDocFreq)) { + throw new IllegalArgumentException("if minDocFreq is greater than 1, it must not be a fraction"); + } this.minDocFreq = minDocFreq; return this; } + /** + * Get the minimal threshold for the frequency of a term appearing in the + * document set setting. + */ + public Float minDocFreq() { + return minDocFreq; + } + @Override public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { if (suggestMode != null) { @@ -233,25 +348,149 @@ public class TermSuggestionBuilder extends SuggestionBuilder { + /** Only suggest terms in the suggest text that aren't in the index. This is the default. 
*/ + MISSING, + /** Only suggest terms that occur in more docs then the original suggest text term. */ + POPULAR, + /** Suggest any matching suggest terms based on tokens in the suggest text. */ + ALWAYS; + + protected static SuggestMode PROTOTYPE = MISSING; + + @Override + public void writeTo(final StreamOutput out) throws IOException { + out.writeVInt(ordinal()); + } + + @Override + public SuggestMode readFrom(final StreamInput in) throws IOException { + int ordinal = in.readVInt(); + if (ordinal < 0 || ordinal >= values().length) { + throw new IOException("Unknown SuggestMode ordinal [" + ordinal + "]"); + } + return values()[ordinal]; + } + + public static SuggestMode fromString(final String str) { + Objects.requireNonNull(str, "Input string is null"); + return valueOf(str.toUpperCase(Locale.ROOT)); + } + } + + /** An enum representing the valid sorting options */ + public enum SortBy implements Writeable { + /** Sort should first be based on score, then document frequency and then the term itself. */ + SCORE, + /** Sort should first be based on document frequency, then score and then the term itself. */ + FREQUENCY; + + protected static SortBy PROTOTYPE = SCORE; + + @Override + public void writeTo(final StreamOutput out) throws IOException { + out.writeVInt(ordinal()); + } + + @Override + public SortBy readFrom(final StreamInput in) throws IOException { + int ordinal = in.readVInt(); + if (ordinal < 0 || ordinal >= values().length) { + throw new IOException("Unknown SortBy ordinal [" + ordinal + "]"); + } + return values()[ordinal]; + } + + public static SortBy fromString(final String str) { + Objects.requireNonNull(str, "Input string is null"); + return valueOf(str.toUpperCase(Locale.ROOT)); + } + } + + /** An enum representing the valid string edit distance algorithms for determining suggestions. 
*/ + public enum StringDistanceImpl implements Writeable { + /** This is the default and is based on damerau_levenshtein, but highly optimized + * for comparing string distance for terms inside the index. */ + INTERNAL, + /** String distance algorithm based on Damerau-Levenshtein algorithm. */ + DAMERAU_LEVENSHTEIN, + /** String distance algorithm based on Levenstein edit distance algorithm. */ + LEVENSTEIN, + /** String distance algorithm based on Jaro-Winkler algorithm. */ + JAROWINKLER, + /** String distance algorithm based on character n-grams. */ + NGRAM; + + protected static StringDistanceImpl PROTOTYPE = INTERNAL; + + @Override + public void writeTo(final StreamOutput out) throws IOException { + out.writeVInt(ordinal()); + } + + @Override + public StringDistanceImpl readFrom(final StreamInput in) throws IOException { + int ordinal = in.readVInt(); + if (ordinal < 0 || ordinal >= values().length) { + throw new IOException("Unknown StringDistanceImpl ordinal [" + ordinal + "]"); + } + return values()[ordinal]; + } + + public static StringDistanceImpl fromString(final String str) { + Objects.requireNonNull(str, "Input string is null"); + return valueOf(str.toUpperCase(Locale.ROOT)); + } + } + } diff --git a/core/src/test/java/org/elasticsearch/common/io/stream/AbstractWriteableEnumTestCase.java b/core/src/test/java/org/elasticsearch/common/io/stream/AbstractWriteableEnumTestCase.java new file mode 100644 index 000000000000..b8be6fb1493e --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/io/stream/AbstractWriteableEnumTestCase.java @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.io.stream; + +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; + +/** + * Abstract class offering base functionality for testing @{link Writeable} enums. + */ +public abstract class AbstractWriteableEnumTestCase extends ESTestCase { + + /** + * Test that the ordinals for the enum are consistent (i.e. the order hasn't changed) + * because writing an enum to a stream often uses the ordinal value. + */ + public abstract void testValidOrdinals(); + + /** + * Test that the conversion from a string to enum is correct. + */ + public abstract void testFromString(); + + /** + * Test that the correct enum value is produced from the serialized value in the {@link StreamInput}. + */ + public abstract void testReadFrom() throws IOException; + + /** + * Test that the correct serialized value is produced from the {@link StreamOutput}. 
+ */ + public abstract void testWriteTo() throws IOException; + + // a convenience method for testing the write of a writeable enum + protected static void assertWriteToStream(final Writeable writeableEnum, final int ordinal) throws IOException { + try (BytesStreamOutput out = new BytesStreamOutput()) { + writeableEnum.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(ordinal)); + } + } + } + + // a convenience method for testing the read of a writeable enum + protected static > void assertReadFromStream(final int ordinal, final Writeable expected) throws IOException { + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(ordinal); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(expected.readFrom(in), equalTo(expected)); + } + } + } + +} diff --git a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index 61f678b5a086..77aada31a46d 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -23,7 +23,9 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; +import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import org.elasticsearch.test.ESTestCase; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -46,7 +48,9 @@ public abstract class AbstractSuggestionBuilderTestCase { + + @Override + protected TermSuggestionBuilder 
randomSuggestionBuilder() { + TermSuggestionBuilder testBuilder = new TermSuggestionBuilder(randomAsciiOfLength(10)); + maybeSet(testBuilder::suggestMode, randomSuggestMode()); + maybeSet(testBuilder::accuracy, randomFloat()); + maybeSet(testBuilder::sort, randomSort()); + maybeSet(testBuilder::stringDistance, randomStringDistance()); + maybeSet(testBuilder::maxEdits, randomIntBetween(1, 2)); + maybeSet(testBuilder::maxInspections, randomInt(Integer.MAX_VALUE)); + maybeSet(testBuilder::maxTermFreq, randomFloat()); + maybeSet(testBuilder::prefixLength, randomInt(Integer.MAX_VALUE)); + maybeSet(testBuilder::minWordLength, randomInt(Integer.MAX_VALUE)); + maybeSet(testBuilder::minDocFreq, randomFloat()); + return testBuilder; + } + + private SuggestMode randomSuggestMode() { + final int randomVal = randomIntBetween(0, 2); + switch (randomVal) { + case 0: return SuggestMode.MISSING; + case 1: return SuggestMode.POPULAR; + case 2: return SuggestMode.ALWAYS; + default: throw new IllegalArgumentException("No suggest mode with an ordinal of " + randomVal); + } + } + + private SortBy randomSort() { + int randomVal = randomIntBetween(0, 1); + switch (randomVal) { + case 0: return SortBy.SCORE; + case 1: return SortBy.FREQUENCY; + default: throw new IllegalArgumentException("No sort mode with an ordinal of " + randomVal); + } + } + + private StringDistanceImpl randomStringDistance() { + int randomVal = randomIntBetween(0, 4); + switch (randomVal) { + case 0: return StringDistanceImpl.INTERNAL; + case 1: return StringDistanceImpl.DAMERAU_LEVENSHTEIN; + case 2: return StringDistanceImpl.LEVENSTEIN; + case 3: return StringDistanceImpl.JAROWINKLER; + case 4: return StringDistanceImpl.NGRAM; + default: throw new IllegalArgumentException("No string distance algorithm with an ordinal of " + randomVal); + } + } + + @Override + protected void mutateSpecificParameters(TermSuggestionBuilder builder) throws IOException { + switch (randomIntBetween(0, 9)) { + case 0: + 
builder.suggestMode(randomValueOtherThan(builder.suggestMode(), () -> randomSuggestMode())); + break; + case 1: + builder.accuracy(randomValueOtherThan(builder.accuracy(), () -> randomFloat())); + break; + case 2: + builder.sort(randomValueOtherThan(builder.sort(), () -> randomSort())); + break; + case 3: + builder.stringDistance(randomValueOtherThan(builder.stringDistance(), () -> randomStringDistance())); + break; + case 4: + builder.maxEdits(randomValueOtherThan(builder.maxEdits(), () -> randomIntBetween(1, 2))); + break; + case 5: + builder.maxInspections(randomValueOtherThan(builder.maxInspections(), () -> randomInt(Integer.MAX_VALUE))); + break; + case 6: + builder.maxTermFreq(randomValueOtherThan(builder.maxTermFreq(), () -> randomFloat())); + break; + case 7: + builder.prefixLength(randomValueOtherThan(builder.prefixLength(), () -> randomInt(Integer.MAX_VALUE))); + break; + case 8: + builder.minWordLength(randomValueOtherThan(builder.minWordLength(), () -> randomInt(Integer.MAX_VALUE))); + break; + case 9: + builder.minDocFreq(randomValueOtherThan(builder.minDocFreq(), () -> randomFloat())); + break; + default: + break; // do nothing + } + } + + public void testInvalidParameters() throws IOException { + TermSuggestionBuilder builder = new TermSuggestionBuilder(randomAsciiOfLength(10)); + // test invalid accuracy values + try { + builder.accuracy(-0.5f); + fail("Should not allow accuracy to be set to a negative value."); + } catch (IllegalArgumentException e) { + } + try { + builder.accuracy(1.1f); + fail("Should not allow accuracy to be greater than 1.0."); + } catch (IllegalArgumentException e) { + } + // test invalid max edit distance values + try { + builder.maxEdits(0); + fail("Should not allow maxEdits to be less than 1."); + } catch (IllegalArgumentException e) { + } + try { + builder.maxEdits(-1); + fail("Should not allow maxEdits to be a negative value."); + } catch (IllegalArgumentException e) { + } + try { + builder.maxEdits(3); + fail("Should not 
allow maxEdits to be greater than 2."); + } catch (IllegalArgumentException e) { + } + // test invalid max inspections values + try { + builder.maxInspections(-1); + fail("Should not allow maxInspections to be a negative value."); + } catch (IllegalArgumentException e) { + } + // test invalid max term freq values + try { + builder.maxTermFreq(-0.5f); + fail("Should not allow max term freq to be a negative value."); + } catch (IllegalArgumentException e) { + } + try { + builder.maxTermFreq(1.5f); + fail("If max term freq is greater than 1, it must be a whole number."); + } catch (IllegalArgumentException e) { + } + try { + builder.maxTermFreq(2.0f); // this should be allowed + } catch (IllegalArgumentException e) { + fail("A max term freq greater than 1 that is a whole number should be allowed."); + } + // test invalid min doc freq values + try { + builder.minDocFreq(-0.5f); + fail("Should not allow min doc freq to be a negative value."); + } catch (IllegalArgumentException e) { + } + try { + builder.minDocFreq(1.5f); + fail("If min doc freq is greater than 1, it must be a whole number."); + } catch (IllegalArgumentException e) { + } + try { + builder.minDocFreq(2.0f); // this should be allowed + } catch (IllegalArgumentException e) { + fail("A min doc freq greater than 1 that is a whole number should be allowed."); + } + // test invalid min word length values + try { + builder.minWordLength(0); + fail("A min word length < 1 should not be allowed."); + } catch (IllegalArgumentException e) { + } + try { + builder.minWordLength(-1); + fail("Should not allow min word length to be a negative value."); + } catch (IllegalArgumentException e) { + } + // test invalid prefix length values + try { + builder.prefixLength(-1); + fail("Should not allow prefix length to be a negative value."); + } catch (IllegalArgumentException e) { + } + // test invalid size values + try { + builder.size(0); + fail("Size must be a positive value."); + } catch (IllegalArgumentException e) { + } 
+ try { + builder.size(-1); + fail("Size must be a positive value."); + } catch (IllegalArgumentException e) { + } + // null values not allowed for enums + try { + builder.sort(null); + fail("Should not allow setting a null sort value."); + } catch (NullPointerException e) { + } + try { + builder.stringDistance(null); + fail("Should not allow setting a null string distance value."); + } catch (NullPointerException e) { + } + try { + builder.suggestMode(null); + fail("Should not allow setting a null suggest mode value."); + } catch (NullPointerException e) { + } + } + + public void testDefaultValuesSet() { + TermSuggestionBuilder builder = new TermSuggestionBuilder(randomAsciiOfLength(10)); + assertThat(builder.accuracy(), notNullValue()); + assertThat(builder.maxEdits(), notNullValue()); + assertThat(builder.maxInspections(), notNullValue()); + assertThat(builder.maxTermFreq(), notNullValue()); + assertThat(builder.minDocFreq(), notNullValue()); + assertThat(builder.minWordLength(), notNullValue()); + assertThat(builder.prefixLength(), notNullValue()); + assertThat(builder.sort(), notNullValue()); + assertThat(builder.stringDistance(), notNullValue()); + assertThat(builder.suggestMode(), notNullValue()); + } + +} diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java index 6360a444a23f..d846ff473071 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java @@ -27,6 +27,8 @@ import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion; import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion; import static 
org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.candidateGenerator; +import static org.elasticsearch.search.suggest.SuggestionBuilder.SortBy; +import static org.elasticsearch.search.suggest.SuggestionBuilder.SuggestMode; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestion; @@ -100,7 +102,7 @@ public class SuggestSearchTests extends ESIntegTestCase { refresh(); TermSuggestionBuilder termSuggest = termSuggestion("test") - .suggestMode("always") // Always, otherwise the results can vary between requests. + .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. .text("abcd") .field("text"); logger.info("--> run suggestions with one index"); @@ -114,7 +116,7 @@ public class SuggestSearchTests extends ESIntegTestCase { index("test_1", "type1", "4", "text", "ab cc"); refresh(); termSuggest = termSuggestion("test") - .suggestMode("always") // Always, otherwise the results can vary between requests. + .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. .text("ab cd") .minWordLength(1) .field("text"); @@ -141,7 +143,7 @@ public class SuggestSearchTests extends ESIntegTestCase { refresh(); termSuggest = termSuggestion("test") - .suggestMode("always") // Always, otherwise the results can vary between requests. + .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. .text("ab cd") .minWordLength(1) .field("text"); @@ -161,7 +163,7 @@ public class SuggestSearchTests extends ESIntegTestCase { termSuggest = termSuggestion("test") - .suggestMode("always") // Always, otherwise the results can vary between requests. + .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. 
.text("ABCD") .minWordLength(1) .field("text"); @@ -241,7 +243,7 @@ public class SuggestSearchTests extends ESIntegTestCase { assertThat("didn't ask for suggestions but got some", search.getSuggest(), nullValue()); TermSuggestionBuilder termSuggestion = termSuggestion("test") - .suggestMode("always") // Always, otherwise the results can vary between requests. + .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. .text("abcd") .field("text") .size(10); @@ -316,7 +318,7 @@ public class SuggestSearchTests extends ESIntegTestCase { assertThat("didn't ask for suggestions but got some", search.getSuggest(), nullValue()); TermSuggestionBuilder termSuggest = termSuggestion("test") - .suggestMode("always") // Always, otherwise the results can vary between requests. + .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. .text("abcd") .field("text"); Suggest suggest = searchSuggest( termSuggest); @@ -336,7 +338,7 @@ public class SuggestSearchTests extends ESIntegTestCase { refresh(); TermSuggestionBuilder termSuggest = termSuggestion("test") - .suggestMode("always") // Always, otherwise the results can vary between requests. + .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. 
.text("abcd") .field("text"); Suggest suggest = searchSuggest( termSuggest); @@ -361,13 +363,13 @@ public class SuggestSearchTests extends ESIntegTestCase { Suggest suggest = searchSuggest( termSuggestion("size1") .size(1).text("prefix_abcd").maxTermFreq(10).prefixLength(1).minDocFreq(0) - .field("field1").suggestMode("always"), + .field("field1").suggestMode(SuggestMode.ALWAYS), termSuggestion("field2") .field("field2").text("prefix_eeeh prefix_efgh") - .maxTermFreq(10).minDocFreq(0).suggestMode("always"), + .maxTermFreq(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS), termSuggestion("accuracy") - .field("field2").text("prefix_efgh").setAccuracy(1f) - .maxTermFreq(10).minDocFreq(0).suggestMode("always")); + .field("field2").text("prefix_efgh").accuracy(1f) + .maxTermFreq(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS)); assertSuggestion(suggest, 0, "size1", "prefix_aacd"); assertThat(suggest.getSuggestion("field2").getEntries().get(0).getText().string(), equalTo("prefix_eeeh")); assertSuggestion(suggest, 0, "field2", "prefix_efgh"); @@ -403,15 +405,15 @@ public class SuggestSearchTests extends ESIntegTestCase { Suggest suggest = searchSuggest( "prefix_abcd", termSuggestion("size3SortScoreFirst") - .size(3).minDocFreq(0).field("field1").suggestMode("always"), + .size(3).minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS), termSuggestion("size10SortScoreFirst") - .size(10).minDocFreq(0).field("field1").suggestMode("always").shardSize(50), + .size(10).minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS).shardSize(50), termSuggestion("size3SortScoreFirstMaxEdits1") .maxEdits(1) - .size(10).minDocFreq(0).field("field1").suggestMode("always"), + .size(10).minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS), termSuggestion("size10SortFrequencyFirst") - .size(10).sort("frequency").shardSize(1000) - .minDocFreq(0).field("field1").suggestMode("always")); + .size(10).sort(SortBy.FREQUENCY).shardSize(1000) + 
.minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS)); // The commented out assertions fail sometimes because suggestions are based off of shard frequencies instead of index frequencies. assertSuggestion(suggest, 0, "size3SortScoreFirst", "prefix_aacd", "prefix_abcc", "prefix_accd"); @@ -784,7 +786,7 @@ public class SuggestSearchTests extends ESIntegTestCase { Suggest suggest = searchSuggest( "foobar", termSuggestion("simple") - .size(10).minDocFreq(0).field("field1").suggestMode("always")); + .size(10).minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS)); ElasticsearchAssertions.assertSuggestionSize(suggest, 0, 3, "simple"); } From 96d7fc858f88f93a18addada4c84b82555149487 Mon Sep 17 00:00:00 2001 From: Ali Beyad Date: Wed, 3 Feb 2016 12:42:03 -0500 Subject: [PATCH 009/320] Fixed a compiler error with the SuggestSearchTests --- .../org/elasticsearch/messy/tests/SuggestSearchTests.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java index d846ff473071..d66ee0a6b3c9 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java @@ -27,8 +27,6 @@ import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion; import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion; import static org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.candidateGenerator; -import static org.elasticsearch.search.suggest.SuggestionBuilder.SortBy; -import static org.elasticsearch.search.suggest.SuggestionBuilder.SuggestMode; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestion; @@ -75,6 +73,8 @@ import org.elasticsearch.search.suggest.SuggestionBuilder; import org.elasticsearch.search.suggest.phrase.DirectCandidateGeneratorBuilder; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; +import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SortBy; +import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; @@ -102,7 +102,7 @@ public class SuggestSearchTests extends ESIntegTestCase { refresh(); TermSuggestionBuilder termSuggest = termSuggestion("test") - .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. + .suggestMode(TermSuggestionBuilder.SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. .text("abcd") .field("text"); logger.info("--> run suggestions with one index"); From 245022f95b1a1cfb98a4eca38392f30a56b002e9 Mon Sep 17 00:00:00 2001 From: Ali Beyad Date: Wed, 3 Feb 2016 13:22:39 -0500 Subject: [PATCH 010/320] Fixed JSON parsing for the Term Suggester Ensures that the clients' serialization of the sort and string edit distance enumeration values as upper case are correctly normalized when parsing the incoming JSON. 
--- .../java/org/elasticsearch/search/suggest/SuggestUtils.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java index b9f2e29321f2..595806c8acb3 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java @@ -171,6 +171,7 @@ public final class SuggestUtils { } public static Suggest.Suggestion.Sort resolveSort(String sortVal) { + sortVal = sortVal.toLowerCase(Locale.US); if ("score".equals(sortVal)) { return Suggest.Suggestion.Sort.SCORE; } else if ("frequency".equals(sortVal)) { @@ -181,6 +182,7 @@ public final class SuggestUtils { } public static StringDistance resolveDistance(String distanceVal) { + distanceVal = distanceVal.toLowerCase(Locale.US); if ("internal".equals(distanceVal)) { return DirectSpellChecker.INTERNAL_LEVENSHTEIN; } else if ("damerau_levenshtein".equals(distanceVal) || "damerauLevenshtein".equals(distanceVal)) { From b108cb84bcab01c2edfb5ae7ffc1bbd862d933f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 5 Feb 2016 00:23:27 +0100 Subject: [PATCH 011/320] Comply with new 140 column per line checkstyle --- .../suggest/AbstractSuggestionBuilderTestCase.java | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index 77aada31a46d..6f01df4b7006 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -108,14 +108,17 @@ public abstract class AbstractSuggestionBuilderTestCase Date: Thu, 28 Jan 2016 20:04:15 +0100 Subject: [PATCH 
012/320] Suggest: Add parsing from xContent to PhraseSuggestionBuilder For the ongoing search refactoring (#10217) the PhraseSuggestionBuilder gets a way of parsing from xContent that will eventually replace the current SuggestParseElement. This PR adds the fromXContent method to the PhraseSuggestionBuilder and also adds parsing code for the common suggestion parameters to SuggestionBuilder. Also adding links from the Suggester implementations registeres in the Suggesters registry to the corresponding prototype that is going to be used for parsing once the refactoring is done and we switch from parsing on shard to parsing on coordinating node. --- .../resources/checkstyle_suppressions.xml | 6 - .../search/suggest/SuggestParseElement.java | 1 - .../search/suggest/Suggester.java | 10 + .../search/suggest/Suggesters.java | 12 + .../search/suggest/SuggestionBuilder.java | 55 +++- .../completion/CompletionSuggester.java | 10 +- .../CompletionSuggestionBuilder.java | 8 +- .../suggest/phrase/PhraseSuggester.java | 30 +- .../phrase/PhraseSuggestionBuilder.java | 275 ++++++++++++++---- .../phrase/PhraseSuggestionContext.java | 28 +- .../search/suggest/term/TermSuggester.java | 11 +- .../suggest/term/TermSuggestionBuilder.java | 8 +- .../rescore/QueryRescoreBuilderTests.java | 20 +- .../AbstractSuggestionBuilderTestCase.java | 42 +++ .../search/suggest/CustomSuggester.java | 5 + .../suggest/CustomSuggesterSearchIT.java | 12 +- .../phrase/PhraseSuggestionBuilderTests.java | 18 +- .../phrase/SmoothingModelTestCase.java | 54 ++-- .../term/TermSuggestionBuilderTests.java | 14 +- 19 files changed, 481 insertions(+), 138 deletions(-) diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 69ecb69d32a4..d2beae4bdffe 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -916,7 +916,6 @@ - @@ -927,12 +926,9 @@ - - - @@ -1446,7 
+1442,6 @@ - @@ -1462,7 +1457,6 @@ - diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java index a8a4e9ec26bb..cf6b391ec631 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java @@ -123,7 +123,6 @@ public final class SuggestParseElement implements SearchParseElement { SuggestUtils.verifySuggestion(mapperService, globalText, suggestionContext); suggestionSearchContext.addSuggestion(suggestionName, suggestionContext); } - return suggestionSearchContext; } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/Suggester.java b/core/src/main/java/org/elasticsearch/search/suggest/Suggester.java index 7b3f7bdb89f4..dffef3e1cf55 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/Suggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/Suggester.java @@ -29,8 +29,18 @@ public abstract class Suggester> innerExecute(String name, T suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException; + /** + * link the suggester to its corresponding {@link SuggestContextParser} + * TODO: This method should eventually be removed by {@link #getBuilderPrototype()} once + * we don't directly parse from xContent to the SuggestionContext any more + */ public abstract SuggestContextParser getContextParser(); + /** + * link the suggester to its corresponding {@link SuggestionBuilder} + */ + public abstract SuggestionBuilder getBuilderPrototype(); + public Suggest.Suggestion> execute(String name, T suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException { // #3469 We want to ignore empty shards diff --git a/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java b/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java index af54e5dfd866..c26649f63881 100644 
--- a/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java @@ -64,4 +64,16 @@ public final class Suggesters extends ExtensionPoint.ClassMap { public Suggester get(String type) { return parsers.get(type); } + + public SuggestionBuilder getSuggestionPrototype(String suggesterName) { + Suggester suggester = parsers.get(suggesterName); + if (suggester == null) { + throw new IllegalArgumentException("suggester with name [" + suggesterName + "] not supported"); + } + SuggestionBuilder suggestParser = suggester.getBuilderPrototype(); + if (suggestParser == null) { + throw new IllegalArgumentException("suggester with name [" + suggesterName + "] not supported"); + } + return suggestParser; + } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java index 59304fdd578c..1fdb38df88f0 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java @@ -21,10 +21,13 @@ package org.elasticsearch.search.suggest; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.QueryParseContext; import java.io.IOException; import java.util.Objects; @@ -138,12 +141,62 @@ public abstract class SuggestionBuilder> extends return builder; } + protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException; + + public static 
SuggestionBuilder fromXContent(QueryParseContext parseContext, String suggestionName, Suggesters suggesters) + throws IOException { + XContentParser parser = parseContext.parser(); + ParseFieldMatcher parsefieldMatcher = parseContext.parseFieldMatcher(); + XContentParser.Token token; + String fieldName = null; + String suggestText = null; + String prefix = null; + String regex = null; + SuggestionBuilder suggestionBuilder = null; + + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + fieldName = parser.currentName(); + } else if (token.isValue()) { + if (parsefieldMatcher.match(fieldName, TEXT_FIELD)) { + suggestText = parser.text(); + } else if (parsefieldMatcher.match(fieldName, PREFIX_FIELD)) { + prefix = parser.text(); + } else if (parsefieldMatcher.match(fieldName, REGEX_FIELD)) { + regex = parser.text(); + } else { + throw new IllegalArgumentException("[suggestion] does not support [" + fieldName + "]"); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if (suggestionName == null) { + throw new IllegalArgumentException("Suggestion must have name"); + } + SuggestionBuilder suggestParser = suggesters.getSuggestionPrototype(fieldName); + if (suggestParser == null) { + throw new IllegalArgumentException("Suggester[" + fieldName + "] not supported"); + } + suggestionBuilder = suggestParser.innerFromXContent(parseContext, suggestionName); + } + } + if (suggestText != null) { + suggestionBuilder.text(suggestText); + } + if (prefix != null) { + suggestionBuilder.prefix(prefix); + } + if (regex != null) { + suggestionBuilder.regex(regex); + } + return suggestionBuilder; + } + + protected abstract SuggestionBuilder innerFromXContent(QueryParseContext parseContext, String name) throws IOException; + private String getSuggesterName() { //default impl returns the same as writeable name, but we keep the distinction between the two just to make sure return getWriteableName(); } - 
protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException; /** * Sets from what field to fetch the candidate suggestions from. This is an diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index 527a35658c9c..8cd9d386a134 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -38,6 +38,7 @@ import org.elasticsearch.index.mapper.core.CompletionFieldMapper; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.Suggester; +import org.elasticsearch.search.suggest.SuggestionBuilder; import java.io.IOException; import java.util.ArrayList; @@ -50,6 +51,7 @@ import java.util.Set; public class CompletionSuggester extends Suggester { + @Override public SuggestContextParser getContextParser() { return new CompletionSuggestParser(this); } @@ -86,7 +88,8 @@ public class CompletionSuggester extends Suggester for (String field : payloadFields) { MappedFieldType payloadFieldType = suggestionContext.getMapperService().fullName(field); if (payloadFieldType != null) { - final AtomicFieldData data = suggestionContext.getIndexFieldDataService().getForField(payloadFieldType).load(subReaderContext); + final AtomicFieldData data = suggestionContext.getIndexFieldDataService().getForField(payloadFieldType) + .load(subReaderContext); final ScriptDocValues scriptValues = data.getScriptValues(); scriptValues.setNextDocId(subDocId); payload.put(field, new ArrayList<>(scriptValues.getValues())); @@ -262,4 +265,9 @@ public class CompletionSuggester extends Suggester } } } + + @Override + public SuggestionBuilder getBuilderPrototype() { + return 
CompletionSuggestionBuilder.PROTOTYPE; + } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index afa0760e7044..29992c1a077b 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.RegexpFlag; import org.elasticsearch.search.suggest.SuggestionBuilder; import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext; @@ -50,7 +51,7 @@ import java.util.Set; public class CompletionSuggestionBuilder extends SuggestionBuilder { public static final CompletionSuggestionBuilder PROTOTYPE = new CompletionSuggestionBuilder("_na_"); // name doesn't matter - final static String SUGGESTION_NAME = "completion"; + static final String SUGGESTION_NAME = "completion"; static final ParseField PAYLOAD_FIELD = new ParseField("payload"); static final ParseField CONTEXTS_FIELD = new ParseField("contexts", "context"); @@ -369,6 +370,11 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder { /* * More Ideas: * - add ability to find whitespace problems -> we can build a poor mans decompounder with our index based on a automaton? - * - add ability to build different error models maybe based on a confusion matrix? + * - add ability to build different error models maybe based on a confusion matrix? 
* - try to combine a token with its subsequent token to find / detect word splits (optional) * - for this to work we need some way to defined the position length of a candidate * - phonetic filters could be interesting here too for candidate selection */ @Override - public Suggestion> innerExecute(String name, PhraseSuggestionContext suggestion, IndexSearcher searcher, - CharsRefBuilder spare) throws IOException { + public Suggestion> innerExecute(String name, PhraseSuggestionContext suggestion, + IndexSearcher searcher, CharsRefBuilder spare) throws IOException { double realWordErrorLikelihood = suggestion.realworldErrorLikelyhood(); final PhraseSuggestion response = new PhraseSuggestion(name, suggestion.getSize()); final IndexReader indexReader = searcher.getIndexReader(); @@ -84,21 +85,23 @@ public final class PhraseSuggester extends Suggester { DirectSpellChecker directSpellChecker = SuggestUtils.getDirectSpellChecker(generator); Terms terms = MultiFields.getTerms(indexReader, generator.field()); if (terms != null) { - gens.add(new DirectCandidateGenerator(directSpellChecker, generator.field(), generator.suggestMode(), - indexReader, realWordErrorLikelihood, generator.size(), generator.preFilter(), generator.postFilter(), terms)); + gens.add(new DirectCandidateGenerator(directSpellChecker, generator.field(), generator.suggestMode(), + indexReader, realWordErrorLikelihood, generator.size(), generator.preFilter(), generator.postFilter(), terms)); } } final String suggestField = suggestion.getField(); final Terms suggestTerms = MultiFields.getTerms(indexReader, suggestField); if (gens.size() > 0 && suggestTerms != null) { - final NoisyChannelSpellChecker checker = new NoisyChannelSpellChecker(realWordErrorLikelihood, suggestion.getRequireUnigram(), suggestion.getTokenLimit()); + final NoisyChannelSpellChecker checker = new NoisyChannelSpellChecker(realWordErrorLikelihood, suggestion.getRequireUnigram(), + suggestion.getTokenLimit()); final BytesRef separator = 
suggestion.separator(); - WordScorer wordScorer = suggestion.model().newScorer(indexReader, suggestTerms, suggestField, realWordErrorLikelihood, separator); + WordScorer wordScorer = suggestion.model().newScorer(indexReader, suggestTerms, suggestField, realWordErrorLikelihood, + separator); Result checkerResult; try (TokenStream stream = checker.tokenStream(suggestion.getAnalyzer(), suggestion.getText(), spare, suggestion.getField())) { - checkerResult = checker.getCorrections(stream, new MultiCandidateGeneratorWrapper(suggestion.getShardSize(), - gens.toArray(new CandidateGenerator[gens.size()])), suggestion.maxErrors(), - suggestion.getShardSize(), wordScorer, suggestion.confidence(), suggestion.gramSize()); + checkerResult = checker.getCorrections(stream, + new MultiCandidateGeneratorWrapper(suggestion.getShardSize(), gens.toArray(new CandidateGenerator[gens.size()])), + suggestion.maxErrors(), suggestion.getShardSize(), wordScorer, suggestion.confidence(), suggestion.gramSize()); } PhraseSuggestion.Entry resultEntry = buildResultEntry(suggestion, spare, checkerResult.cutoffScore); @@ -152,10 +155,15 @@ public final class PhraseSuggester extends Suggester { ScriptService scriptService() { return scriptService; } - + @Override public SuggestContextParser getContextParser() { return new PhraseSuggestParser(this); } + @Override + public SuggestionBuilder getBuilderPrototype() { + return PhraseSuggestionBuilder.PROTOTYPE; + } + } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java index f346df1f4420..a0a8e8afba9f 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.search.suggest.phrase; + import org.apache.lucene.index.IndexReader; import 
org.apache.lucene.index.Terms; import org.apache.lucene.util.BytesRef; @@ -51,24 +52,42 @@ import java.util.Set; */ public final class PhraseSuggestionBuilder extends SuggestionBuilder { - static final String SUGGESTION_NAME = "phrase"; + private static final String SUGGESTION_NAME = "phrase"; public static final PhraseSuggestionBuilder PROTOTYPE = new PhraseSuggestionBuilder("_na_"); - private Float maxErrors; - private String separator; - private Float realWordErrorLikelihood; - private Float confidence; - private final Map> generators = new HashMap<>(); + protected static final ParseField MAXERRORS_FIELD = new ParseField("max_errors"); + protected static final ParseField RWE_LIKELIHOOD_FIELD = new ParseField("real_word_error_likelihood"); + protected static final ParseField SEPARATOR_FIELD = new ParseField("separator"); + protected static final ParseField CONFIDENCE_FIELD = new ParseField("confidence"); + protected static final ParseField GENERATORS_FIELD = new ParseField("shard_size"); + protected static final ParseField GRAMSIZE_FIELD = new ParseField("gram_size"); + protected static final ParseField SMOOTHING_MODEL_FIELD = new ParseField("smoothing"); + protected static final ParseField FORCE_UNIGRAM_FIELD = new ParseField("force_unigrams"); + protected static final ParseField TOKEN_LIMIT_FIELD = new ParseField("token_limit"); + protected static final ParseField HIGHLIGHT_FIELD = new ParseField("highlight"); + protected static final ParseField PRE_TAG_FIELD = new ParseField("pre_tag"); + protected static final ParseField POST_TAG_FIELD = new ParseField("post_tag"); + protected static final ParseField COLLATE_FIELD = new ParseField("collate"); + protected static final ParseField COLLATE_QUERY_FIELD = new ParseField("query"); + protected static final ParseField COLLATE_QUERY_PARAMS = new ParseField("params"); + protected static final ParseField COLLATE_QUERY_PRUNE = new ParseField("prune"); + + private float maxErrors = 
PhraseSuggestionContext.DEFAULT_MAX_ERRORS; + private String separator = PhraseSuggestionContext.DEFAULT_SEPARATOR; + private float realWordErrorLikelihood = PhraseSuggestionContext.DEFAULT_RWE_ERRORLIKELIHOOD; + private float confidence = PhraseSuggestionContext.DEFAULT_CONFIDENCE; + // gramSize needs to be optional although there is a default, if unset parser try to detect and use shingle size private Integer gramSize; - private SmoothingModel model; - private Boolean forceUnigrams; - private Integer tokenLimit; + private boolean forceUnigrams = PhraseSuggestionContext.DEFAULT_REQUIRE_UNIGRAM; + private int tokenLimit = NoisyChannelSpellChecker.DEFAULT_TOKEN_LIMIT; private String preTag; private String postTag; private Template collateQuery; private Map collateParams; - private Boolean collatePrune; + private boolean collatePrune = PhraseSuggestionContext.DEFAULT_COLLATE_PRUNE; + private SmoothingModel model; + private final Map> generators = new HashMap<>(); public PhraseSuggestionBuilder(String name) { super(name); @@ -103,7 +122,10 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder1.0 which corresponds to that only * corrections with at most 1 missspelled term are returned. */ - public PhraseSuggestionBuilder maxErrors(Float maxErrors) { + public PhraseSuggestionBuilder maxErrors(float maxErrors) { + if (maxErrors <= 0.0) { + throw new IllegalArgumentException("max_error must be > 0.0"); + } this.maxErrors = maxErrors; return this; } @@ -120,6 +142,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder0.95 corresponding to 5% or * the real words are misspelled. 
*/ - public PhraseSuggestionBuilder realWordErrorLikelihood(Float realWordErrorLikelihood) { + public PhraseSuggestionBuilder realWordErrorLikelihood(float realWordErrorLikelihood) { + if (realWordErrorLikelihood <= 0.0) { + throw new IllegalArgumentException("real_word_error_likelihood must be > 0.0"); + } this.realWordErrorLikelihood = realWordErrorLikelihood; return this; } /** - * get the {@link #realWordErrorLikelihood(Float)} parameter + * get the {@link #realWordErrorLikelihood(float)} parameter */ public Float realWordErrorLikelihood() { return this.realWordErrorLikelihood; @@ -157,7 +183,10 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder0.0 the top N candidates * are returned. The default is 1.0 */ - public PhraseSuggestionBuilder confidence(Float confidence) { + public PhraseSuggestionBuilder confidence(float confidence) { + if (confidence < 0.0) { + throw new IllegalArgumentException("confidence must be >= 0.0"); + } this.confidence = confidence; return this; } @@ -318,27 +347,15 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder>> entrySet = generators.entrySet(); for (Entry> entry : entrySet) { @@ -350,25 +367,23 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder generators = new ArrayList<>(); - private int gramSize = 1; - private float confidence = 1.0f; + static final boolean DEFAULT_COLLATE_PRUNE = false; + static final boolean DEFAULT_REQUIRE_UNIGRAM = true; + static final float DEFAULT_CONFIDENCE = 1.0f; + static final int DEFAULT_GRAM_SIZE = 1; + static final float DEFAULT_RWE_ERRORLIKELIHOOD = 0.95f; + static final float DEFAULT_MAX_ERRORS = 0.5f; + static final String DEFAULT_SEPARATOR = " "; + + private float maxErrors = DEFAULT_MAX_ERRORS; + private BytesRef separator = new BytesRef(DEFAULT_SEPARATOR); + private float realworldErrorLikelihood = DEFAULT_RWE_ERRORLIKELIHOOD; + private int gramSize = DEFAULT_GRAM_SIZE; + private float confidence = DEFAULT_CONFIDENCE; private int 
tokenLimit = NoisyChannelSpellChecker.DEFAULT_TOKEN_LIMIT; + private boolean requireUnigram = DEFAULT_REQUIRE_UNIGRAM; private BytesRef preTag; private BytesRef postTag; private CompiledScript collateQueryScript; - private CompiledScript collateFilterScript; + private boolean prune = DEFAULT_COLLATE_PRUNE; + private List generators = new ArrayList<>(); private Map collateScriptParams = new HashMap<>(1); - private WordScorer.WordScorerFactory scorer; - private boolean requireUnigram = true; - private boolean prune = false; - public PhraseSuggestionContext(Suggester suggester) { super(suggester); } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java index 34cd3ad4d563..e67e619bf516 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.text.Text; import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.SuggestUtils; import org.elasticsearch.search.suggest.Suggester; +import org.elasticsearch.search.suggest.SuggestionBuilder; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import java.io.IOException; @@ -40,7 +41,8 @@ import java.util.List; public final class TermSuggester extends Suggester { @Override - public TermSuggestion innerExecute(String name, TermSuggestionContext suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException { + public TermSuggestion innerExecute(String name, TermSuggestionContext suggestion, IndexSearcher searcher, CharsRefBuilder spare) + throws IOException { DirectSpellChecker directSpellChecker = SuggestUtils.getDirectSpellChecker(suggestion.getDirectSpellCheckerSettings()); final IndexReader indexReader = searcher.getIndexReader(); TermSuggestion response = new 
TermSuggestion( @@ -76,7 +78,7 @@ public final class TermSuggester extends Suggester { @Override public void nextToken() { Term term = new Term(field, BytesRef.deepCopyOf(fillBytesRef(new BytesRefBuilder()))); - result.add(new Token(term, offsetAttr.startOffset(), offsetAttr.endOffset())); + result.add(new Token(term, offsetAttr.startOffset(), offsetAttr.endOffset())); } }, spare); return result; @@ -96,4 +98,9 @@ public final class TermSuggester extends Suggester { } + @Override + public SuggestionBuilder getBuilderPrototype() { + return TermSuggestionBuilder.PROTOTYPE; + } + } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java index bd318e1a0134..6f694f735414 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.search.suggest.SuggestionBuilder; import java.io.IOException; @@ -45,7 +46,7 @@ import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAUL public class TermSuggestionBuilder extends SuggestionBuilder { public static final TermSuggestionBuilder PROTOTYPE = new TermSuggestionBuilder("_na_"); // name doesn't matter - static final String SUGGESTION_NAME = "term"; + private static final String SUGGESTION_NAME = "term"; private SuggestMode suggestMode = SuggestMode.MISSING; private Float accuracy = DEFAULT_ACCURACY; @@ -341,6 +342,11 @@ public class TermSuggestionBuilder extends SuggestionBuilder thirdBuilder = serializedCopy(secondBuilder); assertTrue("rescore 
builder is not equal to self", thirdBuilder.equals(thirdBuilder)); assertTrue("rescore builder is not equal to its copy", secondBuilder.equals(thirdBuilder)); - assertThat("rescore builder copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(thirdBuilder.hashCode())); + assertThat("rescore builder copy's hashcode is different from original hashcode", secondBuilder.hashCode(), + equalTo(thirdBuilder.hashCode())); assertTrue("equals is not transitive", firstBuilder.equals(thirdBuilder)); - assertThat("rescore builder copy's hashcode is different from original hashcode", firstBuilder.hashCode(), equalTo(thirdBuilder.hashCode())); + assertThat("rescore builder copy's hashcode is different from original hashcode", firstBuilder.hashCode(), + equalTo(thirdBuilder.hashCode())); assertTrue("equals is not symmetric", thirdBuilder.equals(secondBuilder)); assertTrue("equals is not symmetric", thirdBuilder.equals(firstBuilder)); } @@ -160,7 +163,8 @@ public class QueryRescoreBuilderTests extends ESTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), indexSettings); // shard context will only need indicesQueriesRegistry for building Query objects nested in query rescorer - QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, null, indicesQueriesRegistry) { + QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, null, + indicesQueriesRegistry) { @Override public MappedFieldType fieldMapper(String name) { StringFieldMapper.Builder builder = MapperBuilders.stringField(name); @@ -170,10 +174,11 @@ public class QueryRescoreBuilderTests extends ESTestCase { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { RescoreBuilder rescoreBuilder = randomRescoreBuilder(); - QueryRescoreContext rescoreContext = 
(QueryRescoreContext) rescoreBuilder.build(mockShardContext); + QueryRescoreContext rescoreContext = rescoreBuilder.build(mockShardContext); XContentParser parser = createParser(rescoreBuilder); - QueryRescoreContext parsedRescoreContext = (QueryRescoreContext) new RescoreParseElement().parseSingleRescoreContext(parser, mockShardContext); + QueryRescoreContext parsedRescoreContext = (QueryRescoreContext) new RescoreParseElement().parseSingleRescoreContext(parser, + mockShardContext); assertNotSame(rescoreContext, parsedRescoreContext); assertEquals(rescoreContext.window(), parsedRescoreContext.window()); assertEquals(rescoreContext.query(), parsedRescoreContext.query()); @@ -316,7 +321,8 @@ public class QueryRescoreBuilderTests extends ESTestCase { * create random shape that is put under test */ public static org.elasticsearch.search.rescore.QueryRescorerBuilder randomRescoreBuilder() { - QueryBuilder queryBuilder = new MatchAllQueryBuilder().boost(randomFloat()).queryName(randomAsciiOfLength(20)); + QueryBuilder queryBuilder = new MatchAllQueryBuilder().boost(randomFloat()) + .queryName(randomAsciiOfLength(20)); org.elasticsearch.search.rescore.QueryRescorerBuilder rescorer = new org.elasticsearch.search.rescore.QueryRescorerBuilder(queryBuilder); if (randomBoolean()) { diff --git a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index 6f01df4b7006..3c5797f1e4a8 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -19,10 +19,19 @@ package org.elasticsearch.search.suggest; +import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import 
org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; @@ -31,6 +40,7 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import java.io.IOException; +import java.util.Collections; import java.util.function.Consumer; import java.util.function.Supplier; @@ -41,6 +51,7 @@ public abstract class AbstractSuggestionBuilderTestCase secondSuggestionBuilder = SuggestionBuilder.fromXContent(context, suggestionBuilder.name(), suggesters); + assertNotSame(suggestionBuilder, secondSuggestionBuilder); + assertEquals(suggestionBuilder, secondSuggestionBuilder); + assertEquals(suggestionBuilder.hashCode(), secondSuggestionBuilder.hashCode()); + } + } + private SB mutate(SB firstBuilder) throws IOException { SB mutation = serializedCopy(firstBuilder); assertNotSame(mutation, firstBuilder); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java index 35d495272cac..4dbae08080aa 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java @@ -71,4 +71,9 @@ public class CustomSuggester extends Suggester getBuilderPrototype() { + return 
CustomSuggesterSearchIT.CustomSuggestionBuilder.PROTOTYPE; + } } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java index 80eb4d7b7d44..b3af0eee142e 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -77,7 +78,9 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase { assertThat(suggestions.get(1).getText().string(), is(String.format(Locale.ROOT, "%s-%s-%s-123", randomText, randomField, randomSuffix))); } - class CustomSuggestionBuilder extends SuggestionBuilder { + static class CustomSuggestionBuilder extends SuggestionBuilder { + + public final static CustomSuggestionBuilder PROTOTYPE = new CustomSuggestionBuilder("_na_", "_na_", "_na_"); private String randomField; private String randomSuffix; @@ -122,6 +125,13 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase { return Objects.hash(randomField, randomSuffix); } + @Override + protected CustomSuggestionBuilder innerFromXContent(QueryParseContext parseContext, String name) + throws IOException { + // TODO some parsing + return new CustomSuggestionBuilder(name, randomField, randomSuffix); + } + } } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java index 
71a202a6b21a..3cf65722a5dc 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java @@ -47,8 +47,19 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC maybeSet(testBuilder::separator, randomAsciiOfLengthBetween(1, 10)); maybeSet(testBuilder::realWordErrorLikelihood, randomFloat()); maybeSet(testBuilder::confidence, randomFloat()); - maybeSet(testBuilder::collatePrune, randomBoolean()); maybeSet(testBuilder::collateQuery, randomAsciiOfLengthBetween(3, 20)); + // collate query prune and parameters will only be used when query is set + if (testBuilder.collateQuery() != null) { + maybeSet(testBuilder::collatePrune, randomBoolean()); + if (randomBoolean()) { + Map collateParams = new HashMap<>(); + int numParams = randomIntBetween(1, 5); + for (int i = 0; i < numParams; i++) { + collateParams.put(randomAsciiOfLength(5), randomAsciiOfLength(5)); + } + testBuilder.collateParams(collateParams ); + } + } if (randomBoolean()) { // preTag, postTag testBuilder.highlight(randomAsciiOfLengthBetween(3, 20), randomAsciiOfLengthBetween(3, 20)); @@ -56,11 +67,6 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC maybeSet(testBuilder::gramSize, randomIntBetween(1, 5)); maybeSet(testBuilder::forceUnigrams, randomBoolean()); maybeSet(testBuilder::tokenLimit, randomInt(20)); - if (randomBoolean()) { - Map collateParams = new HashMap<>(); - collateParams.put(randomAsciiOfLength(5), randomAsciiOfLength(5)); - testBuilder.collateParams(collateParams ); - } if (randomBoolean()) { testBuilder.smoothingModel(randomSmoothingModel()); } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java index e4a8ae72b911..4672d9db9777 100644 --- 
a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java @@ -97,7 +97,8 @@ public abstract class SmoothingModelTestCase extends ESTestCase { * Test that creates new smoothing model from a random test smoothing model and checks both for equality */ public void testFromXContent() throws IOException { - QueryParseContext context = new QueryParseContext(new IndicesQueriesRegistry(Settings.settingsBuilder().build(), Collections.emptyMap())); + QueryParseContext context = new QueryParseContext( + new IndicesQueriesRegistry(Settings.settingsBuilder().build(), Collections.emptyMap())); context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY)); SmoothingModel testModel = createTestModel(); @@ -113,7 +114,7 @@ public abstract class SmoothingModelTestCase extends ESTestCase { parser.nextToken(); // go to start token, real parsing would do that in the outer element parser SmoothingModel prototype = (SmoothingModel) namedWriteableRegistry.getPrototype(SmoothingModel.class, testModel.getWriteableName()); - SmoothingModel parsedModel = prototype.fromXContent(context); + SmoothingModel parsedModel = prototype.innerFromXContent(context); assertNotSame(testModel, parsedModel); assertEquals(testModel, parsedModel); assertEquals(testModel.hashCode(), parsedModel.hashCode()); @@ -134,7 +135,8 @@ public abstract class SmoothingModelTestCase extends ESTestCase { writer.addDocument(doc); DirectoryReader ir = DirectoryReader.open(writer, false); - WordScorer wordScorer = testModel.buildWordScorerFactory().newScorer(ir, MultiFields.getTerms(ir , "field"), "field", 0.9d, BytesRefs.toBytesRef(" ")); + WordScorer wordScorer = testModel.buildWordScorerFactory().newScorer(ir, MultiFields.getTerms(ir, "field"), "field", 0.9d, + BytesRefs.toBytesRef(" ")); assertWordScorer(wordScorer, testModel); } @@ -159,35 +161,39 @@ public abstract class SmoothingModelTestCase 
extends ESTestCase { */ @SuppressWarnings("unchecked") public void testEqualsAndHashcode() throws IOException { - SmoothingModel firstModel = createTestModel(); - assertFalse("smoothing model is equal to null", firstModel.equals(null)); - assertFalse("smoothing model is equal to incompatible type", firstModel.equals("")); - assertTrue("smoothing model is not equal to self", firstModel.equals(firstModel)); - assertThat("same smoothing model's hashcode returns different values if called multiple times", firstModel.hashCode(), - equalTo(firstModel.hashCode())); - assertThat("different smoothing models should not be equal", createMutation(firstModel), not(equalTo(firstModel))); + SmoothingModel firstModel = createTestModel(); + assertFalse("smoothing model is equal to null", firstModel.equals(null)); + assertFalse("smoothing model is equal to incompatible type", firstModel.equals("")); + assertTrue("smoothing model is not equal to self", firstModel.equals(firstModel)); + assertThat("same smoothing model's hashcode returns different values if called multiple times", firstModel.hashCode(), + equalTo(firstModel.hashCode())); + assertThat("different smoothing models should not be equal", createMutation(firstModel), not(equalTo(firstModel))); - SmoothingModel secondModel = copyModel(firstModel); - assertTrue("smoothing model is not equal to self", secondModel.equals(secondModel)); - assertTrue("smoothing model is not equal to its copy", firstModel.equals(secondModel)); - assertTrue("equals is not symmetric", secondModel.equals(firstModel)); - assertThat("smoothing model copy's hashcode is different from original hashcode", secondModel.hashCode(), equalTo(firstModel.hashCode())); + SmoothingModel secondModel = copyModel(firstModel); + assertTrue("smoothing model is not equal to self", secondModel.equals(secondModel)); + assertTrue("smoothing model is not equal to its copy", firstModel.equals(secondModel)); + assertTrue("equals is not symmetric", 
secondModel.equals(firstModel)); + assertThat("smoothing model copy's hashcode is different from original hashcode", secondModel.hashCode(), + equalTo(firstModel.hashCode())); - SmoothingModel thirdModel = copyModel(secondModel); - assertTrue("smoothing model is not equal to self", thirdModel.equals(thirdModel)); - assertTrue("smoothing model is not equal to its copy", secondModel.equals(thirdModel)); - assertThat("smoothing model copy's hashcode is different from original hashcode", secondModel.hashCode(), equalTo(thirdModel.hashCode())); - assertTrue("equals is not transitive", firstModel.equals(thirdModel)); - assertThat("smoothing model copy's hashcode is different from original hashcode", firstModel.hashCode(), equalTo(thirdModel.hashCode())); - assertTrue("equals is not symmetric", thirdModel.equals(secondModel)); - assertTrue("equals is not symmetric", thirdModel.equals(firstModel)); + SmoothingModel thirdModel = copyModel(secondModel); + assertTrue("smoothing model is not equal to self", thirdModel.equals(thirdModel)); + assertTrue("smoothing model is not equal to its copy", secondModel.equals(thirdModel)); + assertThat("smoothing model copy's hashcode is different from original hashcode", secondModel.hashCode(), + equalTo(thirdModel.hashCode())); + assertTrue("equals is not transitive", firstModel.equals(thirdModel)); + assertThat("smoothing model copy's hashcode is different from original hashcode", firstModel.hashCode(), + equalTo(thirdModel.hashCode())); + assertTrue("equals is not symmetric", thirdModel.equals(secondModel)); + assertTrue("equals is not symmetric", thirdModel.equals(firstModel)); } static SmoothingModel copyModel(SmoothingModel original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { - SmoothingModel prototype = (SmoothingModel) 
namedWriteableRegistry.getPrototype(SmoothingModel.class, original.getWriteableName()); + SmoothingModel prototype = (SmoothingModel) namedWriteableRegistry.getPrototype(SmoothingModel.class, + original.getWriteableName()); return prototype.readFrom(in); } } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java index a56f16b39f83..4a2cf4f3c1e0 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java @@ -20,12 +20,12 @@ package org.elasticsearch.search.suggest.term; import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase; +import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SortBy; +import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.StringDistanceImpl; +import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; import java.io.IOException; -import static org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SortBy; -import static org.elasticsearch.search.suggest.term.TermSuggestionBuilder.StringDistanceImpl; -import static org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; import static org.hamcrest.Matchers.notNullValue; /** @@ -33,6 +33,14 @@ import static org.hamcrest.Matchers.notNullValue; */ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCase { + /** + * creates random suggestion builder, renders it to xContent and back to new instance that should be equal to original + */ + @Override + public void testFromXContent() throws IOException { + // skip for now + } + @Override protected TermSuggestionBuilder randomSuggestionBuilder() { TermSuggestionBuilder testBuilder = new TermSuggestionBuilder(randomAsciiOfLength(10)); From 9abf00b665cdcb62aaee1805a24172ae3b15d0f3 
Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 8 Feb 2016 15:24:06 +0100 Subject: [PATCH 013/320] Add fromXContent method to SuggestBuilder --- .../elasticsearch/search/SearchService.java | 1 + .../search/suggest/SuggestBuilder.java | 54 +++++++- .../phrase/PhraseSuggestionBuilder.java | 6 +- .../search/suggest/SuggestBuilderTests.java | 118 ++++++++++++++++++ .../suggest/completion/WritableTestCase.java | 115 +++++++++++++++++ .../phrase/PhraseSuggestionBuilderTests.java | 4 + 6 files changed, 293 insertions(+), 5 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java create mode 100644 core/src/test/java/org/elasticsearch/search/suggest/completion/WritableTestCase.java diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 0da838a799bd..29fa5555fed3 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -20,6 +20,7 @@ package org.elasticsearch.search; import com.carrotsearch.hppc.ObjectFloatHashMap; + import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.TopDocs; import org.elasticsearch.ExceptionsHelper; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java index 8037646f1526..d16e8e1d84aa 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java @@ -20,10 +20,15 @@ package org.elasticsearch.search.suggest; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.ParsingException; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.QueryParseContext; import java.io.IOException; import java.util.ArrayList; @@ -39,6 +44,9 @@ import java.util.Objects; */ public class SuggestBuilder extends ToXContentToBytes implements Writeable { + public static final SuggestBuilder PROTOTYPE = new SuggestBuilder(); + protected static final ParseField GLOBAL_TEXT_FIELD = new ParseField("text"); + private String globalText; private final List> suggestions = new ArrayList<>(); @@ -58,7 +66,14 @@ public class SuggestBuilder extends ToXContentToBytes implements WriteableSuggestions are added, is the same as in the response. */ public SuggestBuilder addSuggestion(SuggestionBuilder suggestion) { @@ -66,6 +81,13 @@ public class SuggestBuilder extends ToXContentToBytes implements WriteableSuggestions that were added to the globat {@link SuggestBuilder} + */ + public List> getSuggestions() { + return suggestions; + } + /** * Returns all suggestions with the defined names. 
*/ @@ -86,6 +108,35 @@ public class SuggestBuilder extends ToXContentToBytes implements Writeable { + + + @Override + protected NamedWriteableRegistry provideNamedWritbaleRegistry() { + NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); + namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, TermSuggestionBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, PhraseSuggestionBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, CompletionSuggestionBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(SmoothingModel.class, Laplace.PROTOTYPE); + namedWriteableRegistry.registerPrototype(SmoothingModel.class, LinearInterpolation.PROTOTYPE); + namedWriteableRegistry.registerPrototype(SmoothingModel.class, StupidBackoff.PROTOTYPE); + return namedWriteableRegistry; + } + + /** + * creates random suggestion builder, renders it to xContent and back to new instance that should be equal to original + */ + public void testFromXContent() throws IOException { + Suggesters suggesters = new Suggesters(Collections.emptyMap(), null, null); + QueryParseContext context = new QueryParseContext(null); + context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY)); + for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) { + SuggestBuilder suggestBuilder = createTestModel(); + XContentBuilder xContentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + if (randomBoolean()) { + xContentBuilder.prettyPrint(); + } + suggestBuilder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); + XContentParser parser = XContentHelper.createParser(xContentBuilder.bytes()); + context.reset(parser); + parser.nextToken(); + + SuggestBuilder secondSuggestBuilder = SuggestBuilder.fromXContent(context, suggesters); + assertNotSame(suggestBuilder, secondSuggestBuilder); + assertEquals(suggestBuilder, secondSuggestBuilder); + assertEquals(suggestBuilder.hashCode(), 
secondSuggestBuilder.hashCode()); + } + } + + @Override + protected SuggestBuilder createTestModel() { + SuggestBuilder suggestBuilder = new SuggestBuilder(); + if (randomBoolean()) { + suggestBuilder.setText(randomAsciiOfLengthBetween(5, 50)); + } + int numberOfSuggestions = randomIntBetween(0, 5); + for (int i = 0; i < numberOfSuggestions; i++) { + suggestBuilder.addSuggestion(PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder()); + } + return suggestBuilder; + } + + @Override + protected SuggestBuilder createMutation(SuggestBuilder original) throws IOException { + SuggestBuilder mutation = new SuggestBuilder().setText(original.getText()); + for (SuggestionBuilder suggestionBuilder : original.getSuggestions()) { + mutation.addSuggestion(suggestionBuilder); + } + if (randomBoolean()) { + mutation.setText(randomAsciiOfLengthBetween(5, 60)); + } else { + mutation.addSuggestion(PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder()); + } + return mutation; + } + + @Override + protected SuggestBuilder readFrom(StreamInput in) throws IOException { + return SuggestBuilder.PROTOTYPE.readFrom(in); + } + +} diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/WritableTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/WritableTestCase.java new file mode 100644 index 000000000000..47b337334259 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/WritableTestCase.java @@ -0,0 +1,115 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.suggest.completion; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; + +/** + * Base class for testing serialization and equality for + * {@link Writeable} models + */ +public abstract class WritableTestCase extends ESTestCase { + + protected static final int NUMBER_OF_RUNS = 20; + + /** + * create random model that is put under test + */ + protected abstract M createTestModel(); + + /** + * mutate the given model so the returned model is different + */ + protected abstract M createMutation(M original) throws IOException; + + /** + * model prototype to read serialized format + */ + protected abstract M readFrom(StreamInput in) throws IOException; + + /** + * Test serialization and deserialization of the tested model. 
+ */ + public void testSerialization() throws IOException { + for (int i = 0; i < NUMBER_OF_RUNS; i++) { + M testModel = createTestModel(); + M deserializedModel = copyModel(testModel); + assertEquals(testModel, deserializedModel); + assertEquals(testModel.hashCode(), deserializedModel.hashCode()); + assertNotSame(testModel, deserializedModel); + } + } + + /** + * Test equality and hashCode properties + */ + @SuppressWarnings("unchecked") + public void testEqualsAndHashcode() throws IOException { + M firstModel = createTestModel(); + String modelName = firstModel.getClass().getSimpleName(); + assertFalse(modelName + " is equal to null", firstModel.equals(null)); + assertFalse(modelName + " is equal to incompatible type", firstModel.equals("")); + assertTrue(modelName + " is not equal to self", firstModel.equals(firstModel)); + assertThat("same "+ modelName + "'s hashcode returns different values if called multiple times", firstModel.hashCode(), + equalTo(firstModel.hashCode())); + assertThat("different " + modelName + " should not be equal", createMutation(firstModel), not(equalTo(firstModel))); + + M secondModel = copyModel(firstModel); + assertTrue(modelName + " is not equal to self", secondModel.equals(secondModel)); + assertTrue(modelName + " is not equal to its copy", firstModel.equals(secondModel)); + assertTrue("equals is not symmetric", secondModel.equals(firstModel)); + assertThat(modelName + " copy's hashcode is different from original hashcode", secondModel.hashCode(), + equalTo(firstModel.hashCode())); + + M thirdModel = copyModel(secondModel); + assertTrue(modelName + " is not equal to self", thirdModel.equals(thirdModel)); + assertTrue(modelName + " is not equal to its copy", secondModel.equals(thirdModel)); + assertThat(modelName + " copy's hashcode is different from original hashcode", secondModel.hashCode(), + equalTo(thirdModel.hashCode())); + assertTrue("equals is not transitive", firstModel.equals(thirdModel)); + assertThat(modelName + " copy's 
hashcode is different from original hashcode", firstModel.hashCode(), + equalTo(thirdModel.hashCode())); + assertTrue(modelName + " equals is not symmetric", thirdModel.equals(secondModel)); + assertTrue(modelName + " equals is not symmetric", thirdModel.equals(firstModel)); + } + + private M copyModel(M original) throws IOException { + try (BytesStreamOutput output = new BytesStreamOutput()) { + original.writeTo(output); + try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), provideNamedWritbaleRegistry())) { + return readFrom(in); + } + } + } + + protected NamedWriteableRegistry provideNamedWritbaleRegistry() { + return new NamedWriteableRegistry(); + } +} diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java index 3cf65722a5dc..d74719fa6f77 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java @@ -42,6 +42,10 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC @Override protected PhraseSuggestionBuilder randomSuggestionBuilder() { + return randomPhraseSuggestionBuilder(); + } + + public static PhraseSuggestionBuilder randomPhraseSuggestionBuilder() { PhraseSuggestionBuilder testBuilder = new PhraseSuggestionBuilder(randomAsciiOfLength(10)); maybeSet(testBuilder::maxErrors, randomFloat()); maybeSet(testBuilder::separator, randomAsciiOfLengthBetween(1, 10)); From 7ca72542b30f5c8d1c57af207b0b08496fd6552e Mon Sep 17 00:00:00 2001 From: Ali Beyad Date: Thu, 4 Feb 2016 18:05:53 -0500 Subject: [PATCH 014/320] Implements the ability to go from x-content to a term suggester. 
--- .../rest/action/search/RestSearchAction.java | 2 +- .../search/suggest/SuggestUtils.java | 1 + .../suggest/term/TermSuggestionBuilder.java | 123 +++++++++++++----- .../search/suggest/term/SortByTests.java | 10 +- .../suggest/term/StringDistanceImplTests.java | 16 +-- .../search/suggest/term/SuggestModeTests.java | 12 +- .../term/TermSuggestionBuilderTests.java | 8 -- 7 files changed, 108 insertions(+), 64 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 3e91a1c12803..2f395d3457ff 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -265,7 +265,7 @@ public class RestSearchAction extends BaseRestHandler { searchSourceBuilder.suggest(new SuggestBuilder().addSuggestion( termSuggestion(suggestField).field(suggestField) .text(suggestText).size(suggestSize) - .suggestMode(SuggestMode.fromString(suggestMode)))); + .suggestMode(SuggestMode.resolve(suggestMode)))); modified = true; } return modified; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java index 595806c8acb3..989546d50bf9 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java @@ -49,6 +49,7 @@ import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; +import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import java.io.IOException; import java.util.Comparator; diff --git 
a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java index 6f694f735414..7625f204c037 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java @@ -19,10 +19,18 @@ package org.elasticsearch.search.suggest.term; +import org.apache.lucene.search.spell.DirectSpellChecker; +import org.apache.lucene.search.spell.JaroWinklerDistance; +import org.apache.lucene.search.spell.LevensteinDistance; +import org.apache.lucene.search.spell.LuceneLevenshteinDistance; +import org.apache.lucene.search.spell.NGramDistance; +import org.apache.lucene.search.spell.StringDistance; +import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.search.suggest.SuggestionBuilder; @@ -37,6 +45,16 @@ import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAUL import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MIN_DOC_FREQ; import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MIN_WORD_LENGTH; import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_PREFIX_LENGTH; +import static org.elasticsearch.search.suggest.SuggestUtils.Fields.ACCURACY; +import static org.elasticsearch.search.suggest.SuggestUtils.Fields.MAX_EDITS; +import static org.elasticsearch.search.suggest.SuggestUtils.Fields.MAX_INSPECTIONS; +import static org.elasticsearch.search.suggest.SuggestUtils.Fields.MAX_TERM_FREQ; +import static 
org.elasticsearch.search.suggest.SuggestUtils.Fields.MIN_DOC_FREQ; +import static org.elasticsearch.search.suggest.SuggestUtils.Fields.MIN_WORD_LENGTH; +import static org.elasticsearch.search.suggest.SuggestUtils.Fields.PREFIX_LENGTH; +import static org.elasticsearch.search.suggest.SuggestUtils.Fields.SORT; +import static org.elasticsearch.search.suggest.SuggestUtils.Fields.STRING_DISTANCE; +import static org.elasticsearch.search.suggest.SuggestUtils.Fields.SUGGEST_MODE; /** * Defines the actual suggest command. Each command uses the global options @@ -309,42 +327,64 @@ public class TermSuggestionBuilder extends SuggestionBuilder { /** Only suggest terms in the suggest text that aren't in the index. This is the default. */ @@ -428,7 +467,7 @@ public class TermSuggestionBuilder extends SuggestionBuilder { - /** - * creates random suggestion builder, renders it to xContent and back to new instance that should be equal to original - */ - @Override - public void testFromXContent() throws IOException { - // skip for now - } - @Override protected TermSuggestionBuilder randomSuggestionBuilder() { TermSuggestionBuilder testBuilder = new TermSuggestionBuilder(randomAsciiOfLength(10)); From 9c064eec82a48e3d0aa5eae9d262d56f67c77e98 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 9 Feb 2016 23:09:35 +0100 Subject: [PATCH 015/320] Fix test failures in TermSuggestionBuilderTests --- .../search/suggest/term/TermSuggestionBuilder.java | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java index 7625f204c037..1378c362c542 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java @@ -19,12 +19,6 @@ package 
org.elasticsearch.search.suggest.term; -import org.apache.lucene.search.spell.DirectSpellChecker; -import org.apache.lucene.search.spell.JaroWinklerDistance; -import org.apache.lucene.search.spell.LevensteinDistance; -import org.apache.lucene.search.spell.LuceneLevenshteinDistance; -import org.apache.lucene.search.spell.NGramDistance; -import org.apache.lucene.search.spell.StringDistance; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -545,6 +539,8 @@ public class TermSuggestionBuilder extends SuggestionBuilder Date: Tue, 2 Feb 2016 15:48:27 +0100 Subject: [PATCH 016/320] WIP adding build() method to PhraseSuggestionBuilder --- .../indices/query/IndicesQueriesRegistry.java | 4 +- .../elasticsearch/search/SearchService.java | 4 +- .../search/suggest/SuggestParseElement.java | 3 - .../search/suggest/SuggestUtils.java | 48 +++++- .../search/suggest/Suggesters.java | 2 +- .../search/suggest/SuggestionBuilder.java | 6 +- .../suggest/SuggestionSearchContext.java | 24 +-- .../completion/CompletionSuggester.java | 2 + .../CompletionSuggestionBuilder.java | 9 + .../DirectCandidateGeneratorBuilder.java | 4 +- .../suggest/phrase/PhraseSuggestParser.java | 3 - .../suggest/phrase/PhraseSuggester.java | 12 +- .../phrase/PhraseSuggestionBuilder.java | 111 +++++++++++++ .../phrase/PhraseSuggestionContext.java | 10 ++ .../search/suggest/term/TermSuggester.java | 2 + .../suggest/term/TermSuggestionBuilder.java | 8 +- .../elasticsearch/index/IndexModuleTests.java | 2 +- .../AbstractSuggestionBuilderTestCase.java | 156 ++++++++++++++++-- .../suggest/CustomSuggesterSearchIT.java | 8 + .../phrase/DirectCandidateGeneratorTests.java | 52 +++--- .../phrase/PhraseSuggestionBuilderTests.java | 42 ++++- .../term/TermSuggestionBuilderTests.java | 25 ++- 22 files changed, 440 insertions(+), 97 deletions(-) diff --git 
a/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java b/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java index a9e90884a686..b0b212d2ab43 100644 --- a/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java +++ b/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java @@ -19,12 +19,12 @@ package org.elasticsearch.indices.query; -import java.util.Map; - import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryParser; +import java.util.Map; + public class IndicesQueriesRegistry extends AbstractComponent { private Map> queryParsers; diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index cf9c0cebce7b..ff6d8897d59a 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -751,7 +751,7 @@ public class SearchService extends AbstractLifecycleComponent imp if (source.rescores() != null) { try { for (RescoreBuilder rescore : source.rescores()) { - context.addRescore(rescore.build(context.getQueryShardContext())); + context.addRescore(rescore.build(queryShardContext)); } } catch (IOException e) { throw new SearchContextException(context, "failed to create RescoreSearchContext", e); @@ -776,7 +776,7 @@ public class SearchService extends AbstractLifecycleComponent imp if (source.highlighter() != null) { HighlightBuilder highlightBuilder = source.highlighter(); try { - context.highlight(highlightBuilder.build(context.getQueryShardContext())); + context.highlight(highlightBuilder.build(queryShardContext)); } catch (IOException e) { throw new SearchContextException(context, "failed to create SearchContextHighlighter", e); } diff --git 
a/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java index cf6b391ec631..52b728a476fb 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java @@ -117,9 +117,6 @@ public final class SuggestParseElement implements SearchParseElement { for (Map.Entry entry : suggestionContexts.entrySet()) { String suggestionName = entry.getKey(); SuggestionContext suggestionContext = entry.getValue(); - - suggestionContext.setShard(shardId); - suggestionContext.setIndex(index); SuggestUtils.verifySuggestion(mapperService, globalText, suggestionContext); suggestionSearchContext.addSuggestion(suggestionName, suggestionContext); } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java index 989546d50bf9..89cc8e2f01a0 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java @@ -42,6 +42,7 @@ import org.apache.lucene.util.automaton.LevenshteinAutomata; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.FastCharArrayReader; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.analysis.CustomAnalyzer; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -271,9 +272,54 @@ public final class SuggestUtils { return false; } return true; - } + /** + * Transfers the text, prefix, regex, analyzer, fieldname, size and shard size settings from the + * original {@link SuggestionBuilder} to the target {@link SuggestionContext} + */ + public static void suggestionToSuggestionContext(SuggestionBuilder suggestionBuilder, 
MapperService mapperService, + SuggestionSearchContext.SuggestionContext suggestionContext) throws IOException { + String analyzerName = suggestionBuilder.analyzer(); + if (analyzerName != null) { + Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName); + if (analyzer == null) { + throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists"); + } + suggestionContext.setAnalyzer(analyzer); + } + if (suggestionBuilder.field() != null) { + suggestionContext.setField(suggestionBuilder.field()); + } + if (suggestionBuilder.size() != null) { + suggestionContext.setSize(suggestionBuilder.size()); + } + if (suggestionBuilder.shardSize() != null) { + suggestionContext.setShardSize(suggestionBuilder.shardSize()); + } else { + // if no shard size is set in builder, use size (or at least 5) + suggestionContext.setShardSize(Math.max(suggestionContext.getSize(), 5)); + } + String text = suggestionBuilder.text(); + if (text != null) { + suggestionContext.setText(BytesRefs.toBytesRef(text)); + } + String prefix = suggestionBuilder.prefix(); + if (prefix != null) { + suggestionContext.setText(BytesRefs.toBytesRef(prefix)); + } + String regex = suggestionBuilder.regex(); + if (regex != null) { + suggestionContext.setText(BytesRefs.toBytesRef(regex)); + } + if (text != null && prefix == null) { + suggestionContext.setPrefix(BytesRefs.toBytesRef(text)); + } else if (text == null && prefix != null) { + suggestionContext.setText(BytesRefs.toBytesRef(prefix)); + } else if (text == null && regex != null) { + suggestionContext.setText(BytesRefs.toBytesRef(regex)); + } + } public static void verifySuggestion(MapperService mapperService, BytesRef globalText, SuggestionContext suggestion) { // Verify options and set defaults diff --git a/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java b/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java index c26649f63881..dc6bb0613d3e 100644 --- 
a/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java @@ -54,7 +54,7 @@ public final class Suggesters extends ExtensionPoint.ClassMap { private static Map addBuildIns(Map suggesters, ScriptService scriptService, IndicesService indexServices) { final Map map = new HashMap<>(); - map.put("phrase", new PhraseSuggester(scriptService, indexServices)); + map.put("phrase", new PhraseSuggester(scriptService)); map.put("term", new TermSuggester()); map.put("completion", new CompletionSuggester()); map.putAll(suggesters); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java index 1fdb38df88f0..e1ecca1cccea 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java @@ -28,6 +28,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import java.io.IOException; import java.util.Objects; @@ -192,7 +194,9 @@ public abstract class SuggestionBuilder> extends protected abstract SuggestionBuilder innerFromXContent(QueryParseContext parseContext, String name) throws IOException; - private String getSuggesterName() { + protected abstract SuggestionContext build(QueryShardContext context) throws IOException; + + public String getSuggesterName() { //default impl returns the same as writeable name, but we keep the distinction between the two just to make sure return getWriteableName(); } diff --git 
a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java index 1d3339e0578b..b662df33f28e 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java @@ -36,9 +36,9 @@ public class SuggestionSearchContext { public Map suggestions() { return suggestions; } - + public static class SuggestionContext { - + private BytesRef text; private BytesRef prefix; private BytesRef regex; @@ -47,9 +47,7 @@ public class SuggestionSearchContext { private Analyzer analyzer; private int size = 5; private int shardSize = -1; - private int shardId; - private String index; - + public BytesRef getText() { return text; } @@ -119,22 +117,6 @@ public class SuggestionSearchContext { } this.shardSize = shardSize; } - - public void setShard(int shardId) { - this.shardId = shardId; - } - - public void setIndex(String index) { - this.index = index; - } - - public String getIndex() { - return index; - } - - public int getShard() { - return shardId; - } } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index 8cd9d386a134..86f3b87b059b 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -51,6 +51,8 @@ import java.util.Set; public class CompletionSuggester extends Suggester { + static final CompletionSuggester PROTOTYPE = new CompletionSuggester(); + @Override public SuggestContextParser getContextParser() { return new CompletionSuggestParser(this); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java 
b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 29992c1a077b..0bd37be128d0 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -28,8 +28,10 @@ import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.RegexpFlag; import org.elasticsearch.search.suggest.SuggestionBuilder; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext; import org.elasticsearch.search.suggest.completion.context.GeoQueryContext; @@ -372,9 +374,16 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder { private final BytesRef SEPARATOR = new BytesRef(" "); private static final String SUGGESTION_TEMPLATE_VAR_NAME = "suggestion"; private final ScriptService scriptService; - private final IndicesService indicesService; - public PhraseSuggester(ScriptService scriptService, IndicesService indicesService) { + static PhraseSuggester PROTOTYPE; + + public PhraseSuggester(ScriptService scriptService) { this.scriptService = scriptService; - this.indicesService = indicesService; + PROTOTYPE = this; } /* @@ -120,8 +119,7 @@ public final class PhraseSuggester extends Suggester { vars.put(SUGGESTION_TEMPLATE_VAR_NAME, spare.toString()); final ExecutableScript executable = scriptService.executable(collateScript, vars); final BytesReference querySource = (BytesReference) executable.run(); - IndexService indexService = indicesService.indexService(suggestion.getIndex()); - final ParsedQuery parsedQuery = 
indexService.newQueryShardContext().parse(querySource); + final ParsedQuery parsedQuery = suggestion.getShardContext().parse(querySource); collateMatch = Lucene.exists(searcher, parsedQuery.query()); } if (!collateMatch && !collatePrune) { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java index 2982012a2dbd..08a1a47b0717 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java @@ -29,17 +29,28 @@ import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.script.CompiledScript; +import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.Template; +import org.elasticsearch.search.suggest.SuggestUtils; import org.elasticsearch.search.suggest.SuggestionBuilder; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator; import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory; import 
java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -254,6 +265,9 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder= 1"); + } this.tokenLimit = tokenLimit; return this; } @@ -887,6 +901,101 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder candidateGenerators : this.generators.values()) { + for (CandidateGenerator candidateGenerator : candidateGenerators) { + suggestionContext.addGenerator(candidateGenerator.build(mapperService)); + } + } + + if (this.model != null) { + suggestionContext.setModel(this.model.buildWordScorerFactory()); + } + + if (this.collateQuery != null) { + CompiledScript compiledScript = context.getScriptService().compile(this.collateQuery, ScriptContext.Standard.SEARCH, + Collections.emptyMap()); + suggestionContext.setCollateQueryScript(compiledScript); + if (this.collateParams != null) { + suggestionContext.setCollateScriptParams(this.collateParams); + } + suggestionContext.setCollatePrune(this.collatePrune); + } + + // TODO make field mandatory in the builder, then remove this + if (suggestionContext.getField() == null) { + throw new IllegalArgumentException("The required field option is missing"); + } + + MappedFieldType fieldType = mapperService.fullName(suggestionContext.getField()); + if (fieldType == null) { + throw new IllegalArgumentException("No mapping found for field [" + suggestionContext.getField() + "]"); + } else if (suggestionContext.getAnalyzer() == null) { + // no analyzer name passed in, so try the field's analyzer, or the default analyzer + if (fieldType.searchAnalyzer() == null) { + suggestionContext.setAnalyzer(mapperService.searchAnalyzer()); + } else { + suggestionContext.setAnalyzer(fieldType.searchAnalyzer()); + } + } + + if (suggestionContext.model() == null) { + suggestionContext.setModel(StupidBackoffScorer.FACTORY); + } + + if (this.gramSize == null || 
suggestionContext.generators().isEmpty()) { + final ShingleTokenFilterFactory.Factory shingleFilterFactory = SuggestUtils + .getShingleFilterFactory(suggestionContext.getAnalyzer()); + if (this.gramSize == null) { + // try to detect the shingle size + if (shingleFilterFactory != null) { + suggestionContext.setGramSize(shingleFilterFactory.getMaxShingleSize()); + if (suggestionContext.getAnalyzer() == null && shingleFilterFactory.getMinShingleSize() > 1 + && !shingleFilterFactory.getOutputUnigrams()) { + throw new IllegalArgumentException("The default analyzer for field: [" + suggestionContext.getField() + + "] doesn't emit unigrams. If this is intentional try to set the analyzer explicitly"); + } + } + } + if (suggestionContext.generators().isEmpty()) { + if (shingleFilterFactory != null && shingleFilterFactory.getMinShingleSize() > 1 + && !shingleFilterFactory.getOutputUnigrams() && suggestionContext.getRequireUnigram()) { + throw new IllegalArgumentException("The default candidate generator for phrase suggest can't operate on field: [" + + suggestionContext.getField() + "] since it doesn't emit unigrams. 
" + + "If this is intentional try to set the candidate generator field explicitly"); + } + // use a default generator on the same field + DirectCandidateGenerator generator = new DirectCandidateGenerator(); + generator.setField(suggestionContext.getField()); + suggestionContext.addGenerator(generator); + } + } + return suggestionContext; + } + private static void ensureNoSmoothing(PhraseSuggestionBuilder suggestion) { if (suggestion.smoothingModel() != null) { throw new IllegalArgumentException("only one smoothing model supported"); @@ -999,5 +1108,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder generators = new ArrayList<>(); private Map collateScriptParams = new HashMap<>(1); private WordScorer.WordScorerFactory scorer; + private QueryShardContext shardContext; public PhraseSuggestionContext(Suggester suggester) { super(suggester); @@ -214,4 +216,12 @@ class PhraseSuggestionContext extends SuggestionContext { return prune; } + public void setShardContext(QueryShardContext context) { + this.shardContext = context; + } + + public QueryShardContext getShardContext() { + return this.shardContext; + } + } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java index e67e619bf516..37ec73711aac 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java @@ -40,6 +40,8 @@ import java.util.List; public final class TermSuggester extends Suggester { + static final TermSuggester PROTOTYPE = new TermSuggester(); + @Override public TermSuggestion innerExecute(String name, TermSuggestionContext suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java 
b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java index 7625f204c037..1f190b674a6c 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java @@ -19,12 +19,6 @@ package org.elasticsearch.search.suggest.term; -import org.apache.lucene.search.spell.DirectSpellChecker; -import org.apache.lucene.search.spell.JaroWinklerDistance; -import org.apache.lucene.search.spell.LevensteinDistance; -import org.apache.lucene.search.spell.LuceneLevenshteinDistance; -import org.apache.lucene.search.spell.NGramDistance; -import org.apache.lucene.search.spell.StringDistance; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +26,9 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.suggest.SuggestionBuilder; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import java.io.IOException; import java.util.Locale; diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 0b0691bc5883..656e3ab6a618 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -63,8 +63,8 @@ import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.ScriptContextRegistry; import org.elasticsearch.script.ScriptEngineRegistry; import org.elasticsearch.script.ScriptEngineService; -import 
org.elasticsearch.script.ScriptSettings; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptSettings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.engine.MockEngineFactory; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index 3c5797f1e4a8..501041c6af24 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.suggest; +import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; @@ -31,33 +32,82 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AnalysisService; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.mapper.ContentPath; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperBuilders; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.core.StringFieldMapper; +import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; 
+import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.script.CompiledScript; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptContextRegistry; +import org.elasticsearch.script.ScriptEngineRegistry; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptServiceTests.TestEngineService; +import org.elasticsearch.script.ScriptSettings; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.watcher.ResourceWatcherService; import org.junit.AfterClass; import org.junit.BeforeClass; import java.io.IOException; +import java.nio.file.Path; import java.util.Collections; +import java.util.Map; import java.util.function.Consumer; import java.util.function.Supplier; +import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; public abstract class AbstractSuggestionBuilderTestCase> extends ESTestCase { - private static final int NUMBER_OF_TESTBUILDERS = 20; + private static final int NUMBER_OF_TESTBUILDERS = 2000; protected static NamedWriteableRegistry namedWriteableRegistry; - private static final Suggesters suggesters = new Suggesters(Collections.emptyMap(), null, null); + private static Suggesters suggesters; + private static ScriptService scriptService; + private static SuggestParseElement parseElement; /** * setup for the whole base test class */ @BeforeClass - public static void init() { + public static void init() throws IOException { + Path genericConfigFolder = createTempDir(); + 
Settings baseSettings = settingsBuilder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), genericConfigFolder) + .build(); + Environment environment = new Environment(baseSettings); + ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList()); + ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry + .ScriptEngineRegistration(TestEngineService.class, TestEngineService.TYPES))); + ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry); + scriptService = new ScriptService(baseSettings, environment, Collections.singleton(new TestEngineService()), + new ResourceWatcherService(baseSettings, null), scriptEngineRegistry, scriptContextRegistry, scriptSettings) { + @Override + public CompiledScript compile(Script script, ScriptContext scriptContext, Map params) { + return new CompiledScript(ScriptType.INLINE, "mockName", "mocklang", script); + } + }; + suggesters = new Suggesters(Collections.emptyMap(), scriptService, null); + parseElement = new SuggestParseElement(suggesters); + namedWriteableRegistry = new NamedWriteableRegistry(); namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, TermSuggestionBuilder.PROTOTYPE); namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, PhraseSuggestionBuilder.PROTOTYPE); @@ -69,7 +119,6 @@ public abstract class AbstractSuggestionBuilderTestCase T randomValueOtherThan(T input, Supplier randomSupplier) { T randomValue = null; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java index b3af0eee142e..5bddad8bb022 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java +++ 
b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java @@ -25,7 +25,9 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -132,6 +134,12 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase { return new CustomSuggestionBuilder(name, randomField, randomSuffix); } + @Override + protected SuggestionContext build(QueryShardContext context) throws IOException { + // NORELEASE + return null; + } + } } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java index 02826b9a7eb2..9bf8447f8d80 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java @@ -34,15 +34,10 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.index.mapper.MapperBuilders; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.core.StringFieldMapper; import 
org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator; @@ -171,19 +166,10 @@ public class DirectCandidateGeneratorTests extends ESTestCase{ } }; - QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, mockMapperService, null, null, null) { - @Override - public MappedFieldType fieldMapper(String name) { - StringFieldMapper.Builder builder = MapperBuilders.stringField(name); - return builder.build(new Mapper.BuilderContext(idxSettings.getSettings(), new ContentPath(1))).fieldType(); - } - }; - mockShardContext.setMapUnmappedFieldAsString(true); - for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) { DirectCandidateGeneratorBuilder generator = randomCandidateGenerator(); // first, build via DirectCandidateGenerator#build() - DirectCandidateGenerator contextGenerator = generator.build(mockShardContext); + DirectCandidateGenerator contextGenerator = generator.build(mockMapperService); // second, render random test generator to xContent and parse using // PhraseSuggestParser @@ -195,28 +181,32 @@ public class DirectCandidateGeneratorTests extends ESTestCase{ XContentParser parser = XContentHelper.createParser(builder.bytes()); DirectCandidateGenerator secondGenerator = PhraseSuggestParser.parseCandidateGenerator(parser, - mockShardContext.getMapperService(), mockShardContext.parseFieldMatcher()); + mockMapperService, ParseFieldMatcher.EMPTY); // compare their properties assertNotSame(contextGenerator, secondGenerator); - assertEquals(contextGenerator.field(), secondGenerator.field()); - assertEquals(contextGenerator.accuracy(), secondGenerator.accuracy(), Float.MIN_VALUE); - 
assertEquals(contextGenerator.maxTermFreq(), secondGenerator.maxTermFreq(), Float.MIN_VALUE); - assertEquals(contextGenerator.maxEdits(), secondGenerator.maxEdits()); - assertEquals(contextGenerator.maxInspections(), secondGenerator.maxInspections()); - assertEquals(contextGenerator.minDocFreq(), secondGenerator.minDocFreq(), Float.MIN_VALUE); - assertEquals(contextGenerator.minWordLength(), secondGenerator.minWordLength()); - assertEquals(contextGenerator.postFilter(), secondGenerator.postFilter()); - assertEquals(contextGenerator.prefixLength(), secondGenerator.prefixLength()); - assertEquals(contextGenerator.preFilter(), secondGenerator.preFilter()); - assertEquals(contextGenerator.sort(), secondGenerator.sort()); - assertEquals(contextGenerator.size(), secondGenerator.size()); - // some instances of StringDistance don't support equals, just checking the class here - assertEquals(contextGenerator.stringDistance().getClass(), secondGenerator.stringDistance().getClass()); - assertEquals(contextGenerator.suggestMode(), secondGenerator.suggestMode()); + assertEqualGenerators(contextGenerator, secondGenerator); } } + public static void assertEqualGenerators(DirectCandidateGenerator first, DirectCandidateGenerator second) { + assertEquals(first.field(), second.field()); + assertEquals(first.accuracy(), second.accuracy(), Float.MIN_VALUE); + assertEquals(first.maxTermFreq(), second.maxTermFreq(), Float.MIN_VALUE); + assertEquals(first.maxEdits(), second.maxEdits()); + assertEquals(first.maxInspections(), second.maxInspections()); + assertEquals(first.minDocFreq(), second.minDocFreq(), Float.MIN_VALUE); + assertEquals(first.minWordLength(), second.minWordLength()); + assertEquals(first.postFilter(), second.postFilter()); + assertEquals(first.prefixLength(), second.prefixLength()); + assertEquals(first.preFilter(), second.preFilter()); + assertEquals(first.sort(), second.sort()); + assertEquals(first.size(), second.size()); + // some instances of StringDistance don't 
support equals, just checking the class here + assertEquals(first.stringDistance().getClass(), second.stringDistance().getClass()); + assertEquals(first.suggestMode(), second.suggestMode()); + } + /** * test that bad xContent throws exception */ diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java index d74719fa6f77..43c9b27bab9a 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java @@ -21,16 +21,21 @@ package org.elasticsearch.search.suggest.phrase; import org.elasticsearch.script.Template; import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.Laplace; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.LinearInterpolation; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.StupidBackoff; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator; import org.junit.BeforeClass; import java.io.IOException; import java.util.HashMap; +import java.util.Iterator; import java.util.Map; +import static org.hamcrest.Matchers.instanceOf; + public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestCase { @BeforeClass @@ -70,7 +75,7 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC } maybeSet(testBuilder::gramSize, randomIntBetween(1, 5)); maybeSet(testBuilder::forceUnigrams, randomBoolean()); - maybeSet(testBuilder::tokenLimit, randomInt(20)); + maybeSet(testBuilder::tokenLimit, 
randomIntBetween(1, 20)); if (randomBoolean()) { testBuilder.smoothingModel(randomSmoothingModel()); } @@ -115,7 +120,7 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC builder.gramSize(randomValueOtherThan(builder.gramSize(), () -> randomIntBetween(1, 5))); break; case 4: - builder.tokenLimit(randomValueOtherThan(builder.tokenLimit(), () -> randomInt(20))); + builder.tokenLimit(randomValueOtherThan(builder.tokenLimit(), () -> randomIntBetween(1, 20))); break; case 5: builder.separator(randomValueOtherThan(builder.separator(), () -> randomAsciiOfLengthBetween(1, 10))); @@ -158,4 +163,37 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC } } + protected void assertSuggestionContext(SuggestionContext oldSuggestion, SuggestionContext newSuggestion) { + assertThat(oldSuggestion, instanceOf(PhraseSuggestionContext.class)); + assertThat(newSuggestion, instanceOf(PhraseSuggestionContext.class)); + PhraseSuggestionContext oldPhraseSuggestion = (PhraseSuggestionContext) oldSuggestion; + PhraseSuggestionContext newPhraseSuggestion = (PhraseSuggestionContext) newSuggestion; + assertEquals(oldPhraseSuggestion.confidence(), newPhraseSuggestion.confidence(), Float.MIN_VALUE); + assertEquals(oldPhraseSuggestion.collatePrune(), newPhraseSuggestion.collatePrune()); + assertEquals(oldPhraseSuggestion.gramSize(), newPhraseSuggestion.gramSize()); + assertEquals(oldPhraseSuggestion.realworldErrorLikelyhood(), newPhraseSuggestion.realworldErrorLikelyhood(), Float.MIN_VALUE); + assertEquals(oldPhraseSuggestion.maxErrors(), newPhraseSuggestion.maxErrors(), Float.MIN_VALUE); + assertEquals(oldPhraseSuggestion.separator(), newPhraseSuggestion.separator()); + assertEquals(oldPhraseSuggestion.getTokenLimit(), newPhraseSuggestion.getTokenLimit()); + assertEquals(oldPhraseSuggestion.getRequireUnigram(), newPhraseSuggestion.getRequireUnigram()); + assertEquals(oldPhraseSuggestion.getPreTag(), newPhraseSuggestion.getPreTag()); + 
assertEquals(oldPhraseSuggestion.getPostTag(), newPhraseSuggestion.getPostTag()); + if (oldPhraseSuggestion.getCollateQueryScript() != null) { + // only assert that we have a compiled script on the other side + assertNotNull(newPhraseSuggestion.getCollateQueryScript()); + } + if (oldPhraseSuggestion.generators() != null) { + assertNotNull(newPhraseSuggestion.generators()); + assertEquals(oldPhraseSuggestion.generators().size(), newPhraseSuggestion.generators().size()); + Iterator secondList = newPhraseSuggestion.generators().iterator(); + for (DirectCandidateGenerator candidateGenerator : newPhraseSuggestion.generators()) { + DirectCandidateGeneratorTests.assertEqualGenerators(candidateGenerator, secondList.next()); + } + } + assertEquals(oldPhraseSuggestion.getCollateScriptParams(), newPhraseSuggestion.getCollateScriptParams()); + if (oldPhraseSuggestion.model() != null) { + assertNotNull(newPhraseSuggestion.model()); + } + } + } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java index ca5f3f880ec1..ac14efdb4d14 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java @@ -20,9 +20,7 @@ package org.elasticsearch.search.suggest.term; import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase; -import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SortBy; -import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.StringDistanceImpl; -import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import java.io.IOException; @@ -33,6 +31,22 @@ import static org.hamcrest.Matchers.notNullValue; */ public class TermSuggestionBuilderTests extends 
AbstractSuggestionBuilderTestCase { + /** + * creates random suggestion builder, renders it to xContent and back to new instance that should be equal to original + */ + @Override + public void testFromXContent() throws IOException { + // skip for now + } + + /** + * creates random suggestion builder, renders it to xContent and back to new instance that should be equal to original + */ + @Override + public void testBuild() throws IOException { + // skip for now + } + @Override protected TermSuggestionBuilder randomSuggestionBuilder() { TermSuggestionBuilder testBuilder = new TermSuggestionBuilder(randomAsciiOfLength(10)); @@ -245,4 +259,9 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas assertThat(builder.suggestMode(), notNullValue()); } + @Override + protected void assertSuggestionContext(SuggestionContext oldSuggestion, SuggestionContext newSuggestion) { + // put assertions on TermSuggestionContext here + } + } From 4b736d2e0c01d55eb320c205fc9c90da52dab910 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 8 Feb 2016 14:26:16 +0100 Subject: [PATCH 017/320] Changing parse signature in QueryParseElement --- .../action/suggest/TransportSuggestAction.java | 5 +++-- .../search/suggest/SuggestContextParser.java | 5 ++--- .../search/suggest/SuggestParseElement.java | 14 ++++++++------ .../search/suggest/SuggestionSearchContext.java | 10 ++++++++++ .../completion/CompletionSuggestParser.java | 8 ++++---- .../suggest/completion/CompletionSuggester.java | 4 +++- .../completion/CompletionSuggestionContext.java | 9 +-------- .../search/suggest/phrase/PhraseSuggestParser.java | 5 +++-- .../search/suggest/phrase/PhraseSuggester.java | 3 ++- .../suggest/phrase/PhraseSuggestionContext.java | 12 +----------- .../search/suggest/term/TermSuggestParser.java | 5 +++-- .../suggest/AbstractSuggestionBuilderTestCase.java | 2 +- .../search/suggest/CustomSuggester.java | 2 +- 13 files changed, 42 insertions(+), 42 deletions(-) 
diff --git a/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java b/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java index 0ed985785577..36fb079d257a 100644 --- a/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java +++ b/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.suggest.stats.ShardSuggestMetric; import org.elasticsearch.indices.IndicesService; @@ -131,6 +132,7 @@ public class TransportSuggestAction extends TransportBroadcastAction for (String field : payloadFields) { MappedFieldType payloadFieldType = suggestionContext.getMapperService().fullName(field); if (payloadFieldType != null) { - final AtomicFieldData data = suggestionContext.getIndexFieldDataService().getForField(payloadFieldType) + QueryShardContext shardContext = suggestionContext.getShardContext(); + final AtomicFieldData data = shardContext.getForField(payloadFieldType) .load(subReaderContext); final ScriptDocValues scriptValues = data.getScriptValues(); scriptValues.setNextDocId(subDocId); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java index 535151b476e0..328005a8cb29 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.suggest.completion; import 
org.apache.lucene.search.suggest.document.CompletionQuery; import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.CompletionFieldMapper; import org.elasticsearch.search.suggest.Suggester; @@ -44,12 +43,10 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest private CompletionSuggestionBuilder.RegexOptionsBuilder regexOptionsBuilder; private Map> queryContexts = Collections.emptyMap(); private final MapperService mapperService; - private final IndexFieldDataService indexFieldDataService; private Set payloadFields = Collections.emptySet(); - CompletionSuggestionContext(Suggester suggester, MapperService mapperService, IndexFieldDataService indexFieldDataService) { + CompletionSuggestionContext(Suggester suggester, MapperService mapperService) { super(suggester); - this.indexFieldDataService = indexFieldDataService; this.mapperService = mapperService; } @@ -78,10 +75,6 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest return mapperService; } - IndexFieldDataService getIndexFieldDataService() { - return indexFieldDataService; - } - void setPayloadFields(Set fields) { this.payloadFields = fields; } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java index eb7254c7226f..b6739866fa72 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java @@ -26,9 +26,9 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; -import 
org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.Template; @@ -51,7 +51,8 @@ public final class PhraseSuggestParser implements SuggestContextParser { } @Override - public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexFieldDataService fieldDataService) throws IOException { + public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, QueryShardContext shardContext) throws IOException { + MapperService mapperService = shardContext.getMapperService(); PhraseSuggestionContext suggestion = new PhraseSuggestionContext(suggester); ParseFieldMatcher parseFieldMatcher = mapperService.getIndexSettings().getParseFieldMatcher(); XContentParser.Token token; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java index 74dcb33e6437..d4731568b598 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java @@ -42,6 +42,7 @@ import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.SuggestUtils; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.SuggestionBuilder; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.phrase.NoisyChannelSpellChecker.Result; import java.io.IOException; @@ -143,7 +144,7 @@ public final class PhraseSuggester extends Suggester { return response; } - private PhraseSuggestion.Entry 
buildResultEntry(PhraseSuggestionContext suggestion, CharsRefBuilder spare, double cutoffScore) { + private PhraseSuggestion.Entry buildResultEntry(SuggestionContext suggestion, CharsRefBuilder spare, double cutoffScore) { spare.copyUTF8Bytes(suggestion.getText()); return new PhraseSuggestion.Entry(new Text(spare.toString()), 0, spare.length(), cutoffScore); } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java index 4b63c03f8ec0..cf5eb4febab5 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.suggest.phrase; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.search.suggest.DirectSpellcheckerSettings; import org.elasticsearch.search.suggest.Suggester; @@ -31,7 +30,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -class PhraseSuggestionContext extends SuggestionContext { +public class PhraseSuggestionContext extends SuggestionContext { static final boolean DEFAULT_COLLATE_PRUNE = false; static final boolean DEFAULT_REQUIRE_UNIGRAM = true; static final float DEFAULT_CONFIDENCE = 1.0f; @@ -54,7 +53,6 @@ class PhraseSuggestionContext extends SuggestionContext { private List generators = new ArrayList<>(); private Map collateScriptParams = new HashMap<>(1); private WordScorer.WordScorerFactory scorer; - private QueryShardContext shardContext; public PhraseSuggestionContext(Suggester suggester) { super(suggester); @@ -216,12 +214,4 @@ class PhraseSuggestionContext extends SuggestionContext { return prune; } - public void setShardContext(QueryShardContext 
context) { - this.shardContext = context; - } - - public QueryShardContext getShardContext() { - return this.shardContext; - } - } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java index a2fd680c215a..f819ed412afc 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java @@ -20,8 +20,8 @@ package org.elasticsearch.search.suggest.term; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.suggest.DirectSpellcheckerSettings; import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.SuggestUtils; @@ -38,7 +38,8 @@ public final class TermSuggestParser implements SuggestContextParser { } @Override - public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexFieldDataService fieldDataService) throws IOException { + public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, QueryShardContext shardContext) throws IOException { + MapperService mapperService = shardContext.getMapperService(); XContentParser.Token token; String fieldName = null; TermSuggestionContext suggestion = new TermSuggestionContext(suggester); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index 501041c6af24..0df3abecefda 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ 
b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -271,7 +271,7 @@ public abstract class AbstractSuggestionBuilderTestCase { + return (parser, shardContext) -> { Map options = parser.map(); CustomSuggestionsContext suggestionContext = new CustomSuggestionsContext(CustomSuggester.this, options); suggestionContext.setField((String) options.get("field")); From 73b819bf9ba13649a1ebaa6b24f4361864bea98b Mon Sep 17 00:00:00 2001 From: Ali Beyad Date: Tue, 9 Feb 2016 00:25:47 -0500 Subject: [PATCH 018/320] Building the term suggesters from the builder object --- .../suggest/TransportSuggestAction.java | 4 +- .../suggest/DirectSpellcheckerSettings.java | 30 ++++++++--- .../elasticsearch/search/suggest/Suggest.java | 34 ------------- .../search/suggest/SuggestBuilder.java | 9 ++++ .../search/suggest/SuggestParseElement.java | 2 +- .../search/suggest/SuggestUtils.java | 31 ++++++++++-- .../search/suggest/SuggestionBuilder.java | 17 ++++++- .../suggest/SuggestionSearchContext.java | 15 ++++++ .../CompletionSuggestionBuilder.java | 2 +- .../DirectCandidateGeneratorBuilder.java | 4 +- .../phrase/PhraseSuggestionBuilder.java | 10 +--- .../search/suggest/term/TermSuggestion.java | 7 +-- .../suggest/term/TermSuggestionBuilder.java | 50 ++++++++++++++++++- .../suggest/term/TermSuggestionContext.java | 7 ++- .../AbstractSuggestionBuilderTestCase.java | 11 +++- .../suggest/CustomSuggesterSearchIT.java | 10 ++-- .../term/TermSuggestionBuilderTests.java | 46 +++++++++++------ 17 files changed, 202 insertions(+), 87 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java b/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java index 36fb079d257a..616dbf94937a 100644 --- a/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java +++ b/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java @@ -39,7 +39,6 @@ import 
org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.suggest.stats.ShardSuggestMetric; import org.elasticsearch.indices.IndicesService; @@ -132,7 +131,6 @@ public class TransportSuggestAction extends TransportBroadcastAction LUCENE_FREQUENCY = new SuggestWordFrequencyComparator(); @@ -172,12 +173,12 @@ public final class SuggestUtils { } } - public static Suggest.Suggestion.Sort resolveSort(String sortVal) { + public static TermSuggestionBuilder.SortBy resolveSort(String sortVal) { sortVal = sortVal.toLowerCase(Locale.US); if ("score".equals(sortVal)) { - return Suggest.Suggestion.Sort.SCORE; + return TermSuggestionBuilder.SortBy.SCORE; } else if ("frequency".equals(sortVal)) { - return Suggest.Suggestion.Sort.FREQUENCY; + return TermSuggestionBuilder.SortBy.FREQUENCY; } else { throw new IllegalArgumentException("Illegal suggest sort " + sortVal); } @@ -201,6 +202,28 @@ public final class SuggestUtils { } } + public static SuggestMode resolveSuggestMode(TermSuggestionBuilder.SuggestMode suggestMode) { + Objects.requireNonNull(suggestMode, "suggestMode must not be null"); + switch (suggestMode) { + case MISSING: return SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX; + case POPULAR: return SuggestMode.SUGGEST_MORE_POPULAR; + case ALWAYS: return SuggestMode.SUGGEST_ALWAYS; + default: throw new IllegalArgumentException("Unknown suggestMode [" + suggestMode + "]"); + } + } + + public static StringDistance resolveStringDistance(TermSuggestionBuilder.StringDistanceImpl stringDistance) { + Objects.requireNonNull(stringDistance, "stringDistance must not be null"); + switch (stringDistance) { + case INTERNAL: return DirectSpellChecker.INTERNAL_LEVENSHTEIN; + case DAMERAU_LEVENSHTEIN: return new 
LuceneLevenshteinDistance(); + case LEVENSTEIN: return new LevensteinDistance(); + case JAROWINKLER: return new JaroWinklerDistance(); + case NGRAM: return new NGramDistance(); + default: throw new IllegalArgumentException("Illegal distance option " + stringDistance); + } + } + public static class Fields { public static final ParseField STRING_DISTANCE = new ParseField("string_distance"); public static final ParseField SUGGEST_MODE = new ParseField("suggest_mode"); @@ -243,7 +266,7 @@ public final class SuggestUtils { } else if (parseFieldMatcher.match(fieldName, Fields.PREFIX_LENGTH)) { suggestion.prefixLength(parser.intValue()); } else if (parseFieldMatcher.match(fieldName, Fields.MIN_WORD_LENGTH)) { - suggestion.minQueryLength(parser.intValue()); + suggestion.minWordLength(parser.intValue()); } else if (parseFieldMatcher.match(fieldName, Fields.MIN_DOC_FREQ)) { suggestion.minDocFreq(parser.floatValue()); } else { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java index e1ecca1cccea..fd973e48f651 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java @@ -19,7 +19,9 @@ package org.elasticsearch.search.suggest; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.support.ToXContentToBytes; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.NamedWriteable; @@ -194,7 +196,20 @@ public abstract class SuggestionBuilder> extends protected abstract SuggestionBuilder innerFromXContent(QueryParseContext parseContext, String name) throws IOException; - protected abstract SuggestionContext build(QueryShardContext context) throws IOException; + public SuggestionContext build(QueryShardContext context, 
@Nullable String globalText) throws IOException { + SuggestionContext suggestionContext = innerBuild(context); + // copy over common settings to each suggestion builder + SuggestUtils.suggestionToSuggestionContext(this, context.getMapperService(), suggestionContext); + SuggestUtils.verifySuggestion(context.getMapperService(), new BytesRef(globalText), suggestionContext); + suggestionContext.setShardContext(context); + // TODO make field mandatory in the builder, then remove this + if (suggestionContext.getField() == null) { + throw new IllegalArgumentException("The required field option is missing"); + } + return suggestionContext; + } + + protected abstract SuggestionContext innerBuild(QueryShardContext context) throws IOException; public String getSuggesterName() { //default impl returns the same as writeable name, but we keep the distinction between the two just to make sure diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java index 02953356876f..206b88117929 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java @@ -127,6 +127,21 @@ public class SuggestionSearchContext { public QueryShardContext getShardContext() { return this.shardContext; } + + @Override + public String toString() { + return "[" + + "text=" + text + + ",field=" + field + + ",prefix=" + prefix + + ",regex=" + regex + + ",size=" + size + + ",shardSize=" + shardSize + + ",suggester=" + suggester + + ",analyzer=" + analyzer + + ",shardContext=" + shardContext + + "]"; + } } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 0bd37be128d0..1f4a5cda9c3e 100644 --- 
a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -379,7 +379,7 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder { } public static final int TYPE = 1; - private Sort sort; + private TermSuggestionBuilder.SortBy sort; public TermSuggestion() { } - public TermSuggestion(String name, int size, Sort sort) { + public TermSuggestion(String name, int size, TermSuggestionBuilder.SortBy sort) { super(name, size); this.sort = sort; } @@ -110,7 +111,7 @@ public class TermSuggestion extends Suggestion { @Override protected void innerReadFrom(StreamInput in) throws IOException { super.innerReadFrom(in); - sort = Sort.fromId(in.readByte()); + sort = TermSuggestionBuilder.SortBy.fromId(in.readByte()); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java index 1f190b674a6c..3988da23e56e 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.suggest.term; import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -27,6 +28,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.search.suggest.DirectSpellcheckerSettings; +import org.elasticsearch.search.suggest.SuggestUtils; import 
org.elasticsearch.search.suggest.SuggestionBuilder; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; @@ -383,6 +386,12 @@ public class TermSuggestionBuilder extends SuggestionBuilder { /** Only suggest terms in the suggest text that aren't in the index. This is the default. */ @@ -472,12 +498,18 @@ public class TermSuggestionBuilder extends SuggestionBuilder { /** Sort should first be based on score, then document frequency and then the term itself. */ - SCORE, + SCORE((byte) 0x0), /** Sort should first be based on document frequency, then score and then the term itself. */ - FREQUENCY; + FREQUENCY((byte) 0x1); protected static SortBy PROTOTYPE = SCORE; + private byte id; + + SortBy(byte id) { + this.id = id; + } + @Override public void writeTo(final StreamOutput out) throws IOException { out.writeVInt(ordinal()); @@ -496,6 +528,20 @@ public class TermSuggestionBuilder extends SuggestionBuilder options = new HashMap<>(); + options.put("field", randomField); + options.put("suffix", randomSuffix); + return new CustomSuggester.CustomSuggestionsContext(new CustomSuggester(), options); } } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java index ac14efdb4d14..7085ed415d2c 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java @@ -20,7 +20,11 @@ package org.elasticsearch.search.suggest.term; import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase; +import org.elasticsearch.search.suggest.DirectSpellcheckerSettings; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; +import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SortBy; +import 
org.elasticsearch.search.suggest.term.TermSuggestionBuilder.StringDistanceImpl; +import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; import java.io.IOException; @@ -31,20 +35,9 @@ import static org.hamcrest.Matchers.notNullValue; */ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCase { - /** - * creates random suggestion builder, renders it to xContent and back to new instance that should be equal to original - */ @Override - public void testFromXContent() throws IOException { - // skip for now - } - - /** - * creates random suggestion builder, renders it to xContent and back to new instance that should be equal to original - */ - @Override - public void testBuild() throws IOException { - // skip for now + public void testFromXContent() { + // NORELEASE : remove this when TermSuggestionBuilder's fromXContent is in } @Override @@ -261,7 +254,32 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas @Override protected void assertSuggestionContext(SuggestionContext oldSuggestion, SuggestionContext newSuggestion) { - // put assertions on TermSuggestionContext here + @SuppressWarnings("unchecked") + TermSuggestionContext oldContext = (TermSuggestionContext) oldSuggestion; + @SuppressWarnings("unchecked") + TermSuggestionContext newContext = (TermSuggestionContext) newSuggestion; + assertSpellcheckerSettings(oldContext.getDirectSpellCheckerSettings(), newContext.getDirectSpellCheckerSettings()); + + } + + private void assertSpellcheckerSettings(DirectSpellcheckerSettings oldSettings, DirectSpellcheckerSettings newSettings) { + final double delta = 0.0d; + // make sure the objects aren't the same + assertNotSame(oldSettings, newSettings); + // make sure the objects aren't null + assertNotNull(oldSettings); + assertNotNull(newSettings); + // and now, make sure they are equal.. 
+ assertEquals(oldSettings.accuracy(), newSettings.accuracy(), delta); + assertEquals(oldSettings.maxEdits(), newSettings.maxEdits()); + assertEquals(oldSettings.maxInspections(), newSettings.maxInspections()); + assertEquals(oldSettings.maxTermFreq(), newSettings.maxTermFreq(), delta); + assertEquals(oldSettings.minDocFreq(), newSettings.minDocFreq(), delta); + assertEquals(oldSettings.minWordLength(), newSettings.minWordLength()); + assertEquals(oldSettings.prefixLength(), newSettings.prefixLength()); + assertEquals(oldSettings.sort(), newSettings.sort()); + assertEquals(oldSettings.stringDistance().getClass(), newSettings.stringDistance().getClass()); + assertEquals(oldSettings.suggestMode().getClass(), newSettings.suggestMode().getClass()); } } From e0023a6692cd933b12dca790c3c5f19a2c90b4b6 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Wed, 10 Feb 2016 11:31:23 +0100 Subject: [PATCH 019/320] Remove deprecated parameter from field sort builder. This removes the deprecated ignore_unmapped parameter from field sort builder. This is in preparation of #16127 --- .../search/sort/FieldSortBuilder.java | 16 ---------------- .../search/sort/SortParseElement.java | 8 -------- 2 files changed, 24 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index c415fd5a70b7..4f082b057da8 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -35,8 +35,6 @@ public class FieldSortBuilder extends SortBuilder { private Object missing; - private Boolean ignoreUnmapped; - private String unmappedType; private String sortMode; @@ -76,17 +74,6 @@ public class FieldSortBuilder extends SortBuilder { return this; } - /** - * Sets if the field does not exists in the index, it should be ignored and not sorted by or not. Defaults - * to false (not ignoring). 
- * @deprecated Use {@link #unmappedType(String)} instead. - */ - @Deprecated - public FieldSortBuilder ignoreUnmapped(boolean ignoreUnmapped) { - this.ignoreUnmapped = ignoreUnmapped; - return this; - } - /** * Set the type to use in case the current field is not mapped in an index. * Specifying a type tells Elasticsearch what type the sort values should have, which is important @@ -138,9 +125,6 @@ public class FieldSortBuilder extends SortBuilder { if (missing != null) { builder.field("missing", missing); } - if (ignoreUnmapped != null) { - builder.field(SortParseElement.IGNORE_UNMAPPED.getPreferredName(), ignoreUnmapped); - } if (unmappedType != null) { builder.field(SortParseElement.UNMAPPED_TYPE.getPreferredName(), unmappedType); } diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java b/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java index a99158787d3a..5349d6fc0d6f 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.core.LongFieldMapper; import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport; import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.SearchParseElement; @@ -55,7 +54,6 @@ public class SortParseElement implements SearchParseElement { private static final SortField SORT_DOC = new SortField(null, SortField.Type.DOC); private static final SortField SORT_DOC_REVERSE = new SortField(null, SortField.Type.DOC, true); - public static final ParseField IGNORE_UNMAPPED = new ParseField("ignore_unmapped"); public static final ParseField UNMAPPED_TYPE 
= new ParseField("unmapped_type"); public static final String SCORE_FIELD_NAME = "_score"; @@ -156,12 +154,6 @@ public class SortParseElement implements SearchParseElement { } } else if ("missing".equals(innerJsonName)) { missing = parser.textOrNull(); - } else if (context.parseFieldMatcher().match(innerJsonName, IGNORE_UNMAPPED)) { - // backward compatibility: ignore_unmapped has been replaced with unmapped_type - if (unmappedType == null // don't override if unmapped_type has been provided too - && parser.booleanValue()) { - unmappedType = LongFieldMapper.CONTENT_TYPE; - } } else if (context.parseFieldMatcher().match(innerJsonName, UNMAPPED_TYPE)) { unmappedType = parser.textOrNull(); } else if ("mode".equals(innerJsonName)) { From 9e0f6e3f9c3ba7a539dcb9366529db5ab3295d61 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 2 Feb 2016 15:48:27 +0100 Subject: [PATCH 020/320] Adding method to build SuggestionContext to PhraseSuggestionBuilder This adds a build method for the SuggestionContext to the PhraseSuggestionBuilder and another one that creates the SuggestionSearchContext to the top level SuggestBuilder. Also adding tests that make sure the current way of parsing xContent to a SuggestionContext is reflected in the output the builders create. 
--- .../suggest/TransportSuggestAction.java | 3 +- .../common/io/stream/StreamInput.java | 2 +- .../common/io/stream/StreamOutput.java | 2 +- .../indices/query/IndicesQueriesRegistry.java | 4 +- .../elasticsearch/search/SearchService.java | 4 +- .../search/suggest/SuggestBuilder.java | 18 + .../search/suggest/SuggestContextParser.java | 5 +- .../search/suggest/SuggestParseElement.java | 26 +- .../search/suggest/SuggestUtils.java | 3 +- .../search/suggest/Suggesters.java | 18 +- .../search/suggest/SuggestionBuilder.java | 55 ++ .../suggest/SuggestionSearchContext.java | 41 +- .../completion/CompletionSuggestParser.java | 8 +- .../completion/CompletionSuggester.java | 10 +- .../CompletionSuggestionBuilder.java | 9 + .../CompletionSuggestionContext.java | 25 +- .../DirectCandidateGeneratorBuilder.java | 4 +- .../search/suggest/phrase/Laplace.java | 126 +++++ .../suggest/phrase/LinearInterpolation.java | 176 ++++++ .../suggest/phrase/PhraseSuggestParser.java | 16 +- .../suggest/phrase/PhraseSuggester.java | 20 +- .../phrase/PhraseSuggestionBuilder.java | 524 ++++-------------- .../phrase/PhraseSuggestionContext.java | 9 +- .../search/suggest/phrase/SmoothingModel.java | 105 ++++ .../search/suggest/phrase/StupidBackoff.java | 129 +++++ .../suggest/term/TermSuggestParser.java | 7 +- .../search/suggest/term/TermSuggester.java | 2 + .../suggest/term/TermSuggestionBuilder.java | 8 + .../suggest/term/TermSuggestionContext.java | 7 +- .../elasticsearch/index/IndexModuleTests.java | 2 +- .../AbstractSuggestionBuilderTestCase.java | 164 +++++- .../search/suggest/CustomSuggester.java | 9 +- .../suggest/CustomSuggesterSearchIT.java | 8 + .../search/suggest/SuggestBuilderTests.java | 10 +- .../phrase/DirectCandidateGeneratorTests.java | 52 +- .../suggest/phrase/LaplaceModelTests.java | 3 - .../phrase/LinearInterpolationModelTests.java | 3 - .../phrase/PhraseSuggestionBuilderTests.java | 47 +- .../phrase/SmoothingModelTestCase.java | 4 - .../phrase/StupidBackoffModelTests.java | 3 
- .../term/TermSuggestionBuilderTests.java | 14 + .../messy/tests/SuggestSearchTests.java | 108 ++-- 42 files changed, 1135 insertions(+), 658 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/search/suggest/phrase/Laplace.java create mode 100644 core/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolation.java create mode 100644 core/src/main/java/org/elasticsearch/search/suggest/phrase/SmoothingModel.java create mode 100644 core/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoff.java diff --git a/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java b/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java index 0ed985785577..616dbf94937a 100644 --- a/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java +++ b/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java @@ -142,8 +142,7 @@ public class TransportSuggestAction extends TransportBroadcastAction> queryParsers; diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index cf9c0cebce7b..ff6d8897d59a 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -751,7 +751,7 @@ public class SearchService extends AbstractLifecycleComponent imp if (source.rescores() != null) { try { for (RescoreBuilder rescore : source.rescores()) { - context.addRescore(rescore.build(context.getQueryShardContext())); + context.addRescore(rescore.build(queryShardContext)); } } catch (IOException e) { throw new SearchContextException(context, "failed to create RescoreSearchContext", e); @@ -776,7 +776,7 @@ public class SearchService extends AbstractLifecycleComponent imp if (source.highlighter() != null) { HighlightBuilder highlightBuilder = source.highlighter(); try { - 
context.highlight(highlightBuilder.build(context.getQueryShardContext())); + context.highlight(highlightBuilder.build(queryShardContext)); } catch (IOException e) { throw new SearchContextException(context, "failed to create SearchContextHighlighter", e); } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java index d16e8e1d84aa..2852204bb6fc 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java @@ -26,9 +26,12 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import java.io.IOException; import java.util.ArrayList; @@ -137,6 +140,21 @@ public class SuggestBuilder extends ToXContentToBytes implements Writeable suggestionBuilder : suggestions) { + SuggestionContext suggestionContext = suggestionBuilder.build(context); + if (suggestionContext.getText() == null) { + if (globalText == null) { + throw new IllegalArgumentException("The required text option is missing"); + } + suggestionContext.setText(BytesRefs.toBytesRef(globalText)); + } + suggestionSearchContext.addSuggestion(suggestionBuilder.name(), suggestionContext); + } + return suggestionSearchContext; + } + @Override public SuggestBuilder readFrom(StreamInput in) throws IOException { final SuggestBuilder builder = new SuggestBuilder(); diff --git 
a/core/src/main/java/org/elasticsearch/search/suggest/SuggestContextParser.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestContextParser.java index a7aa3fd60b63..53d510bf5305 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestContextParser.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestContextParser.java @@ -19,12 +19,11 @@ package org.elasticsearch.search.suggest; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldDataService; -import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; public interface SuggestContextParser { - SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexFieldDataService indexFieldDataService) throws IOException; + SuggestionSearchContext.SuggestionContext parse(XContentParser parser, QueryShardContext shardContext) throws IOException; } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java index cf6b391ec631..b9454dc264aa 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java @@ -21,8 +21,8 @@ package org.elasticsearch.search.suggest; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; @@ -44,14 +44,13 @@ public 
final class SuggestParseElement implements SearchParseElement { @Override public void parse(XContentParser parser, SearchContext context) throws Exception { - SuggestionSearchContext suggestionSearchContext = parseInternal(parser, context.mapperService(), context.fieldData(), - context.shardTarget().index(), context.shardTarget().shardId()); + SuggestionSearchContext suggestionSearchContext = parseInternal(parser, context.getQueryShardContext()); context.suggest(suggestionSearchContext); } - public SuggestionSearchContext parseInternal(XContentParser parser, MapperService mapperService, IndexFieldDataService fieldDataService, - String index, int shardId) throws IOException { + public SuggestionSearchContext parseInternal(XContentParser parser, QueryShardContext shardContext) throws IOException { SuggestionSearchContext suggestionSearchContext = new SuggestionSearchContext(); + MapperService mapperService = shardContext.getMapperService(); BytesRef globalText = null; String fieldName = null; @@ -95,10 +94,20 @@ public final class SuggestParseElement implements SearchParseElement { throw new IllegalArgumentException("Suggester[" + fieldName + "] not supported"); } final SuggestContextParser contextParser = suggesters.get(fieldName).getContextParser(); - suggestionContext = contextParser.parse(parser, mapperService, fieldDataService); + suggestionContext = contextParser.parse(parser, shardContext); } } if (suggestionContext != null) { + if (suggestText != null) { + suggestionContext.setText(suggestText); + } + if (prefix != null) { + suggestionContext.setPrefix(prefix); + } + if (regex != null) { + suggestionContext.setRegex(regex); + } + if (suggestText != null && prefix == null) { suggestionContext.setPrefix(suggestText); suggestionContext.setText(suggestText); @@ -110,6 +119,8 @@ public final class SuggestParseElement implements SearchParseElement { suggestionContext.setText(regex); } suggestionContexts.put(suggestionName, suggestionContext); + } else { + throw new 
IllegalArgumentException("suggestion context could not be parsed correctly"); } } } @@ -117,9 +128,6 @@ public final class SuggestParseElement implements SearchParseElement { for (Map.Entry entry : suggestionContexts.entrySet()) { String suggestionName = entry.getKey(); SuggestionContext suggestionContext = entry.getValue(); - - suggestionContext.setShard(shardId); - suggestionContext.setIndex(index); SuggestUtils.verifySuggestion(mapperService, globalText, suggestionContext); suggestionSearchContext.addSuggestion(suggestionName, suggestionContext); } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java index 989546d50bf9..03fb785b9102 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java @@ -271,10 +271,10 @@ public final class SuggestUtils { return false; } return true; - } + public static void verifySuggestion(MapperService mapperService, BytesRef globalText, SuggestionContext suggestion) { // Verify options and set defaults if (suggestion.getField() == null) { @@ -294,7 +294,6 @@ public final class SuggestUtils { } } - public static ShingleTokenFilterFactory.Factory getShingleFilterFactory(Analyzer analyzer) { if (analyzer instanceof NamedAnalyzer) { analyzer = ((NamedAnalyzer)analyzer).analyzer(); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java b/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java index c26649f63881..9857a06da687 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java @@ -20,8 +20,6 @@ package org.elasticsearch.search.suggest; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.ExtensionPoint; -import org.elasticsearch.indices.IndicesService; -import 
org.elasticsearch.script.ScriptService; import org.elasticsearch.search.suggest.completion.CompletionSuggester; import org.elasticsearch.search.suggest.phrase.PhraseSuggester; import org.elasticsearch.search.suggest.term.TermSuggester; @@ -42,21 +40,17 @@ public final class Suggesters extends ExtensionPoint.ClassMap { this(Collections.emptyMap()); } + @Inject public Suggesters(Map suggesters) { super("suggester", Suggester.class, new HashSet<>(Arrays.asList("phrase", "term", "completion")), Suggesters.class, SuggestParseElement.class, SuggestPhase.class); - this.parsers = Collections.unmodifiableMap(suggesters); + this.parsers = Collections.unmodifiableMap(addBuildIns(suggesters)); } - @Inject - public Suggesters(Map suggesters, ScriptService scriptService, IndicesService indexServices) { - this(addBuildIns(suggesters, scriptService, indexServices)); - } - - private static Map addBuildIns(Map suggesters, ScriptService scriptService, IndicesService indexServices) { + private static Map addBuildIns(Map suggesters) { final Map map = new HashMap<>(); - map.put("phrase", new PhraseSuggester(scriptService, indexServices)); - map.put("term", new TermSuggester()); - map.put("completion", new CompletionSuggester()); + map.put("phrase", PhraseSuggester.PROTOTYPE); + map.put("term", TermSuggester.PROTOTYPE); + map.put("completion", CompletionSuggester.PROTOTYPE); map.putAll(suggesters); return map; } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java index 1fdb38df88f0..70f3d061b471 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java @@ -19,15 +19,20 @@ package org.elasticsearch.search.suggest; +import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.ParseField; import 
org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import java.io.IOException; import java.util.Objects; @@ -192,6 +197,56 @@ public abstract class SuggestionBuilder> extends protected abstract SuggestionBuilder innerFromXContent(QueryParseContext parseContext, String name) throws IOException; + protected abstract SuggestionContext build(QueryShardContext context) throws IOException; + + /** + * Transfers the text, prefix, regex, analyzer, fieldname, size and shard size settings from the + * original {@link SuggestionBuilder} to the target {@link SuggestionContext} + */ + protected void populateCommonFields(MapperService mapperService, + SuggestionSearchContext.SuggestionContext suggestionContext) throws IOException { + + if (analyzer != null) { + Analyzer luceneAnalyzer = mapperService.analysisService().analyzer(analyzer); + if (luceneAnalyzer == null) { + throw new IllegalArgumentException("Analyzer [" + luceneAnalyzer + "] doesn't exists"); + } + suggestionContext.setAnalyzer(luceneAnalyzer); + } + + if (fieldname != null) { + suggestionContext.setField(fieldname); + } + + if (size != null) { + suggestionContext.setSize(size); + } + + if (shardSize != null) { + suggestionContext.setShardSize(shardSize); + } else { + // if no shard size is set in builder, use size (or at least 5) + suggestionContext.setShardSize(Math.max(suggestionContext.getSize(), 5)); + } + + if (text != null) { + 
suggestionContext.setText(BytesRefs.toBytesRef(text)); + } + if (prefix != null) { + suggestionContext.setPrefix(BytesRefs.toBytesRef(prefix)); + } + if (regex != null) { + suggestionContext.setRegex(BytesRefs.toBytesRef(regex)); + } + if (text != null && prefix == null) { + suggestionContext.setPrefix(BytesRefs.toBytesRef(text)); + } else if (text == null && prefix != null) { + suggestionContext.setText(BytesRefs.toBytesRef(prefix)); + } else if (text == null && regex != null) { + suggestionContext.setText(BytesRefs.toBytesRef(regex)); + } + } + private String getSuggesterName() { //default impl returns the same as writeable name, but we keep the distinction between the two just to make sure return getWriteableName(); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java index 1d3339e0578b..fa468e6ce92d 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.suggest; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.query.QueryShardContext; import java.util.LinkedHashMap; import java.util.Map; @@ -36,20 +37,24 @@ public class SuggestionSearchContext { public Map suggestions() { return suggestions; } - - public static class SuggestionContext { - + + public abstract static class SuggestionContext { + private BytesRef text; private BytesRef prefix; private BytesRef regex; - private final Suggester suggester; private String field; private Analyzer analyzer; private int size = 5; private int shardSize = -1; - private int shardId; - private String index; - + private QueryShardContext shardContext; + private Suggester suggester; + + protected SuggestionContext(Suggester suggester, QueryShardContext shardContext) 
{ + this.suggester = suggester; + this.shardContext = shardContext; + } + public BytesRef getText() { return text; } @@ -74,12 +79,8 @@ public class SuggestionSearchContext { this.regex = regex; } - public SuggestionContext(Suggester suggester) { - this.suggester = suggester; - } - public Suggester getSuggester() { - return this.suggester; + return ((Suggester) suggester); } public Analyzer getAnalyzer() { @@ -119,21 +120,9 @@ public class SuggestionSearchContext { } this.shardSize = shardSize; } - - public void setShard(int shardId) { - this.shardId = shardId; - } - public void setIndex(String index) { - this.index = index; - } - - public String getIndex() { - return index; - } - - public int getShard() { - return shardId; + public QueryShardContext getShardContext() { + return this.shardContext; } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java index 702b03f359ed..9d2952511512 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java @@ -20,17 +20,16 @@ package org.elasticsearch.search.suggest.completion; import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import 
org.elasticsearch.index.mapper.core.CompletionFieldMapper; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.RegexpFlag; import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.SuggestUtils.Fields; @@ -135,8 +134,9 @@ public class CompletionSuggestParser implements SuggestContextParser { } @Override - public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexFieldDataService fieldDataService) throws IOException { - final CompletionSuggestionContext suggestion = new CompletionSuggestionContext(completionSuggester, mapperService, fieldDataService); + public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, QueryShardContext shardContext) throws IOException { + MapperService mapperService = shardContext.getMapperService(); + final CompletionSuggestionContext suggestion = new CompletionSuggestionContext(shardContext); final ContextAndSuggest contextAndSuggest = new ContextAndSuggest(mapperService); TLP_PARSER.parse(parser, suggestion, contextAndSuggest); final XContentParser contextParser = contextAndSuggest.contextParser; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index 8cd9d386a134..be90a2e7e73e 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -34,7 +34,9 @@ import org.elasticsearch.common.text.Text; import org.elasticsearch.index.fielddata.AtomicFieldData; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.CompletionFieldMapper; +import 
org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.Suggester; @@ -51,6 +53,8 @@ import java.util.Set; public class CompletionSuggester extends Suggester { + public static final CompletionSuggester PROTOTYPE = new CompletionSuggester(); + @Override public SuggestContextParser getContextParser() { return new CompletionSuggestParser(this); @@ -86,9 +90,11 @@ public class CompletionSuggester extends Suggester final LeafReaderContext subReaderContext = leaves.get(readerIndex); final int subDocId = suggestDoc.doc - subReaderContext.docBase; for (String field : payloadFields) { - MappedFieldType payloadFieldType = suggestionContext.getMapperService().fullName(field); + MapperService mapperService = suggestionContext.getShardContext().getMapperService(); + MappedFieldType payloadFieldType = mapperService.fullName(field); if (payloadFieldType != null) { - final AtomicFieldData data = suggestionContext.getIndexFieldDataService().getForField(payloadFieldType) + QueryShardContext shardContext = suggestionContext.getShardContext(); + final AtomicFieldData data = shardContext.getForField(payloadFieldType) .load(subReaderContext); final ScriptDocValues scriptValues = data.getScriptValues(); scriptValues.setNextDocId(subDocId); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 29992c1a077b..0bd37be128d0 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -28,8 +28,10 @@ import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.ToXContent; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.RegexpFlag; import org.elasticsearch.search.suggest.SuggestionBuilder; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext; import org.elasticsearch.search.suggest.completion.context.GeoQueryContext; @@ -372,9 +374,16 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder> queryContexts = Collections.emptyMap(); - private final MapperService mapperService; - private final IndexFieldDataService indexFieldDataService; private Set payloadFields = Collections.emptySet(); - CompletionSuggestionContext(Suggester suggester, MapperService mapperService, IndexFieldDataService indexFieldDataService) { - super(suggester); - this.indexFieldDataService = indexFieldDataService; - this.mapperService = mapperService; - } - CompletionFieldMapper.CompletionFieldType getFieldType() { return this.fieldType; } @@ -73,15 +67,6 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest this.queryContexts = queryContexts; } - - MapperService getMapperService() { - return mapperService; - } - - IndexFieldDataService getIndexFieldDataService() { - return indexFieldDataService; - } - void setPayloadFields(Set fields) { this.payloadFields = fields; } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java index 8cc834ef0d5e..dd1be571bd86 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java @@ -30,7 +30,6 @@ import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.suggest.SuggestUtils; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.CandidateGenerator; @@ -349,8 +348,7 @@ public final class DirectCandidateGeneratorBuilder return replaceField(tmpFieldName.iterator().next(), tempGenerator); } - public PhraseSuggestionContext.DirectCandidateGenerator build(QueryShardContext context) throws IOException { - MapperService mapperService = context.getMapperService(); + public PhraseSuggestionContext.DirectCandidateGenerator build(MapperService mapperService) throws IOException { PhraseSuggestionContext.DirectCandidateGenerator generator = new PhraseSuggestionContext.DirectCandidateGenerator(); generator.setField(this.field); transferIfNotNull(this.size, generator::size); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/Laplace.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/Laplace.java new file mode 100644 index 000000000000..e11a920f966d --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/Laplace.java @@ -0,0 +1,126 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.suggest.phrase; + +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.Terms; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory; + +import java.io.IOException; +import java.util.Objects; + +/** + * An additive + * smoothing model. + *

      + * See N-Gram + * Smoothing for details. + *

      + */ +public final class Laplace extends SmoothingModel { + private double alpha = DEFAULT_LAPLACE_ALPHA; + private static final String NAME = "laplace"; + private static final ParseField ALPHA_FIELD = new ParseField("alpha"); + static final ParseField PARSE_FIELD = new ParseField(NAME); + /** + * Default alpha parameter for laplace smoothing + */ + public static final double DEFAULT_LAPLACE_ALPHA = 0.5; + public static final Laplace PROTOTYPE = new Laplace(DEFAULT_LAPLACE_ALPHA); + + /** + * Creates a Laplace smoothing model. + * + */ + public Laplace(double alpha) { + this.alpha = alpha; + } + + /** + * @return the laplace model alpha parameter + */ + public double getAlpha() { + return this.alpha; + } + + @Override + protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(ALPHA_FIELD.getPreferredName(), alpha); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeDouble(alpha); + } + + @Override + public SmoothingModel readFrom(StreamInput in) throws IOException { + return new Laplace(in.readDouble()); + } + + @Override + protected boolean doEquals(SmoothingModel other) { + Laplace otherModel = (Laplace) other; + return Objects.equals(alpha, otherModel.alpha); + } + + @Override + protected final int doHashCode() { + return Objects.hash(alpha); + } + + @Override + public SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException { + XContentParser parser = parseContext.parser(); + XContentParser.Token token; + String fieldName = null; + double alpha = DEFAULT_LAPLACE_ALPHA; + while ((token = parser.nextToken()) != Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + fieldName = parser.currentName(); + } + if (token.isValue() && parseContext.parseFieldMatcher().match(fieldName, ALPHA_FIELD)) { + alpha = parser.doubleValue(); + } + 
} + return new Laplace(alpha); + } + + @Override + public WordScorerFactory buildWordScorerFactory() { + return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) + -> new LaplaceScorer(reader, terms, field, realWordLikelyhood, separator, alpha); + } +} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolation.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolation.java new file mode 100644 index 000000000000..b94ea333fdb1 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolation.java @@ -0,0 +1,176 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.suggest.phrase; + +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.Terms; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory; + +import java.io.IOException; +import java.util.Objects; + +/** + * Linear interpolation smoothing model. + *

      + * See N-Gram + * Smoothing for details. + *

      + */ +public final class LinearInterpolation extends SmoothingModel { + private static final String NAME = "linear"; + public static final LinearInterpolation PROTOTYPE = new LinearInterpolation(0.8, 0.1, 0.1); + private final double trigramLambda; + private final double bigramLambda; + private final double unigramLambda; + static final ParseField PARSE_FIELD = new ParseField(NAME); + private static final ParseField TRIGRAM_FIELD = new ParseField("trigram_lambda"); + private static final ParseField BIGRAM_FIELD = new ParseField("bigram_lambda"); + private static final ParseField UNIGRAM_FIELD = new ParseField("unigram_lambda"); + + /** + * Creates a linear interpolation smoothing model. + * + * Note: the lambdas must sum up to one. + * + * @param trigramLambda + * the trigram lambda + * @param bigramLambda + * the bigram lambda + * @param unigramLambda + * the unigram lambda + */ + public LinearInterpolation(double trigramLambda, double bigramLambda, double unigramLambda) { + double sum = trigramLambda + bigramLambda + unigramLambda; + if (Math.abs(sum - 1.0) > 0.001) { + throw new IllegalArgumentException("linear smoothing lambdas must sum to 1"); + } + this.trigramLambda = trigramLambda; + this.bigramLambda = bigramLambda; + this.unigramLambda = unigramLambda; + } + + public double getTrigramLambda() { + return this.trigramLambda; + } + + public double getBigramLambda() { + return this.bigramLambda; + } + + public double getUnigramLambda() { + return this.unigramLambda; + } + + @Override + protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(TRIGRAM_FIELD.getPreferredName(), trigramLambda); + builder.field(BIGRAM_FIELD.getPreferredName(), bigramLambda); + builder.field(UNIGRAM_FIELD.getPreferredName(), unigramLambda); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + 
out.writeDouble(trigramLambda); + out.writeDouble(bigramLambda); + out.writeDouble(unigramLambda); + } + + @Override + public LinearInterpolation readFrom(StreamInput in) throws IOException { + return new LinearInterpolation(in.readDouble(), in.readDouble(), in.readDouble()); + } + + @Override + protected boolean doEquals(SmoothingModel other) { + final LinearInterpolation otherModel = (LinearInterpolation) other; + return Objects.equals(trigramLambda, otherModel.trigramLambda) && + Objects.equals(bigramLambda, otherModel.bigramLambda) && + Objects.equals(unigramLambda, otherModel.unigramLambda); + } + + @Override + protected final int doHashCode() { + return Objects.hash(trigramLambda, bigramLambda, unigramLambda); + } + + @Override + public LinearInterpolation innerFromXContent(QueryParseContext parseContext) throws IOException { + XContentParser parser = parseContext.parser(); + XContentParser.Token token; + String fieldName = null; + double trigramLambda = 0.0; + double bigramLambda = 0.0; + double unigramLambda = 0.0; + ParseFieldMatcher matcher = parseContext.parseFieldMatcher(); + while ((token = parser.nextToken()) != Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + fieldName = parser.currentName(); + } else if (token.isValue()) { + if (matcher.match(fieldName, TRIGRAM_FIELD)) { + trigramLambda = parser.doubleValue(); + if (trigramLambda < 0) { + throw new IllegalArgumentException("trigram_lambda must be positive"); + } + } else if (matcher.match(fieldName, BIGRAM_FIELD)) { + bigramLambda = parser.doubleValue(); + if (bigramLambda < 0) { + throw new IllegalArgumentException("bigram_lambda must be positive"); + } + } else if (matcher.match(fieldName, UNIGRAM_FIELD)) { + unigramLambda = parser.doubleValue(); + if (unigramLambda < 0) { + throw new IllegalArgumentException("unigram_lambda must be positive"); + } + } else { + throw new IllegalArgumentException( + "suggester[phrase][smoothing][linear] doesn't support field [" + fieldName + 
"]"); + } + } else { + throw new ParsingException(parser.getTokenLocation(), + "[" + NAME + "] unknown token [" + token + "] after [" + fieldName + "]"); + } + } + return new LinearInterpolation(trigramLambda, bigramLambda, unigramLambda); + } + + @Override + public WordScorerFactory buildWordScorerFactory() { + return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) -> + new LinearInterpoatingScorer(reader, terms, field, realWordLikelyhood, separator, trigramLambda, bigramLambda, + unigramLambda); + } +} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java index fc60fc6fc80d..e4400fb5cd20 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java @@ -26,17 +26,16 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; -import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.Template; import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.SuggestUtils; import org.elasticsearch.search.suggest.SuggestionSearchContext; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.Laplace; -import 
org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.StupidBackoff; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator; import java.io.IOException; @@ -51,8 +50,10 @@ public final class PhraseSuggestParser implements SuggestContextParser { } @Override - public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexFieldDataService fieldDataService) throws IOException { - PhraseSuggestionContext suggestion = new PhraseSuggestionContext(suggester); + public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, QueryShardContext shardContext) throws IOException { + MapperService mapperService = shardContext.getMapperService(); + ScriptService scriptService = shardContext.getScriptService(); + PhraseSuggestionContext suggestion = new PhraseSuggestionContext(shardContext); ParseFieldMatcher parseFieldMatcher = mapperService.getIndexSettings().getParseFieldMatcher(); XContentParser.Token token; String fieldName = null; @@ -135,7 +136,7 @@ public final class PhraseSuggestParser implements SuggestContextParser { throw new IllegalArgumentException("suggester[phrase][collate] query already set, doesn't support additional [" + fieldName + "]"); } Template template = Template.parse(parser, parseFieldMatcher); - CompiledScript compiledScript = suggester.scriptService().compile(template, ScriptContext.Standard.SEARCH, Collections.emptyMap()); + CompiledScript compiledScript = scriptService.compile(template, ScriptContext.Standard.SEARCH, Collections.emptyMap()); suggestion.setCollateQueryScript(compiledScript); } else if ("params".equals(fieldName)) { suggestion.setCollateScriptParams(parser.map()); @@ -199,9 +200,6 @@ public final class PhraseSuggestParser implements SuggestContextParser { suggestion.addGenerator(generator); } } - - - return suggestion; } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java 
b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java index fbfa2b03ceb4..8f3e5164e407 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java @@ -31,9 +31,7 @@ import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.text.Text; -import org.elasticsearch.index.IndexService; import org.elasticsearch.index.query.ParsedQuery; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptService; @@ -44,6 +42,7 @@ import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.SuggestUtils; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.SuggestionBuilder; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.phrase.NoisyChannelSpellChecker.Result; import java.io.IOException; @@ -54,13 +53,8 @@ import java.util.Map; public final class PhraseSuggester extends Suggester { private final BytesRef SEPARATOR = new BytesRef(" "); private static final String SUGGESTION_TEMPLATE_VAR_NAME = "suggestion"; - private final ScriptService scriptService; - private final IndicesService indicesService; - public PhraseSuggester(ScriptService scriptService, IndicesService indicesService) { - this.scriptService = scriptService; - this.indicesService = indicesService; - } + public static final PhraseSuggester PROTOTYPE = new PhraseSuggester(); /* * More Ideas: @@ -118,10 +112,10 @@ public final class PhraseSuggester extends Suggester { // from the index for a correction, collateMatch is updated final Map vars = suggestion.getCollateScriptParams(); 
vars.put(SUGGESTION_TEMPLATE_VAR_NAME, spare.toString()); + ScriptService scriptService = suggestion.getShardContext().getScriptService(); final ExecutableScript executable = scriptService.executable(collateScript, vars); final BytesReference querySource = (BytesReference) executable.run(); - IndexService indexService = indicesService.indexService(suggestion.getIndex()); - final ParsedQuery parsedQuery = indexService.newQueryShardContext().parse(querySource); + final ParsedQuery parsedQuery = suggestion.getShardContext().parse(querySource); collateMatch = Lucene.exists(searcher, parsedQuery.query()); } if (!collateMatch && !collatePrune) { @@ -145,15 +139,11 @@ public final class PhraseSuggester extends Suggester { return response; } - private PhraseSuggestion.Entry buildResultEntry(PhraseSuggestionContext suggestion, CharsRefBuilder spare, double cutoffScore) { + private PhraseSuggestion.Entry buildResultEntry(SuggestionContext suggestion, CharsRefBuilder spare, double cutoffScore) { spare.copyUTF8Bytes(suggestion.getText()); return new PhraseSuggestion.Entry(new Text(spare.toString()), 0, spare.length(), cutoffScore); } - ScriptService scriptService() { - return scriptService; - } - @Override public SuggestContextParser getContextParser() { return new PhraseSuggestParser(this); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java index c079812afe45..c83f68716bc7 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java @@ -19,27 +19,32 @@ package org.elasticsearch.search.suggest.phrase; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.Terms; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.ParseField; import 
org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.script.CompiledScript; +import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.Template; +import org.elasticsearch.search.suggest.SuggestUtils; import org.elasticsearch.search.suggest.SuggestionBuilder; -import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -239,7 +244,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder= 1"); + } this.tokenLimit = tokenLimit; return this; } @@ -389,413 +397,6 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder Katz's - * Backoff. This model is used as the default if no model is configured. - *

      - * See N-Gram - * Smoothing for details. - *

      - */ - public static final class StupidBackoff extends SmoothingModel { - /** - * Default discount parameter for {@link StupidBackoff} smoothing - */ - public static final double DEFAULT_BACKOFF_DISCOUNT = 0.4; - public static final StupidBackoff PROTOTYPE = new StupidBackoff(DEFAULT_BACKOFF_DISCOUNT); - private double discount = DEFAULT_BACKOFF_DISCOUNT; - private static final String NAME = "stupid_backoff"; - private static final ParseField DISCOUNT_FIELD = new ParseField("discount"); - private static final ParseField PARSE_FIELD = new ParseField(NAME); - - /** - * Creates a Stupid-Backoff smoothing model. - * - * @param discount - * the discount given to lower order ngrams if the higher order ngram doesn't exits - */ - public StupidBackoff(double discount) { - this.discount = discount; - } - - /** - * @return the discount parameter of the model - */ - public double getDiscount() { - return this.discount; - } - - @Override - protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(DISCOUNT_FIELD.getPreferredName(), discount); - return builder; - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeDouble(discount); - } - - @Override - public StupidBackoff readFrom(StreamInput in) throws IOException { - return new StupidBackoff(in.readDouble()); - } - - @Override - protected boolean doEquals(SmoothingModel other) { - StupidBackoff otherModel = (StupidBackoff) other; - return Objects.equals(discount, otherModel.discount); - } - - @Override - protected final int doHashCode() { - return Objects.hash(discount); - } - - @Override - public SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException { - XContentParser parser = parseContext.parser(); - XContentParser.Token token; - String fieldName = null; - double discount = DEFAULT_BACKOFF_DISCOUNT; - while ((token = 
parser.nextToken()) != Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); - } - if (token.isValue() && parseContext.parseFieldMatcher().match(fieldName, DISCOUNT_FIELD)) { - discount = parser.doubleValue(); - } - } - return new StupidBackoff(discount); - } - - @Override - public WordScorerFactory buildWordScorerFactory() { - return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) - -> new StupidBackoffScorer(reader, terms, field, realWordLikelyhood, separator, discount); - } - } - - /** - * An additive - * smoothing model. - *

      - * See N-Gram - * Smoothing for details. - *

      - */ - public static final class Laplace extends SmoothingModel { - private double alpha = DEFAULT_LAPLACE_ALPHA; - private static final String NAME = "laplace"; - private static final ParseField ALPHA_FIELD = new ParseField("alpha"); - private static final ParseField PARSE_FIELD = new ParseField(NAME); - /** - * Default alpha parameter for laplace smoothing - */ - public static final double DEFAULT_LAPLACE_ALPHA = 0.5; - public static final Laplace PROTOTYPE = new Laplace(DEFAULT_LAPLACE_ALPHA); - - /** - * Creates a Laplace smoothing model. - * - */ - public Laplace(double alpha) { - this.alpha = alpha; - } - - /** - * @return the laplace model alpha parameter - */ - public double getAlpha() { - return this.alpha; - } - - @Override - protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(ALPHA_FIELD.getPreferredName(), alpha); - return builder; - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeDouble(alpha); - } - - @Override - public SmoothingModel readFrom(StreamInput in) throws IOException { - return new Laplace(in.readDouble()); - } - - @Override - protected boolean doEquals(SmoothingModel other) { - Laplace otherModel = (Laplace) other; - return Objects.equals(alpha, otherModel.alpha); - } - - @Override - protected final int doHashCode() { - return Objects.hash(alpha); - } - - @Override - public SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException { - XContentParser parser = parseContext.parser(); - XContentParser.Token token; - String fieldName = null; - double alpha = DEFAULT_LAPLACE_ALPHA; - while ((token = parser.nextToken()) != Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); - } - if (token.isValue() && parseContext.parseFieldMatcher().match(fieldName, ALPHA_FIELD)) { - alpha = 
parser.doubleValue(); - } - } - return new Laplace(alpha); - } - - @Override - public WordScorerFactory buildWordScorerFactory() { - return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) - -> new LaplaceScorer(reader, terms, field, realWordLikelyhood, separator, alpha); - } - } - - - public static abstract class SmoothingModel implements NamedWriteable, ToXContent { - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(getWriteableName()); - innerToXContent(builder,params); - builder.endObject(); - return builder; - } - - @Override - public final boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - SmoothingModel other = (SmoothingModel) obj; - return doEquals(other); - } - - public static SmoothingModel fromXContent(QueryParseContext parseContext) throws IOException { - XContentParser parser = parseContext.parser(); - ParseFieldMatcher parseFieldMatcher = parseContext.parseFieldMatcher(); - XContentParser.Token token; - String fieldName = null; - SmoothingModel model = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); - } else if (token == XContentParser.Token.START_OBJECT) { - if (parseFieldMatcher.match(fieldName, LinearInterpolation.PARSE_FIELD)) { - model = LinearInterpolation.PROTOTYPE.innerFromXContent(parseContext); - } else if (parseFieldMatcher.match(fieldName, Laplace.PARSE_FIELD)) { - model = Laplace.PROTOTYPE.innerFromXContent(parseContext); - } else if (parseFieldMatcher.match(fieldName, StupidBackoff.PARSE_FIELD)) { - model = StupidBackoff.PROTOTYPE.innerFromXContent(parseContext); - } else { - throw new IllegalArgumentException("suggester[phrase] doesn't support object field [" + fieldName + "]"); - } - } else { - throw 
new ParsingException(parser.getTokenLocation(), - "[smoothing] unknown token [" + token + "] after [" + fieldName + "]"); - } - } - return model; - } - - public abstract SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException; - - @Override - public final int hashCode() { - /* - * Override hashCode here and forward to an abstract method to force extensions of this class to override hashCode in the same - * way that we force them to override equals. This also prevents false positives in CheckStyle's EqualsHashCode check. - */ - return doHashCode(); - } - - public abstract WordScorerFactory buildWordScorerFactory(); - - /** - * subtype specific implementation of "equals". - */ - protected abstract boolean doEquals(SmoothingModel other); - - protected abstract int doHashCode(); - - protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException; - } - - /** - * Linear interpolation smoothing model. - *

      - * See N-Gram - * Smoothing for details. - *

      - */ - public static final class LinearInterpolation extends SmoothingModel { - private static final String NAME = "linear"; - public static final LinearInterpolation PROTOTYPE = new LinearInterpolation(0.8, 0.1, 0.1); - private final double trigramLambda; - private final double bigramLambda; - private final double unigramLambda; - private static final ParseField PARSE_FIELD = new ParseField(NAME); - private static final ParseField TRIGRAM_FIELD = new ParseField("trigram_lambda"); - private static final ParseField BIGRAM_FIELD = new ParseField("bigram_lambda"); - private static final ParseField UNIGRAM_FIELD = new ParseField("unigram_lambda"); - - /** - * Creates a linear interpolation smoothing model. - * - * Note: the lambdas must sum up to one. - * - * @param trigramLambda - * the trigram lambda - * @param bigramLambda - * the bigram lambda - * @param unigramLambda - * the unigram lambda - */ - public LinearInterpolation(double trigramLambda, double bigramLambda, double unigramLambda) { - double sum = trigramLambda + bigramLambda + unigramLambda; - if (Math.abs(sum - 1.0) > 0.001) { - throw new IllegalArgumentException("linear smoothing lambdas must sum to 1"); - } - this.trigramLambda = trigramLambda; - this.bigramLambda = bigramLambda; - this.unigramLambda = unigramLambda; - } - - public double getTrigramLambda() { - return this.trigramLambda; - } - - public double getBigramLambda() { - return this.bigramLambda; - } - - public double getUnigramLambda() { - return this.unigramLambda; - } - - @Override - protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(TRIGRAM_FIELD.getPreferredName(), trigramLambda); - builder.field(BIGRAM_FIELD.getPreferredName(), bigramLambda); - builder.field(UNIGRAM_FIELD.getPreferredName(), unigramLambda); - return builder; - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public void writeTo(StreamOutput out) throws 
IOException { - out.writeDouble(trigramLambda); - out.writeDouble(bigramLambda); - out.writeDouble(unigramLambda); - } - - @Override - public LinearInterpolation readFrom(StreamInput in) throws IOException { - return new LinearInterpolation(in.readDouble(), in.readDouble(), in.readDouble()); - } - - @Override - protected boolean doEquals(SmoothingModel other) { - final LinearInterpolation otherModel = (LinearInterpolation) other; - return Objects.equals(trigramLambda, otherModel.trigramLambda) && - Objects.equals(bigramLambda, otherModel.bigramLambda) && - Objects.equals(unigramLambda, otherModel.unigramLambda); - } - - @Override - protected final int doHashCode() { - return Objects.hash(trigramLambda, bigramLambda, unigramLambda); - } - - @Override - public LinearInterpolation innerFromXContent(QueryParseContext parseContext) throws IOException { - XContentParser parser = parseContext.parser(); - XContentParser.Token token; - String fieldName = null; - double trigramLambda = 0.0; - double bigramLambda = 0.0; - double unigramLambda = 0.0; - ParseFieldMatcher matcher = parseContext.parseFieldMatcher(); - while ((token = parser.nextToken()) != Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); - } else if (token.isValue()) { - if (matcher.match(fieldName, TRIGRAM_FIELD)) { - trigramLambda = parser.doubleValue(); - if (trigramLambda < 0) { - throw new IllegalArgumentException("trigram_lambda must be positive"); - } - } else if (matcher.match(fieldName, BIGRAM_FIELD)) { - bigramLambda = parser.doubleValue(); - if (bigramLambda < 0) { - throw new IllegalArgumentException("bigram_lambda must be positive"); - } - } else if (matcher.match(fieldName, UNIGRAM_FIELD)) { - unigramLambda = parser.doubleValue(); - if (unigramLambda < 0) { - throw new IllegalArgumentException("unigram_lambda must be positive"); - } - } else { - throw new IllegalArgumentException( - "suggester[phrase][smoothing][linear] doesn't support field 
[" + fieldName + "]"); - } - } else { - throw new ParsingException(parser.getTokenLocation(), - "[" + NAME + "] unknown token [" + token + "] after [" + fieldName + "]"); - } - } - return new LinearInterpolation(trigramLambda, bigramLambda, unigramLambda); - } - - @Override - public WordScorerFactory buildWordScorerFactory() { - return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) -> - new LinearInterpoatingScorer(reader, terms, field, realWordLikelyhood, separator, trigramLambda, bigramLambda, - unigramLambda); - } - } - @Override protected PhraseSuggestionBuilder innerFromXContent(QueryParseContext parseContext, String suggestionName) throws IOException { XContentParser parser = parseContext.parser(); @@ -873,7 +474,6 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder candidateGenerators : this.generators.values()) { + for (CandidateGenerator candidateGenerator : candidateGenerators) { + suggestionContext.addGenerator(candidateGenerator.build(mapperService)); + } + } + + if (this.model != null) { + suggestionContext.setModel(this.model.buildWordScorerFactory()); + } + + if (this.collateQuery != null) { + CompiledScript compiledScript = context.getScriptService().compile(this.collateQuery, ScriptContext.Standard.SEARCH, + Collections.emptyMap()); + suggestionContext.setCollateQueryScript(compiledScript); + if (this.collateParams != null) { + suggestionContext.setCollateScriptParams(this.collateParams); + } + suggestionContext.setCollatePrune(this.collatePrune); + } + + // TODO remove this when field is mandatory in builder ctor + if (suggestionContext.getField() == null) { + throw new IllegalArgumentException("The required field option is missing"); + } + + MappedFieldType fieldType = mapperService.fullName(suggestionContext.getField()); + if (fieldType == null) { + throw new IllegalArgumentException("No mapping found for field [" + suggestionContext.getField() + "]"); + } else if 
(suggestionContext.getAnalyzer() == null) { + // no analyzer name passed in, so try the field's analyzer, or the default analyzer + if (fieldType.searchAnalyzer() == null) { + suggestionContext.setAnalyzer(mapperService.searchAnalyzer()); + } else { + suggestionContext.setAnalyzer(fieldType.searchAnalyzer()); + } + } + + if (suggestionContext.model() == null) { + suggestionContext.setModel(StupidBackoffScorer.FACTORY); + } + + if (this.gramSize == null || suggestionContext.generators().isEmpty()) { + final ShingleTokenFilterFactory.Factory shingleFilterFactory = SuggestUtils + .getShingleFilterFactory(suggestionContext.getAnalyzer()); + if (this.gramSize == null) { + // try to detect the shingle size + if (shingleFilterFactory != null) { + suggestionContext.setGramSize(shingleFilterFactory.getMaxShingleSize()); + if (suggestionContext.getAnalyzer() == null && shingleFilterFactory.getMinShingleSize() > 1 + && !shingleFilterFactory.getOutputUnigrams()) { + throw new IllegalArgumentException("The default analyzer for field: [" + suggestionContext.getField() + + "] doesn't emit unigrams. If this is intentional try to set the analyzer explicitly"); + } + } + } + if (suggestionContext.generators().isEmpty()) { + if (shingleFilterFactory != null && shingleFilterFactory.getMinShingleSize() > 1 + && !shingleFilterFactory.getOutputUnigrams() && suggestionContext.getRequireUnigram()) { + throw new IllegalArgumentException("The default candidate generator for phrase suggest can't operate on field: [" + + suggestionContext.getField() + "] since it doesn't emit unigrams. 
" + + "If this is intentional try to set the candidate generator field explicitly"); + } + // use a default generator on the same field + DirectCandidateGenerator generator = new DirectCandidateGenerator(); + generator.setField(suggestionContext.getField()); + suggestionContext.addGenerator(generator); + } + } + return suggestionContext; + } + private static void ensureNoSmoothing(PhraseSuggestionBuilder suggestion) { if (suggestion.smoothingModel() != null) { throw new IllegalArgumentException("only one smoothing model supported"); @@ -1010,5 +702,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder collateScriptParams = new HashMap<>(1); private WordScorer.WordScorerFactory scorer; - public PhraseSuggestionContext(Suggester suggester) { - super(suggester); + public PhraseSuggestionContext(QueryShardContext shardContext) { + super(PhraseSuggester.PROTOTYPE, shardContext); } public float maxErrors() { @@ -154,8 +154,6 @@ class PhraseSuggestionContext extends SuggestionContext { public void postFilter(Analyzer postFilter) { this.postFilter = postFilter; } - - } public void setRequireUnigram(boolean requireUnigram) { @@ -213,5 +211,4 @@ class PhraseSuggestionContext extends SuggestionContext { boolean collatePrune() { return prune; } - } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/SmoothingModel.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/SmoothingModel.java new file mode 100644 index 000000000000..0163c560de4f --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/SmoothingModel.java @@ -0,0 +1,105 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.suggest.phrase; + +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory; + +import java.io.IOException; + +public abstract class SmoothingModel implements NamedWriteable, ToXContent { + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(getWriteableName()); + innerToXContent(builder,params); + builder.endObject(); + return builder; + } + + @Override + public final boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + SmoothingModel other = (SmoothingModel) obj; + return doEquals(other); + } + + @Override + public final int hashCode() { + /* + * Override hashCode here and forward to an abstract method to force + * extensions of this class to override hashCode in the same way that we + * force them to override equals. This also prevents false positives in + * CheckStyle's EqualsHashCode check. 
+ */ + return doHashCode(); + } + + protected abstract int doHashCode(); + + public static SmoothingModel fromXContent(QueryParseContext parseContext) throws IOException { + XContentParser parser = parseContext.parser(); + ParseFieldMatcher parseFieldMatcher = parseContext.parseFieldMatcher(); + XContentParser.Token token; + String fieldName = null; + SmoothingModel model = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + fieldName = parser.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + if (parseFieldMatcher.match(fieldName, LinearInterpolation.PARSE_FIELD)) { + model = LinearInterpolation.PROTOTYPE.innerFromXContent(parseContext); + } else if (parseFieldMatcher.match(fieldName, Laplace.PARSE_FIELD)) { + model = Laplace.PROTOTYPE.innerFromXContent(parseContext); + } else if (parseFieldMatcher.match(fieldName, StupidBackoff.PARSE_FIELD)) { + model = StupidBackoff.PROTOTYPE.innerFromXContent(parseContext); + } else { + throw new IllegalArgumentException("suggester[phrase] doesn't support object field [" + fieldName + "]"); + } + } else { + throw new ParsingException(parser.getTokenLocation(), + "[smoothing] unknown token [" + token + "] after [" + fieldName + "]"); + } + } + return model; + } + + public abstract SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException; + + public abstract WordScorerFactory buildWordScorerFactory(); + + /** + * subtype specific implementation of "equals". 
+ */ + protected abstract boolean doEquals(SmoothingModel other); + + protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException; +} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoff.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoff.java new file mode 100644 index 000000000000..9611622d8c6e --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoff.java @@ -0,0 +1,129 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.suggest.phrase; + +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.Terms; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory; + +import java.io.IOException; +import java.util.Objects; + +/** + * A "stupid-backoff" smoothing model simialr to Katz's + * Backoff. This model is used as the default if no model is configured. + *

      + * See N-Gram + * Smoothing for details. + *

      + */ +public final class StupidBackoff extends SmoothingModel { + /** + * Default discount parameter for {@link StupidBackoff} smoothing + */ + public static final double DEFAULT_BACKOFF_DISCOUNT = 0.4; + public static final StupidBackoff PROTOTYPE = new StupidBackoff(DEFAULT_BACKOFF_DISCOUNT); + private double discount = DEFAULT_BACKOFF_DISCOUNT; + private static final String NAME = "stupid_backoff"; + private static final ParseField DISCOUNT_FIELD = new ParseField("discount"); + static final ParseField PARSE_FIELD = new ParseField(NAME); + + /** + * Creates a Stupid-Backoff smoothing model. + * + * @param discount + * the discount given to lower order ngrams if the higher order ngram doesn't exits + */ + public StupidBackoff(double discount) { + this.discount = discount; + } + + /** + * @return the discount parameter of the model + */ + public double getDiscount() { + return this.discount; + } + + @Override + protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(DISCOUNT_FIELD.getPreferredName(), discount); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeDouble(discount); + } + + @Override + public StupidBackoff readFrom(StreamInput in) throws IOException { + return new StupidBackoff(in.readDouble()); + } + + @Override + protected boolean doEquals(SmoothingModel other) { + StupidBackoff otherModel = (StupidBackoff) other; + return Objects.equals(discount, otherModel.discount); + } + + @Override + protected final int doHashCode() { + return Objects.hash(discount); + } + + @Override + public SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException { + XContentParser parser = parseContext.parser(); + XContentParser.Token token; + String fieldName = null; + double discount = DEFAULT_BACKOFF_DISCOUNT; + while ((token = parser.nextToken()) != 
Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + fieldName = parser.currentName(); + } + if (token.isValue() && parseContext.parseFieldMatcher().match(fieldName, DISCOUNT_FIELD)) { + discount = parser.doubleValue(); + } + } + return new StupidBackoff(discount); + } + + @Override + public WordScorerFactory buildWordScorerFactory() { + return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) + -> new StupidBackoffScorer(reader, terms, field, realWordLikelyhood, separator, discount); + } +} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java index a2fd680c215a..7e75976d3a39 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java @@ -20,8 +20,8 @@ package org.elasticsearch.search.suggest.term; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.suggest.DirectSpellcheckerSettings; import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.SuggestUtils; @@ -38,10 +38,11 @@ public final class TermSuggestParser implements SuggestContextParser { } @Override - public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexFieldDataService fieldDataService) throws IOException { + public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, QueryShardContext shardContext) throws IOException { + MapperService mapperService = shardContext.getMapperService(); XContentParser.Token 
token; String fieldName = null; - TermSuggestionContext suggestion = new TermSuggestionContext(suggester); + TermSuggestionContext suggestion = new TermSuggestionContext(shardContext); DirectSpellcheckerSettings settings = suggestion.getDirectSpellCheckerSettings(); while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java index e67e619bf516..78ed8be6a28f 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java @@ -40,6 +40,8 @@ import java.util.List; public final class TermSuggester extends Suggester { + public static final TermSuggester PROTOTYPE = new TermSuggester(); + @Override public TermSuggestion innerExecute(String name, TermSuggestionContext suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java index 1378c362c542..62d6718cd2c9 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java @@ -26,7 +26,9 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.suggest.SuggestionBuilder; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import java.io.IOException; import java.util.Locale; @@ -381,6 
+383,12 @@ public class TermSuggestionBuilder extends SuggestionBuilder suggester) { - super(suggester); + public TermSuggestionContext(QueryShardContext shardContext) { + super(TermSuggester.PROTOTYPE, shardContext); } public DirectSpellcheckerSettings getDirectSpellCheckerSettings() { return settings; } - } \ No newline at end of file diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 0b0691bc5883..656e3ab6a618 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -63,8 +63,8 @@ import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.ScriptContextRegistry; import org.elasticsearch.script.ScriptEngineRegistry; import org.elasticsearch.script.ScriptEngineService; -import org.elasticsearch.script.ScriptSettings; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptSettings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.engine.MockEngineFactory; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index 3c5797f1e4a8..71431e70f3af 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.suggest; +import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; @@ -31,19 +32,48 @@ import 
org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AnalysisService; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.mapper.ContentPath; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperBuilders; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.core.StringFieldMapper; +import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.script.CompiledScript; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptContextRegistry; +import org.elasticsearch.script.ScriptEngineRegistry; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptServiceTests.TestEngineService; +import org.elasticsearch.script.ScriptSettings; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.watcher.ResourceWatcherService; import org.junit.AfterClass; import org.junit.BeforeClass; import java.io.IOException; +import java.nio.file.Path; import 
java.util.Collections; +import java.util.Iterator; +import java.util.Map; +import java.util.Map.Entry; import java.util.function.Consumer; import java.util.function.Supplier; +import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -51,13 +81,35 @@ public abstract class AbstractSuggestionBuilderTestCase params) { + return new CompiledScript(ScriptType.INLINE, "mockName", "mocklang", script); + } + }; + suggesters = new Suggesters(Collections.emptyMap()); + parseElement = new SuggestParseElement(suggesters); + namedWriteableRegistry = new NamedWriteableRegistry(); namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, TermSuggestionBuilder.PROTOTYPE); namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, PhraseSuggestionBuilder.PROTOTYPE); @@ -69,7 +121,6 @@ public abstract class AbstractSuggestionBuilderTestCase> iterator = buildSuggestSearchContext.suggestions().entrySet().iterator(); + for (Entry entry : parsedSuggestionSearchContext.suggestions().entrySet()) { + Entry other = iterator.next(); + assertEquals(entry.getKey(), other.getKey()); + + SuggestionContext oldSchoolContext = entry.getValue(); + SuggestionContext newSchoolContext = other.getValue(); + assertNotSame(oldSchoolContext, newSchoolContext); + // deep comparison of analyzers is difficult here, but we check they are set or not set + if (oldSchoolContext.getAnalyzer() != null) { + assertNotNull(newSchoolContext.getAnalyzer()); + } else { + assertNull(newSchoolContext.getAnalyzer()); + } + assertEquals(oldSchoolContext.getField(), newSchoolContext.getField()); + assertEquals(oldSchoolContext.getPrefix(), newSchoolContext.getPrefix()); + assertEquals(oldSchoolContext.getRegex(), newSchoolContext.getRegex()); + assertEquals(oldSchoolContext.getShardSize(), newSchoolContext.getShardSize()); + assertEquals(oldSchoolContext.getSize(), newSchoolContext.getSize()); + 
assertEquals(oldSchoolContext.getSuggester().getClass(), newSchoolContext.getSuggester().getClass()); + assertEquals(oldSchoolContext.getText(), newSchoolContext.getText()); + assertEquals(oldSchoolContext.getClass(), newSchoolContext.getClass()); + + assertSuggestionContext(oldSchoolContext, newSchoolContext); + } + } + } + + /** + * compare two SuggestionContexte implementations for the special suggestion type under test + */ + protected abstract void assertSuggestionContext(SuggestionContext oldSuggestion, SuggestionContext newSuggestion); + private SB mutate(SB firstBuilder) throws IOException { SB mutation = serializedCopy(firstBuilder); assertNotSame(mutation, firstBuilder); @@ -201,14 +344,16 @@ public abstract class AbstractSuggestionBuilderTestCase T randomValueOtherThan(T input, Supplier randomSupplier) { T randomValue = null; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java index 4dbae08080aa..68e62983b54a 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.suggest; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.common.text.Text; +import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; import java.util.Locale; @@ -54,9 +55,9 @@ public class CustomSuggester extends Suggester { + return (parser, shardContext) -> { Map options = parser.map(); - CustomSuggestionsContext suggestionContext = new CustomSuggestionsContext(CustomSuggester.this, options); + CustomSuggestionsContext suggestionContext = new CustomSuggestionsContext(shardContext, options); suggestionContext.setField((String) options.get("field")); return suggestionContext; }; @@ -66,8 +67,8 @@ public class CustomSuggester extends 
Suggester options; - public CustomSuggestionsContext(Suggester suggester, Map options) { - super(suggester); + public CustomSuggestionsContext(QueryShardContext context, Map options) { + super(new CustomSuggester(), context); this.options = options; } } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java index b3af0eee142e..5bddad8bb022 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java @@ -25,7 +25,9 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -132,6 +134,12 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase { return new CustomSuggestionBuilder(name, randomField, randomSuffix); } + @Override + protected SuggestionContext build(QueryShardContext context) throws IOException { + // NORELEASE + return null; + } + } } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java index 2f53aaed9cfb..f4551b3de9a0 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java @@ -32,12 +32,12 @@ import org.elasticsearch.common.xcontent.XContentType; import 
org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; import org.elasticsearch.search.suggest.completion.WritableTestCase; +import org.elasticsearch.search.suggest.phrase.Laplace; +import org.elasticsearch.search.suggest.phrase.LinearInterpolation; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.Laplace; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.LinearInterpolation; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.StupidBackoff; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilderTests; +import org.elasticsearch.search.suggest.phrase.SmoothingModel; +import org.elasticsearch.search.suggest.phrase.StupidBackoff; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import java.io.IOException; @@ -62,7 +62,7 @@ public class SuggestBuilderTests extends WritableTestCase { * creates random suggestion builder, renders it to xContent and back to new instance that should be equal to original */ public void testFromXContent() throws IOException { - Suggesters suggesters = new Suggesters(Collections.emptyMap(), null, null); + Suggesters suggesters = new Suggesters(Collections.emptyMap()); QueryParseContext context = new QueryParseContext(null); context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY)); for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) { diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java index 02826b9a7eb2..9bf8447f8d80 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java +++ 
b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java @@ -34,15 +34,10 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.index.mapper.MapperBuilders; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator; @@ -171,19 +166,10 @@ public class DirectCandidateGeneratorTests extends ESTestCase{ } }; - QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, mockMapperService, null, null, null) { - @Override - public MappedFieldType fieldMapper(String name) { - StringFieldMapper.Builder builder = MapperBuilders.stringField(name); - return builder.build(new Mapper.BuilderContext(idxSettings.getSettings(), new ContentPath(1))).fieldType(); - } - }; - mockShardContext.setMapUnmappedFieldAsString(true); - for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) { DirectCandidateGeneratorBuilder generator = randomCandidateGenerator(); // first, build via DirectCandidateGenerator#build() - DirectCandidateGenerator contextGenerator = generator.build(mockShardContext); + DirectCandidateGenerator contextGenerator = generator.build(mockMapperService); // second, render random test generator to xContent and parse using // 
PhraseSuggestParser @@ -195,28 +181,32 @@ public class DirectCandidateGeneratorTests extends ESTestCase{ XContentParser parser = XContentHelper.createParser(builder.bytes()); DirectCandidateGenerator secondGenerator = PhraseSuggestParser.parseCandidateGenerator(parser, - mockShardContext.getMapperService(), mockShardContext.parseFieldMatcher()); + mockMapperService, ParseFieldMatcher.EMPTY); // compare their properties assertNotSame(contextGenerator, secondGenerator); - assertEquals(contextGenerator.field(), secondGenerator.field()); - assertEquals(contextGenerator.accuracy(), secondGenerator.accuracy(), Float.MIN_VALUE); - assertEquals(contextGenerator.maxTermFreq(), secondGenerator.maxTermFreq(), Float.MIN_VALUE); - assertEquals(contextGenerator.maxEdits(), secondGenerator.maxEdits()); - assertEquals(contextGenerator.maxInspections(), secondGenerator.maxInspections()); - assertEquals(contextGenerator.minDocFreq(), secondGenerator.minDocFreq(), Float.MIN_VALUE); - assertEquals(contextGenerator.minWordLength(), secondGenerator.minWordLength()); - assertEquals(contextGenerator.postFilter(), secondGenerator.postFilter()); - assertEquals(contextGenerator.prefixLength(), secondGenerator.prefixLength()); - assertEquals(contextGenerator.preFilter(), secondGenerator.preFilter()); - assertEquals(contextGenerator.sort(), secondGenerator.sort()); - assertEquals(contextGenerator.size(), secondGenerator.size()); - // some instances of StringDistance don't support equals, just checking the class here - assertEquals(contextGenerator.stringDistance().getClass(), secondGenerator.stringDistance().getClass()); - assertEquals(contextGenerator.suggestMode(), secondGenerator.suggestMode()); + assertEqualGenerators(contextGenerator, secondGenerator); } } + public static void assertEqualGenerators(DirectCandidateGenerator first, DirectCandidateGenerator second) { + assertEquals(first.field(), second.field()); + assertEquals(first.accuracy(), second.accuracy(), Float.MIN_VALUE); + 
assertEquals(first.maxTermFreq(), second.maxTermFreq(), Float.MIN_VALUE); + assertEquals(first.maxEdits(), second.maxEdits()); + assertEquals(first.maxInspections(), second.maxInspections()); + assertEquals(first.minDocFreq(), second.minDocFreq(), Float.MIN_VALUE); + assertEquals(first.minWordLength(), second.minWordLength()); + assertEquals(first.postFilter(), second.postFilter()); + assertEquals(first.prefixLength(), second.prefixLength()); + assertEquals(first.preFilter(), second.preFilter()); + assertEquals(first.sort(), second.sort()); + assertEquals(first.size(), second.size()); + // some instances of StringDistance don't support equals, just checking the class here + assertEquals(first.stringDistance().getClass(), second.stringDistance().getClass()); + assertEquals(first.suggestMode(), second.suggestMode()); + } + /** * test that bad xContent throws exception */ diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/LaplaceModelTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/LaplaceModelTests.java index 1a939018b8fb..96ac0c9cb27b 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/LaplaceModelTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/LaplaceModelTests.java @@ -19,9 +19,6 @@ package org.elasticsearch.search.suggest.phrase; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.Laplace; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel; - import static org.hamcrest.Matchers.instanceOf; public class LaplaceModelTests extends SmoothingModelTestCase { diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/LinearInterpolationModelTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/LinearInterpolationModelTests.java index 7984395abcce..ed663ef52410 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/LinearInterpolationModelTests.java +++ 
b/core/src/test/java/org/elasticsearch/search/suggest/phrase/LinearInterpolationModelTests.java @@ -19,9 +19,6 @@ package org.elasticsearch.search.suggest.phrase; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.LinearInterpolation; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel; - import static org.hamcrest.Matchers.instanceOf; public class LinearInterpolationModelTests extends SmoothingModelTestCase { diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java index d74719fa6f77..2a553ef8cb9f 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java @@ -21,16 +21,17 @@ package org.elasticsearch.search.suggest.phrase; import org.elasticsearch.script.Template; import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.Laplace; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.LinearInterpolation; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.StupidBackoff; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator; import org.junit.BeforeClass; import java.io.IOException; import java.util.HashMap; +import java.util.Iterator; import java.util.Map; +import static org.hamcrest.Matchers.instanceOf; + public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestCase { @BeforeClass @@ -70,7 +71,7 @@ public class PhraseSuggestionBuilderTests extends 
AbstractSuggestionBuilderTestC } maybeSet(testBuilder::gramSize, randomIntBetween(1, 5)); maybeSet(testBuilder::forceUnigrams, randomBoolean()); - maybeSet(testBuilder::tokenLimit, randomInt(20)); + maybeSet(testBuilder::tokenLimit, randomIntBetween(1, 20)); if (randomBoolean()) { testBuilder.smoothingModel(randomSmoothingModel()); } @@ -115,7 +116,7 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC builder.gramSize(randomValueOtherThan(builder.gramSize(), () -> randomIntBetween(1, 5))); break; case 4: - builder.tokenLimit(randomValueOtherThan(builder.tokenLimit(), () -> randomInt(20))); + builder.tokenLimit(randomValueOtherThan(builder.tokenLimit(), () -> randomIntBetween(1, 20))); break; case 5: builder.separator(randomValueOtherThan(builder.separator(), () -> randomAsciiOfLengthBetween(1, 10))); @@ -158,4 +159,38 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC } } + @Override + protected void assertSuggestionContext(SuggestionContext oldSuggestion, SuggestionContext newSuggestion) { + assertThat(oldSuggestion, instanceOf(PhraseSuggestionContext.class)); + assertThat(newSuggestion, instanceOf(PhraseSuggestionContext.class)); + PhraseSuggestionContext oldPhraseSuggestion = (PhraseSuggestionContext) oldSuggestion; + PhraseSuggestionContext newPhraseSuggestion = (PhraseSuggestionContext) newSuggestion; + assertEquals(oldPhraseSuggestion.confidence(), newPhraseSuggestion.confidence(), Float.MIN_VALUE); + assertEquals(oldPhraseSuggestion.collatePrune(), newPhraseSuggestion.collatePrune()); + assertEquals(oldPhraseSuggestion.gramSize(), newPhraseSuggestion.gramSize()); + assertEquals(oldPhraseSuggestion.realworldErrorLikelyhood(), newPhraseSuggestion.realworldErrorLikelyhood(), Float.MIN_VALUE); + assertEquals(oldPhraseSuggestion.maxErrors(), newPhraseSuggestion.maxErrors(), Float.MIN_VALUE); + assertEquals(oldPhraseSuggestion.separator(), newPhraseSuggestion.separator()); + 
assertEquals(oldPhraseSuggestion.getTokenLimit(), newPhraseSuggestion.getTokenLimit()); + assertEquals(oldPhraseSuggestion.getRequireUnigram(), newPhraseSuggestion.getRequireUnigram()); + assertEquals(oldPhraseSuggestion.getPreTag(), newPhraseSuggestion.getPreTag()); + assertEquals(oldPhraseSuggestion.getPostTag(), newPhraseSuggestion.getPostTag()); + if (oldPhraseSuggestion.getCollateQueryScript() != null) { + // only assert that we have a compiled script on the other side + assertNotNull(newPhraseSuggestion.getCollateQueryScript()); + } + if (oldPhraseSuggestion.generators() != null) { + assertNotNull(newPhraseSuggestion.generators()); + assertEquals(oldPhraseSuggestion.generators().size(), newPhraseSuggestion.generators().size()); + Iterator secondList = newPhraseSuggestion.generators().iterator(); + for (DirectCandidateGenerator candidateGenerator : newPhraseSuggestion.generators()) { + DirectCandidateGeneratorTests.assertEqualGenerators(candidateGenerator, secondList.next()); + } + } + assertEquals(oldPhraseSuggestion.getCollateScriptParams(), newPhraseSuggestion.getCollateScriptParams()); + if (oldPhraseSuggestion.model() != null) { + assertNotNull(newPhraseSuggestion.model()); + } + } + } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java index 4672d9db9777..b0912c3ac489 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java @@ -45,10 +45,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.Laplace; -import 
org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.LinearInterpolation; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.StupidBackoff; import org.elasticsearch.test.ESTestCase; import org.junit.AfterClass; import org.junit.BeforeClass; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/StupidBackoffModelTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/StupidBackoffModelTests.java index 3a59c19b13ea..1b6e1cf2c883 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/StupidBackoffModelTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/StupidBackoffModelTests.java @@ -19,9 +19,6 @@ package org.elasticsearch.search.suggest.phrase; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.StupidBackoff; - import static org.hamcrest.Matchers.instanceOf; public class StupidBackoffModelTests extends SmoothingModelTestCase { diff --git a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java index ca5f3f880ec1..f389b0cb18bd 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.suggest.term; import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SortBy; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.StringDistanceImpl; import 
org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; @@ -33,6 +34,14 @@ import static org.hamcrest.Matchers.notNullValue; */ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCase { + /** + * creates random suggestion builder, renders it to xContent and back to new instance that should be equal to original + */ + @Override + public void testBuild() throws IOException { + // skip for now + } + @Override protected TermSuggestionBuilder randomSuggestionBuilder() { TermSuggestionBuilder testBuilder = new TermSuggestionBuilder(randomAsciiOfLength(10)); @@ -245,4 +254,9 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas assertThat(builder.suggestMode(), notNullValue()); } + @Override + protected void assertSuggestionContext(SuggestionContext oldSuggestion, SuggestionContext newSuggestion) { + // put assertions on TermSuggestionContext here + } + } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java index d66ee0a6b3c9..1f2952e36178 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java @@ -20,38 +20,6 @@ package org.elasticsearch.messy.tests; -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; -import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion; -import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion; -import static 
org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.candidateGenerator; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestion; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionPhraseCollateMatchExists; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionSize; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; -import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.endsWith; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.nullValue; - -import java.io.IOException; -import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ExecutionException; - import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -71,13 +39,47 @@ import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.search.suggest.SuggestionBuilder; import org.elasticsearch.search.suggest.phrase.DirectCandidateGeneratorBuilder; +import org.elasticsearch.search.suggest.phrase.Laplace; +import org.elasticsearch.search.suggest.phrase.LinearInterpolation; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; +import org.elasticsearch.search.suggest.phrase.StupidBackoff; import 
org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SortBy; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; +import java.io.IOException; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; + +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; +import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.elasticsearch.index.query.QueryBuilders.matchQuery; +import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion; +import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestion; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionPhraseCollateMatchExists; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionSize; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.nullValue; + /** * 
Integration tests for term and phrase suggestions. Many of these tests many requests that vary only slightly from one another. Where * possible these tests should declare for the first request, make the request, modify the configuration for the next request, make that @@ -227,6 +229,16 @@ public class SuggestSearchTests extends ESIntegTestCase { assertSuggestionSize(searchSuggest, 0, 0, "did_you_mean"); } + /** + * Creates a new {@link DirectCandidateGeneratorBuilder} + * + * @param field + * the field this candidate generator operates on. + */ + private DirectCandidateGeneratorBuilder candidateGenerator(String field) { + return new DirectCandidateGeneratorBuilder(field); + } + // see #2729 public void testSizeOneShard() throws Exception { prepareCreate("test").setSettings( @@ -286,7 +298,7 @@ public class SuggestSearchTests extends ESIntegTestCase { refresh(); PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("did_you_mean").field("name.shingled") - .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2)) + .addCandidateGenerator(candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2)) .gramSize(3); Suggest searchSuggest = searchSuggest( "ice tea", phraseSuggestion); assertSuggestion(searchSuggest, 0, 0, "did_you_mean", "iced tea"); @@ -439,7 +451,7 @@ public class SuggestSearchTests extends ESIntegTestCase { Suggest searchSuggest = searchSuggest( "a an the", phraseSuggestion("simple_phrase").field("body").gramSize(1) - .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").minWordLength(1).suggestMode("always")) + .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always")) .size(1)); assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase"); } @@ -475,13 +487,13 @@ public class SuggestSearchTests extends ESIntegTestCase { Suggest searchSuggest = searchSuggest( "hello word", 
phraseSuggestion("simple_phrase").field("body") - .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").prefixLength(4).minWordLength(1).suggestMode("always")) + .addCandidateGenerator(candidateGenerator("body").prefixLength(4).minWordLength(1).suggestMode("always")) .size(1).confidence(1.0f)); assertSuggestion(searchSuggest, 0, "simple_phrase", "hello words"); searchSuggest = searchSuggest( "hello word", phraseSuggestion("simple_phrase").field("body") - .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").prefixLength(2).minWordLength(1).suggestMode("always")) + .addCandidateGenerator(candidateGenerator("body").prefixLength(2).minWordLength(1).suggestMode("always")) .size(1).confidence(1.0f)); assertSuggestion(searchSuggest, 0, "simple_phrase", "hello world"); } @@ -573,17 +585,17 @@ public class SuggestSearchTests extends ESIntegTestCase { // set all mass to trigrams (not indexed) phraseSuggest.clearCandidateGenerators() .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always")) - .smoothingModel(new PhraseSuggestionBuilder.LinearInterpolation(1,0,0)); + .smoothingModel(new LinearInterpolation(1,0,0)); searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase"); // set all mass to bigrams - phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.LinearInterpolation(0,1,0)); + phraseSuggest.smoothingModel(new LinearInterpolation(0,1,0)); searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); // distribute mass - phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.LinearInterpolation(0.4,0.4,0.2)); + phraseSuggest.smoothingModel(new LinearInterpolation(0.4,0.4,0.2)); searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); @@ -591,15 +603,15 @@ public 
class SuggestSearchTests extends ESIntegTestCase { assertSuggestion(searchSuggest, 0, "simple_phrase", "american ace"); // try all smoothing methods - phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.LinearInterpolation(0.4,0.4,0.2)); + phraseSuggest.smoothingModel(new LinearInterpolation(0.4,0.4,0.2)); searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); - phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.Laplace(0.2)); + phraseSuggest.smoothingModel(new Laplace(0.2)); searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); - phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.StupidBackoff(0.1)); + phraseSuggest.smoothingModel(new StupidBackoff(0.1)); searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); @@ -608,7 +620,7 @@ public class SuggestSearchTests extends ESIntegTestCase { searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase"); - phraseSuggest.tokenLimit(15).smoothingModel(new PhraseSuggestionBuilder.StupidBackoff(0.1)); + phraseSuggest.tokenLimit(15).smoothingModel(new StupidBackoff(0.1)); searchSuggest = searchSuggest( "Xor the Got-Jewel Xor the Got-Jewel Xor the Got-Jewel", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel xorr the god jewel xorr the god jewel"); // Check the name this time because we're repeating it which is funky @@ -671,7 +683,7 @@ public class SuggestSearchTests extends ESIntegTestCase { .gramSize(2) .analyzer("body") .addCandidateGenerator(candidateGenerator("body").minWordLength(1).prefixLength(1).suggestMode("always").size(1).accuracy(0.1f)) - .smoothingModel(new PhraseSuggestionBuilder.StupidBackoff(0.1)) + .smoothingModel(new 
StupidBackoff(0.1)) .maxErrors(1.0f) .size(5); Suggest searchSuggest = searchSuggest( "Xorr the Gut-Jewel", phraseSuggestion); @@ -931,7 +943,7 @@ public class SuggestSearchTests extends ESIntegTestCase { Suggest searchSuggest = searchSuggest("nobel prize", phraseSuggestion("simple_phrase") .field("body") - .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f)) + .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f)) .confidence(2f) .maxErrors(5f) .size(1)); @@ -939,7 +951,7 @@ public class SuggestSearchTests extends ESIntegTestCase { searchSuggest = searchSuggest("noble prize", phraseSuggestion("simple_phrase") .field("body") - .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f)) + .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f)) .confidence(2f) .maxErrors(5f) .size(1)); @@ -1070,7 +1082,7 @@ public class SuggestSearchTests extends ESIntegTestCase { PhraseSuggestionBuilder suggest = phraseSuggestion("title") .field("title") - .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("title") + .addCandidateGenerator(candidateGenerator("title") .suggestMode("always") .maxTermFreq(.99f) .size(1000) // Setting a silly high size helps of generate a larger list of candidates for testing. 
@@ -1135,7 +1147,7 @@ public class SuggestSearchTests extends ESIntegTestCase { // suggest without collate PhraseSuggestionBuilder suggest = phraseSuggestion("title") .field("title") - .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("title") + .addCandidateGenerator(candidateGenerator("title") .suggestMode("always") .maxTermFreq(.99f) .size(10) From 2038429f63cd31721c0522d2d49eab66303c68fb Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Wed, 10 Feb 2016 16:21:24 -0500 Subject: [PATCH 021/320] initial refactoring of completion suggester --- .../common/io/stream/StreamInput.java | 10 +- .../common/io/stream/StreamOutput.java | 10 +- .../completion/CompletionSuggestParser.java | 27 +- .../CompletionSuggestionBuilder.java | 310 +++++------------- .../CompletionSuggestionContext.java | 34 +- .../suggest/completion/FuzzyOptions.java | 277 ++++++++++++++++ .../suggest/completion/RegexOptions.java | 153 +++++++++ .../context/CategoryContextMapping.java | 8 +- .../context/CategoryQueryContext.java | 144 +++++--- .../completion/context/ContextMappings.java | 7 +- .../completion/context/GeoContextMapping.java | 9 +- .../completion/context/GeoQueryContext.java | 219 ++++++++----- .../completion/context/QueryContext.java | 34 ++ .../AbstractSuggestionBuilderTestCase.java | 29 +- .../suggest/CompletionSuggestSearchIT.java | 35 +- .../search/suggest/SuggestBuilderTests.java | 2 +- .../completion/CategoryQueryContextTests.java | 95 ++++++ .../CompletionSuggesterBuilderTests.java | 135 ++++++++ .../suggest/completion/FuzzyOptionsTests.java | 131 ++++++++ .../completion/GeoQueryContextTests.java | 136 ++++++++ .../completion/QueryContextTestCase.java | 60 ++++ .../suggest/completion/RegexOptionsTests.java | 71 ++++ .../suggest/completion/WritableTestCase.java | 4 +- .../phrase/DirectCandidateGeneratorTests.java | 6 - .../org/elasticsearch/test/ESTestCase.java | 22 ++ 25 files changed, 1525 insertions(+), 443 deletions(-) create mode 100644 
core/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java create mode 100644 core/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java create mode 100644 core/src/main/java/org/elasticsearch/search/suggest/completion/context/QueryContext.java create mode 100644 core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryQueryContextTests.java create mode 100644 core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java create mode 100644 core/src/test/java/org/elasticsearch/search/suggest/completion/FuzzyOptionsTests.java create mode 100644 core/src/test/java/org/elasticsearch/search/suggest/completion/GeoQueryContextTests.java create mode 100644 core/src/test/java/org/elasticsearch/search/suggest/completion/QueryContextTestCase.java create mode 100644 core/src/test/java/org/elasticsearch/search/suggest/completion/RegexOptionsTests.java diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index 8c2dc444d27a..10f7a5dba888 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -39,6 +39,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.suggest.SuggestionBuilder; +import org.elasticsearch.search.suggest.completion.context.QueryContext; import org.elasticsearch.search.suggest.phrase.SmoothingModel; import org.elasticsearch.tasks.Task; import org.joda.time.DateTime; @@ -693,6 +694,13 @@ public abstract class StreamInput extends InputStream { return readNamedWriteable(SuggestionBuilder.class); } + /** + * Reads a completion {@link QueryContext} from the current stream + */ + public QueryContext 
readCompletionSuggestionQueryContext() throws IOException { + return readNamedWriteable(QueryContext.class); + } + /** * Reads a {@link org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder} from the current stream */ @@ -706,7 +714,7 @@ public abstract class StreamInput extends InputStream { public SmoothingModel readPhraseSuggestionSmoothingModel() throws IOException { return readNamedWriteable(SmoothingModel.class); } - + /** * Reads a {@link Task.Status} from the current stream. */ diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index d14d6e77ff4a..47019483476a 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -38,6 +38,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.suggest.SuggestionBuilder; +import org.elasticsearch.search.suggest.completion.context.QueryContext; import org.elasticsearch.search.suggest.phrase.SmoothingModel; import org.elasticsearch.tasks.Task; import org.joda.time.ReadableInstant; @@ -678,7 +679,7 @@ public abstract class StreamOutput extends OutputStream { public void writePhraseSuggestionSmoothingModel(SmoothingModel smoothinModel) throws IOException { writeNamedWriteable(smoothinModel); } - + /** * Writes a {@link Task.Status} to the current stream. 
*/ @@ -717,4 +718,11 @@ public abstract class StreamOutput extends OutputStream { public void writeSuggestion(SuggestionBuilder suggestion) throws IOException { writeNamedWriteable(suggestion); } + + /** + * Writes a completion {@link QueryContext} to the current stream + */ + public void writeCompletionSuggestionQueryContext(QueryContext queryContext) throws IOException { + writeNamedWriteable(queryContext); + } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java index 9d2952511512..04f63042d492 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java @@ -34,8 +34,6 @@ import org.elasticsearch.index.query.RegexpFlag; import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.SuggestUtils.Fields; import org.elasticsearch.search.suggest.SuggestionSearchContext; -import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder.FuzzyOptionsBuilder; -import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder.RegexOptionsBuilder; import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.ContextMappings; @@ -76,14 +74,14 @@ import java.util.Map; public class CompletionSuggestParser implements SuggestContextParser { private static ObjectParser TLP_PARSER = new ObjectParser<>(CompletionSuggestionBuilder.SUGGESTION_NAME, null); - private static ObjectParser REGEXP_PARSER = new ObjectParser<>(RegexOptionsBuilder.REGEX_OPTIONS.getPreferredName(), CompletionSuggestionBuilder.RegexOptionsBuilder::new); - private static ObjectParser FUZZY_PARSER = new ObjectParser<>(FuzzyOptionsBuilder.FUZZY_OPTIONS.getPreferredName(), 
CompletionSuggestionBuilder.FuzzyOptionsBuilder::new); + private static ObjectParser REGEXP_PARSER = new ObjectParser<>(RegexOptions.REGEX_OPTIONS.getPreferredName(), RegexOptions.Builder::new); + private static ObjectParser FUZZY_PARSER = new ObjectParser<>(FuzzyOptions.FUZZY_OPTIONS.getPreferredName(), FuzzyOptions.Builder::new); static { - FUZZY_PARSER.declareInt(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setFuzzyMinLength, FuzzyOptionsBuilder.MIN_LENGTH_FIELD); - FUZZY_PARSER.declareInt(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setMaxDeterminizedStates, FuzzyOptionsBuilder.MAX_DETERMINIZED_STATES_FIELD); - FUZZY_PARSER.declareBoolean(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setUnicodeAware, FuzzyOptionsBuilder.UNICODE_AWARE_FIELD); - FUZZY_PARSER.declareInt(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setFuzzyPrefixLength, FuzzyOptionsBuilder.PREFIX_LENGTH_FIELD); - FUZZY_PARSER.declareBoolean(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setTranspositions, FuzzyOptionsBuilder.TRANSPOSITION_FIELD); + FUZZY_PARSER.declareInt(FuzzyOptions.Builder::setFuzzyMinLength, FuzzyOptions.MIN_LENGTH_FIELD); + FUZZY_PARSER.declareInt(FuzzyOptions.Builder::setMaxDeterminizedStates, FuzzyOptions.MAX_DETERMINIZED_STATES_FIELD); + FUZZY_PARSER.declareBoolean(FuzzyOptions.Builder::setUnicodeAware, FuzzyOptions.UNICODE_AWARE_FIELD); + FUZZY_PARSER.declareInt(FuzzyOptions.Builder::setFuzzyPrefixLength, FuzzyOptions.PREFIX_LENGTH_FIELD); + FUZZY_PARSER.declareBoolean(FuzzyOptions.Builder::setTranspositions, FuzzyOptions.TRANSPOSITION_FIELD); FUZZY_PARSER.declareValue((a, b) -> { try { a.setFuzziness(Fuzziness.parse(b).asDistance()); @@ -91,12 +89,12 @@ public class CompletionSuggestParser implements SuggestContextParser { throw new ElasticsearchException(e); } }, Fuzziness.FIELD); - REGEXP_PARSER.declareInt(CompletionSuggestionBuilder.RegexOptionsBuilder::setMaxDeterminizedStates, RegexOptionsBuilder.MAX_DETERMINIZED_STATES); - 
REGEXP_PARSER.declareStringOrNull(CompletionSuggestionBuilder.RegexOptionsBuilder::setFlags, RegexOptionsBuilder.FLAGS_VALUE); + REGEXP_PARSER.declareInt(RegexOptions.Builder::setMaxDeterminizedStates, RegexOptions.MAX_DETERMINIZED_STATES); + REGEXP_PARSER.declareStringOrNull(RegexOptions.Builder::setFlags, RegexOptions.FLAGS_VALUE); TLP_PARSER.declareStringArray(CompletionSuggestionContext::setPayloadFields, CompletionSuggestionBuilder.PAYLOAD_FIELD); - TLP_PARSER.declareObjectOrDefault(CompletionSuggestionContext::setFuzzyOptionsBuilder, FUZZY_PARSER, CompletionSuggestionBuilder.FuzzyOptionsBuilder::new, FuzzyOptionsBuilder.FUZZY_OPTIONS); - TLP_PARSER.declareObject(CompletionSuggestionContext::setRegexOptionsBuilder, REGEXP_PARSER, RegexOptionsBuilder.REGEX_OPTIONS); + TLP_PARSER.declareObjectOrDefault(CompletionSuggestionContext::setFuzzyOptionsBuilder, FUZZY_PARSER, FuzzyOptions.Builder::new, FuzzyOptions.FUZZY_OPTIONS); + TLP_PARSER.declareObject(CompletionSuggestionContext::setRegexOptionsBuilder, REGEXP_PARSER, RegexOptions.REGEX_OPTIONS); TLP_PARSER.declareString(SuggestionSearchContext.SuggestionContext::setField, Fields.FIELD); TLP_PARSER.declareField((p, v, c) -> { String analyzerName = p.text(); @@ -172,7 +170,4 @@ public class CompletionSuggestParser implements SuggestContextParser { throw new IllegalArgumentException("Field [" + suggestion.getField() + "] is not a completion suggest field"); } } - - - } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 0bd37be128d0..fa8561998e9e 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -18,9 +18,6 @@ */ package org.elasticsearch.search.suggest.completion; -import 
org.apache.lucene.search.suggest.document.FuzzyCompletionQuery; -import org.apache.lucene.util.automaton.Operations; -import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -29,11 +26,11 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.query.RegexpFlag; import org.elasticsearch.search.suggest.SuggestionBuilder; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext; import org.elasticsearch.search.suggest.completion.context.GeoQueryContext; +import org.elasticsearch.search.suggest.completion.context.QueryContext; import java.io.IOException; import java.util.ArrayList; @@ -42,6 +39,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; /** @@ -57,207 +55,15 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder> queryContexts = new HashMap<>(); + private FuzzyOptions fuzzyOptions; + private RegexOptions regexOptions; + private final Map> queryContexts = new HashMap<>(); private final Set payloadFields = new HashSet<>(); public CompletionSuggestionBuilder(String name) { super(name); } - /** - * Options for fuzzy queries - */ - public static class FuzzyOptionsBuilder implements ToXContent { - static final ParseField FUZZY_OPTIONS = new ParseField("fuzzy"); - static final ParseField TRANSPOSITION_FIELD = new ParseField("transpositions"); - static final ParseField MIN_LENGTH_FIELD = new ParseField("min_length"); - static final ParseField PREFIX_LENGTH_FIELD = new ParseField("prefix_length"); - static 
final ParseField UNICODE_AWARE_FIELD = new ParseField("unicode_aware"); - static final ParseField MAX_DETERMINIZED_STATES_FIELD = new ParseField("max_determinized_states"); - - private int editDistance = FuzzyCompletionQuery.DEFAULT_MAX_EDITS; - private boolean transpositions = FuzzyCompletionQuery.DEFAULT_TRANSPOSITIONS; - private int fuzzyMinLength = FuzzyCompletionQuery.DEFAULT_MIN_FUZZY_LENGTH; - private int fuzzyPrefixLength = FuzzyCompletionQuery.DEFAULT_NON_FUZZY_PREFIX; - private boolean unicodeAware = FuzzyCompletionQuery.DEFAULT_UNICODE_AWARE; - private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES; - - public FuzzyOptionsBuilder() { - } - - /** - * Sets the level of fuzziness used to create suggestions using a {@link Fuzziness} instance. - * The default value is {@link Fuzziness#ONE} which allows for an "edit distance" of one. - */ - public FuzzyOptionsBuilder setFuzziness(int editDistance) { - this.editDistance = editDistance; - return this; - } - - /** - * Sets the level of fuzziness used to create suggestions using a {@link Fuzziness} instance. - * The default value is {@link Fuzziness#ONE} which allows for an "edit distance" of one. - */ - public FuzzyOptionsBuilder setFuzziness(Fuzziness fuzziness) { - this.editDistance = fuzziness.asDistance(); - return this; - } - - /** - * Sets if transpositions (swapping one character for another) counts as one character - * change or two. - * Defaults to true, meaning it uses the fuzzier option of counting transpositions as - * a single change. - */ - public FuzzyOptionsBuilder setTranspositions(boolean transpositions) { - this.transpositions = transpositions; - return this; - } - - /** - * Sets the minimum length of input string before fuzzy suggestions are returned, defaulting - * to 3. 
- */ - public FuzzyOptionsBuilder setFuzzyMinLength(int fuzzyMinLength) { - this.fuzzyMinLength = fuzzyMinLength; - return this; - } - - /** - * Sets the minimum length of the input, which is not checked for fuzzy alternatives, defaults to 1 - */ - public FuzzyOptionsBuilder setFuzzyPrefixLength(int fuzzyPrefixLength) { - this.fuzzyPrefixLength = fuzzyPrefixLength; - return this; - } - - /** - * Sets the maximum automaton states allowed for the fuzzy expansion - */ - public FuzzyOptionsBuilder setMaxDeterminizedStates(int maxDeterminizedStates) { - this.maxDeterminizedStates = maxDeterminizedStates; - return this; - } - - /** - * Set to true if all measurements (like edit distance, transpositions and lengths) are in unicode - * code points (actual letters) instead of bytes. Default is false. - */ - public FuzzyOptionsBuilder setUnicodeAware(boolean unicodeAware) { - this.unicodeAware = unicodeAware; - return this; - } - - /** - * Returns the maximum number of edits - */ - int getEditDistance() { - return editDistance; - } - - /** - * Returns if transpositions option is set - * - * if transpositions is set, then swapping one character for another counts as one edit instead of two. - */ - boolean isTranspositions() { - return transpositions; - } - - - /** - * Returns the length of input prefix after which edits are applied - */ - int getFuzzyMinLength() { - return fuzzyMinLength; - } - - /** - * Returns the minimum length of the input prefix required to apply any edits - */ - int getFuzzyPrefixLength() { - return fuzzyPrefixLength; - } - - /** - * Returns if all measurements (like edit distance, transpositions and lengths) are in unicode code - * points (actual letters) instead of bytes. 
- */ - boolean isUnicodeAware() { - return unicodeAware; - } - - /** - * Returns the maximum automaton states allowed for fuzzy expansion - */ - int getMaxDeterminizedStates() { - return maxDeterminizedStates; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(FUZZY_OPTIONS.getPreferredName()); - builder.field(Fuzziness.FIELD.getPreferredName(), editDistance); - builder.field(TRANSPOSITION_FIELD.getPreferredName(), transpositions); - builder.field(MIN_LENGTH_FIELD.getPreferredName(), fuzzyMinLength); - builder.field(PREFIX_LENGTH_FIELD.getPreferredName(), fuzzyPrefixLength); - builder.field(UNICODE_AWARE_FIELD.getPreferredName(), unicodeAware); - builder.field(MAX_DETERMINIZED_STATES_FIELD.getPreferredName(), maxDeterminizedStates); - builder.endObject(); - return builder; - } - } - - /** - * Options for regular expression queries - */ - public static class RegexOptionsBuilder implements ToXContent { - static final ParseField REGEX_OPTIONS = new ParseField("regex"); - static final ParseField FLAGS_VALUE = new ParseField("flags", "flags_value"); - static final ParseField MAX_DETERMINIZED_STATES = new ParseField("max_determinized_states"); - private int flagsValue = RegExp.ALL; - private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES; - - public RegexOptionsBuilder() { - } - - /** - * Sets the regular expression syntax flags - * see {@link RegexpFlag} - */ - public RegexOptionsBuilder setFlags(String flags) { - this.flagsValue = RegexpFlag.resolveValue(flags); - return this; - } - - /** - * Sets the maximum automaton states allowed for the regular expression expansion - */ - public RegexOptionsBuilder setMaxDeterminizedStates(int maxDeterminizedStates) { - this.maxDeterminizedStates = maxDeterminizedStates; - return this; - } - - int getFlagsValue() { - return flagsValue; - } - - int getMaxDeterminizedStates() { - return maxDeterminizedStates; - } - - 
@Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(REGEX_OPTIONS.getPreferredName()); - builder.field(FLAGS_VALUE.getPreferredName(), flagsValue); - builder.field(MAX_DETERMINIZED_STATES.getPreferredName(), maxDeterminizedStates); - builder.endObject(); - return builder; - } - } - /** * Sets the prefix to provide completions for. * The prefix gets analyzed by the suggest analyzer. @@ -273,17 +79,17 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder fields) { + this.payloadFields.addAll(fields); return this; } @@ -333,8 +139,8 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder contexts = this.queryContexts.get(name); + private CompletionSuggestionBuilder contexts(String name, QueryContext... queryContexts) { + List contexts = this.queryContexts.get(name); if (contexts == null) { contexts = new ArrayList<>(2); this.queryContexts.put(name, contexts); @@ -345,22 +151,22 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder> entry : this.queryContexts.entrySet()) { + for (Map.Entry> entry : this.queryContexts.entrySet()) { builder.startArray(entry.getKey()); for (ToXContent queryContext : entry.getValue()) { queryContext.toXContent(builder, params); @@ -374,8 +180,8 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder> namedQueryContexts : queryContexts.entrySet()) { + out.writeString(namedQueryContexts.getKey()); + List queryContexts = namedQueryContexts.getValue(); + out.writeVInt(queryContexts.size()); + for (QueryContext queryContext : queryContexts) { + out.writeCompletionSuggestionQueryContext(queryContext); + } + } + } } @Override public CompletionSuggestionBuilder doReadFrom(StreamInput in, String name) throws IOException { - // NORELEASE - throw new UnsupportedOperationException(); + CompletionSuggestionBuilder completionSuggestionBuilder = new CompletionSuggestionBuilder(name); + if (in.readBoolean()) { + int 
numPayloadField = in.readVInt(); + for (int i = 0; i < numPayloadField; i++) { + completionSuggestionBuilder.payloadFields.add(in.readString()); + } + } + if (in.readBoolean()) { + completionSuggestionBuilder.fuzzyOptions = FuzzyOptions.readFuzzyOptions(in); + } + if (in.readBoolean()) { + completionSuggestionBuilder.regexOptions = RegexOptions.readRegexOptions(in); + } + if (in.readBoolean()) { + int numNamedQueryContexts = in.readVInt(); + for (int i = 0; i < numNamedQueryContexts; i++) { + String queryContextName = in.readString(); + int numQueryContexts = in.readVInt(); + List queryContexts = new ArrayList<>(numQueryContexts); + for (int j = 0; j < numQueryContexts; j++) { + queryContexts.add(in.readCompletionSuggestionQueryContext()); + } + completionSuggestionBuilder.queryContexts.put(queryContextName, queryContexts); + } + } + return completionSuggestionBuilder; } @Override protected boolean doEquals(CompletionSuggestionBuilder other) { - // NORELEASE - return false; + return Objects.equals(payloadFields, other.payloadFields) && + Objects.equals(fuzzyOptions, other.fuzzyOptions) && + Objects.equals(regexOptions, other.regexOptions) && + Objects.equals(queryContexts, other.queryContexts); } @Override protected int doHashCode() { - // NORELEASE - return 0; + return Objects.hash(payloadFields, fuzzyOptions, regexOptions, queryContexts); } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java index f6d6de88f4c2..b20b9a5aeef2 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java @@ -42,8 +42,8 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest } private CompletionFieldMapper.CompletionFieldType fieldType; - private 
CompletionSuggestionBuilder.FuzzyOptionsBuilder fuzzyOptionsBuilder; - private CompletionSuggestionBuilder.RegexOptionsBuilder regexOptionsBuilder; + private FuzzyOptions fuzzyOptions; + private RegexOptions regexOptions; private Map> queryContexts = Collections.emptyMap(); private Set payloadFields = Collections.emptySet(); @@ -55,12 +55,12 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest this.fieldType = fieldType; } - void setRegexOptionsBuilder(CompletionSuggestionBuilder.RegexOptionsBuilder regexOptionsBuilder) { - this.regexOptionsBuilder = regexOptionsBuilder; + void setRegexOptionsBuilder(RegexOptions.Builder regexOptionsBuilder) { + this.regexOptions = regexOptionsBuilder.build(); } - void setFuzzyOptionsBuilder(CompletionSuggestionBuilder.FuzzyOptionsBuilder fuzzyOptionsBuilder) { - this.fuzzyOptionsBuilder = fuzzyOptionsBuilder; + void setFuzzyOptionsBuilder(FuzzyOptions.Builder fuzzyOptionsBuilder) { + this.fuzzyOptions = fuzzyOptionsBuilder.build(); } void setQueryContexts(Map> queryContexts) { @@ -72,7 +72,7 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest } void setPayloadFields(List fields) { - setPayloadFields(new HashSet(fields)); + setPayloadFields(new HashSet<>(fields)); } Set getPayloadFields() { @@ -83,24 +83,24 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest CompletionFieldMapper.CompletionFieldType fieldType = getFieldType(); final CompletionQuery query; if (getPrefix() != null) { - if (fuzzyOptionsBuilder != null) { + if (fuzzyOptions != null) { query = fieldType.fuzzyQuery(getPrefix().utf8ToString(), - Fuzziness.fromEdits(fuzzyOptionsBuilder.getEditDistance()), - fuzzyOptionsBuilder.getFuzzyPrefixLength(), fuzzyOptionsBuilder.getFuzzyMinLength(), - fuzzyOptionsBuilder.getMaxDeterminizedStates(), fuzzyOptionsBuilder.isTranspositions(), - fuzzyOptionsBuilder.isUnicodeAware()); + Fuzziness.fromEdits(fuzzyOptions.getEditDistance()), + 
fuzzyOptions.getFuzzyPrefixLength(), fuzzyOptions.getFuzzyMinLength(), + fuzzyOptions.getMaxDeterminizedStates(), fuzzyOptions.isTranspositions(), + fuzzyOptions.isUnicodeAware()); } else { query = fieldType.prefixQuery(getPrefix()); } } else if (getRegex() != null) { - if (fuzzyOptionsBuilder != null) { + if (fuzzyOptions != null) { throw new IllegalArgumentException("can not use 'fuzzy' options with 'regex"); } - if (regexOptionsBuilder == null) { - regexOptionsBuilder = new CompletionSuggestionBuilder.RegexOptionsBuilder(); + if (regexOptions == null) { + regexOptions = RegexOptions.builder().build(); } - query = fieldType.regexpQuery(getRegex(), regexOptionsBuilder.getFlagsValue(), - regexOptionsBuilder.getMaxDeterminizedStates()); + query = fieldType.regexpQuery(getRegex(), regexOptions.getFlagsValue(), + regexOptions.getMaxDeterminizedStates()); } else { throw new IllegalArgumentException("'prefix' or 'regex' must be defined"); } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java new file mode 100644 index 000000000000..317ac049d6b6 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java @@ -0,0 +1,277 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.suggest.completion; + +import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery; +import org.apache.lucene.util.automaton.Operations; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +/** + * Fuzzy options for completion suggester + */ +public class FuzzyOptions implements ToXContent, Writeable { + static final ParseField FUZZY_OPTIONS = new ParseField("fuzzy"); + static final ParseField TRANSPOSITION_FIELD = new ParseField("transpositions"); + static final ParseField MIN_LENGTH_FIELD = new ParseField("min_length"); + static final ParseField PREFIX_LENGTH_FIELD = new ParseField("prefix_length"); + static final ParseField UNICODE_AWARE_FIELD = new ParseField("unicode_aware"); + static final ParseField MAX_DETERMINIZED_STATES_FIELD = new ParseField("max_determinized_states"); + + private int editDistance; + private boolean transpositions; + private int fuzzyMinLength; + private int fuzzyPrefixLength; + private boolean unicodeAware; + private int maxDeterminizedStates; + + private FuzzyOptions(int editDistance, boolean transpositions, int fuzzyMinLength, int fuzzyPrefixLength, + boolean unicodeAware, int maxDeterminizedStates) { + this.editDistance = editDistance; + this.transpositions = transpositions; + this.fuzzyMinLength = fuzzyMinLength; + this.fuzzyPrefixLength = fuzzyPrefixLength; + this.unicodeAware = unicodeAware; + this.maxDeterminizedStates = maxDeterminizedStates; + } + + private FuzzyOptions() { + } + + public static 
Builder builder() { + return new Builder(); + } + + /** + * Returns the maximum number of edits + */ + public int getEditDistance() { + return editDistance; + } + + /** + * Returns if transpositions option is set + * + * if transpositions is set, then swapping one character for another counts as one edit instead of two. + */ + public boolean isTranspositions() { + return transpositions; + } + + /** + * Returns the length of input prefix after which edits are applied + */ + public int getFuzzyMinLength() { + return fuzzyMinLength; + } + + /** + * Returns the minimum length of the input prefix required to apply any edits + */ + public int getFuzzyPrefixLength() { + return fuzzyPrefixLength; + } + + /** + * Returns if all measurements (like edit distance, transpositions and lengths) are in unicode code + * points (actual letters) instead of bytes. + */ + public boolean isUnicodeAware() { + return unicodeAware; + } + + /** + * Returns the maximum automaton states allowed for fuzzy expansion + */ + public int getMaxDeterminizedStates() { + return maxDeterminizedStates; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + FuzzyOptions that = (FuzzyOptions) o; + + if (editDistance != that.editDistance) return false; + if (transpositions != that.transpositions) return false; + if (fuzzyMinLength != that.fuzzyMinLength) return false; + if (fuzzyPrefixLength != that.fuzzyPrefixLength) return false; + if (unicodeAware != that.unicodeAware) return false; + return maxDeterminizedStates == that.maxDeterminizedStates; + + } + + @Override + public int hashCode() { + int result = editDistance; + result = 31 * result + (transpositions ? 1 : 0); + result = 31 * result + fuzzyMinLength; + result = 31 * result + fuzzyPrefixLength; + result = 31 * result + (unicodeAware ? 
1 : 0); + result = 31 * result + maxDeterminizedStates; + return result; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(FUZZY_OPTIONS.getPreferredName()); + builder.field(Fuzziness.FIELD.getPreferredName(), editDistance); + builder.field(TRANSPOSITION_FIELD.getPreferredName(), transpositions); + builder.field(MIN_LENGTH_FIELD.getPreferredName(), fuzzyMinLength); + builder.field(PREFIX_LENGTH_FIELD.getPreferredName(), fuzzyPrefixLength); + builder.field(UNICODE_AWARE_FIELD.getPreferredName(), unicodeAware); + builder.field(MAX_DETERMINIZED_STATES_FIELD.getPreferredName(), maxDeterminizedStates); + builder.endObject(); + return builder; + } + + public static FuzzyOptions readFuzzyOptions(StreamInput in) throws IOException { + FuzzyOptions fuzzyOptions = new FuzzyOptions(); + fuzzyOptions.readFrom(in); + return fuzzyOptions; + } + + @Override + public FuzzyOptions readFrom(StreamInput in) throws IOException { + this.transpositions = in.readBoolean(); + this.unicodeAware = in.readBoolean(); + this.editDistance = in.readVInt(); + this.fuzzyMinLength = in.readVInt(); + this.fuzzyPrefixLength = in.readVInt(); + this.maxDeterminizedStates = in.readVInt(); + return this; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(transpositions); + out.writeBoolean(unicodeAware); + out.writeVInt(editDistance); + out.writeVInt(fuzzyMinLength); + out.writeVInt(fuzzyPrefixLength); + out.writeVInt(maxDeterminizedStates); + } + + /** + * Options for fuzzy queries + */ + public static class Builder { + + private int editDistance = FuzzyCompletionQuery.DEFAULT_MAX_EDITS; + private boolean transpositions = FuzzyCompletionQuery.DEFAULT_TRANSPOSITIONS; + private int fuzzyMinLength = FuzzyCompletionQuery.DEFAULT_MIN_FUZZY_LENGTH; + private int fuzzyPrefixLength = FuzzyCompletionQuery.DEFAULT_NON_FUZZY_PREFIX; + private boolean unicodeAware = 
FuzzyCompletionQuery.DEFAULT_UNICODE_AWARE; + private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES; + + public Builder() { + } + + /** + * Sets the level of fuzziness used to create suggestions using a {@link Fuzziness} instance. + * The default value is {@link Fuzziness#ONE} which allows for an "edit distance" of one. + */ + public Builder setFuzziness(int editDistance) { + if (editDistance < 0 || editDistance > 2) { + throw new IllegalArgumentException("fuzziness must be between 0 and 2"); + } + this.editDistance = editDistance; + return this; + } + + /** + * Sets the level of fuzziness used to create suggestions using a {@link Fuzziness} instance. + * The default value is {@link Fuzziness#ONE} which allows for an "edit distance" of one. + */ + public Builder setFuzziness(Fuzziness fuzziness) { + Objects.requireNonNull(fuzziness, "fuzziness must not be null"); + return setFuzziness(fuzziness.asDistance()); + } + + /** + * Sets if transpositions (swapping one character for another) counts as one character + * change or two. + * Defaults to true, meaning it uses the fuzzier option of counting transpositions as + * a single change. + */ + public Builder setTranspositions(boolean transpositions) { + this.transpositions = transpositions; + return this; + } + + /** + * Sets the minimum length of input string before fuzzy suggestions are returned, defaulting + * to 3. 
+ */ + public Builder setFuzzyMinLength(int fuzzyMinLength) { + if (fuzzyMinLength < 0) { + throw new IllegalArgumentException("fuzzyMinLength must not be negative"); + } + this.fuzzyMinLength = fuzzyMinLength; + return this; + } + + /** + * Sets the minimum length of the input, which is not checked for fuzzy alternatives, defaults to 1 + */ + public Builder setFuzzyPrefixLength(int fuzzyPrefixLength) { + if (fuzzyPrefixLength < 0) { + throw new IllegalArgumentException("fuzzyPrefixLength must not be negative"); + } + this.fuzzyPrefixLength = fuzzyPrefixLength; + return this; + } + + /** + * Sets the maximum automaton states allowed for the fuzzy expansion + */ + public Builder setMaxDeterminizedStates(int maxDeterminizedStates) { + if (maxDeterminizedStates < 0) { + throw new IllegalArgumentException("maxDeterminizedStates must not be negative"); + } + this.maxDeterminizedStates = maxDeterminizedStates; + return this; + } + + /** + * Set to true if all measurements (like edit distance, transpositions and lengths) are in unicode + * code points (actual letters) instead of bytes. Default is false. + */ + public Builder setUnicodeAware(boolean unicodeAware) { + this.unicodeAware = unicodeAware; + return this; + } + + public FuzzyOptions build() { + return new FuzzyOptions(editDistance, transpositions, fuzzyMinLength, fuzzyPrefixLength, + unicodeAware, maxDeterminizedStates); + } + } +} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java new file mode 100644 index 000000000000..fc183cdb1c1b --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java @@ -0,0 +1,153 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.suggest.completion; + +import org.apache.lucene.util.automaton.Operations; +import org.apache.lucene.util.automaton.RegExp; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.RegexpFlag; + +import java.io.IOException; + +/** + * Regular expression options for completion suggester + */ +public class RegexOptions implements ToXContent, Writeable { + static final String NAME = "regex"; + static final ParseField REGEX_OPTIONS = new ParseField(NAME); + static final ParseField FLAGS_VALUE = new ParseField("flags", "flags_value"); + static final ParseField MAX_DETERMINIZED_STATES = new ParseField("max_determinized_states"); + private int flagsValue; + private int maxDeterminizedStates; + + private RegexOptions() { + } + + private RegexOptions(int flagsValue, int maxDeterminizedStates) { + this.flagsValue = flagsValue; + this.maxDeterminizedStates = maxDeterminizedStates; + } + + /** + * Returns internal regular expression syntax flag value + * see {@link RegexpFlag#value()} + */ + public int getFlagsValue() { + return flagsValue; + } + + 
/** + * Returns the maximum automaton states allowed for fuzzy expansion + */ + public int getMaxDeterminizedStates() { + return maxDeterminizedStates; + } + + public static Builder builder() { + return new Builder(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + RegexOptions that = (RegexOptions) o; + + if (flagsValue != that.flagsValue) return false; + return maxDeterminizedStates == that.maxDeterminizedStates; + + } + + @Override + public int hashCode() { + int result = flagsValue; + result = 31 * result + maxDeterminizedStates; + return result; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(REGEX_OPTIONS.getPreferredName()); + builder.field(FLAGS_VALUE.getPreferredName(), flagsValue); + builder.field(MAX_DETERMINIZED_STATES.getPreferredName(), maxDeterminizedStates); + builder.endObject(); + return builder; + } + + public static RegexOptions readRegexOptions(StreamInput in) throws IOException { + RegexOptions regexOptions = new RegexOptions(); + regexOptions.readFrom(in); + return regexOptions; + } + + @Override + public RegexOptions readFrom(StreamInput in) throws IOException { + this.flagsValue = in.readVInt(); + this.maxDeterminizedStates = in.readVInt(); + return this; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(flagsValue); + out.writeVInt(maxDeterminizedStates); + } + + /** + * Options for regular expression queries + */ + public static class Builder { + private int flagsValue = RegExp.ALL; + private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES; + + public Builder() { + } + + /** + * Sets the regular expression syntax flags + * see {@link RegexpFlag} + */ + public Builder setFlags(String flags) { + this.flagsValue = RegexpFlag.resolveValue(flags); + return this; + } + + /** + * Sets the 
maximum automaton states allowed for the regular expression expansion + */ + public Builder setMaxDeterminizedStates(int maxDeterminizedStates) { + if (maxDeterminizedStates < 0) { + throw new IllegalArgumentException("maxDeterminizedStates must not be negative"); + } + this.maxDeterminizedStates = maxDeterminizedStates; + return this; + } + + public RegexOptions build() { + return new RegexOptions(flagsValue, maxDeterminizedStates); + } + } +} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java index dffbb1aa80d8..10ac3935cc29 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java @@ -158,12 +158,12 @@ public class CategoryContextMapping extends ContextMapping { List queryContexts = new ArrayList<>(); Token token = parser.nextToken(); if (token == Token.START_OBJECT || token == Token.VALUE_STRING) { - CategoryQueryContext parse = CategoryQueryContext.parse(parser); - queryContexts.add(new QueryContext(parse.getCategory().toString(), parse.getBoost(), parse.isPrefix())); + CategoryQueryContext parse = CategoryQueryContext.PROTOTYPE.fromXContext(parser); + queryContexts.add(new QueryContext(parse.getCategory(), parse.getBoost(), parse.isPrefix())); } else if (token == Token.START_ARRAY) { while (parser.nextToken() != Token.END_ARRAY) { - CategoryQueryContext parse = CategoryQueryContext.parse(parser); - queryContexts.add(new QueryContext(parse.getCategory().toString(), parse.getBoost(), parse.isPrefix())); + CategoryQueryContext parse = CategoryQueryContext.PROTOTYPE.fromXContext(parser); + queryContexts.add(new QueryContext(parse.getCategory(), parse.getBoost(), parse.isPrefix())); } } return queryContexts; diff --git 
a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java index c49312657768..8db9afe5ae35 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java @@ -21,12 +21,15 @@ package org.elasticsearch.search.suggest.completion.context; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.Collections; +import java.util.Objects; import static org.elasticsearch.search.suggest.completion.context.CategoryContextMapping.CONTEXT_BOOST; import static org.elasticsearch.search.suggest.completion.context.CategoryContextMapping.CONTEXT_PREFIX; @@ -35,12 +38,15 @@ import static org.elasticsearch.search.suggest.completion.context.CategoryContex /** * Defines the query context for {@link CategoryContextMapping} */ -public final class CategoryQueryContext implements ToXContent { - private final CharSequence category; +public final class CategoryQueryContext implements QueryContext { + public static final String NAME = "category"; + public static final CategoryQueryContext PROTOTYPE = new CategoryQueryContext("", 1, false); + + private final String category; private final boolean isPrefix; private final int boost; - private CategoryQueryContext(CharSequence category, int boost, boolean isPrefix) { + private CategoryQueryContext(String category, int boost, boolean isPrefix) { 
this.category = category; this.boost = boost; this.isPrefix = isPrefix; @@ -49,7 +55,7 @@ public final class CategoryQueryContext implements ToXContent { /** * Returns the category of the context */ - public CharSequence getCategory() { + public String getCategory() { return category; } @@ -71,54 +77,41 @@ public final class CategoryQueryContext implements ToXContent { return new Builder(); } - public static class Builder { - private CharSequence category; - private boolean isPrefix = false; - private int boost = 1; + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; - public Builder() { - } + CategoryQueryContext that = (CategoryQueryContext) o; - /** - * Sets the category of the context. - * This is a required field - */ - public Builder setCategory(CharSequence context) { - this.category = context; - return this; - } + if (isPrefix != that.isPrefix) return false; + if (boost != that.boost) return false; + return category != null ? category.equals(that.category) : that.category == null; - /** - * Sets if the context should be treated as a prefix or not. - * Defaults to false - */ - public Builder setPrefix(boolean prefix) { - this.isPrefix = prefix; - return this; - } - - /** - * Sets the query-time boost of the context. - * Defaults to 1. - */ - public Builder setBoost(int boost) { - this.boost = boost; - return this; - } - - public CategoryQueryContext build() { - return new CategoryQueryContext(category, boost, isPrefix); - } } - private static ObjectParser CATEGORY_PARSER = new ObjectParser<>("category", null); + @Override + public int hashCode() { + int result = category != null ? category.hashCode() : 0; + result = 31 * result + (isPrefix ? 
1 : 0); + result = 31 * result + boost; + return result; + } + + @Override + public String getWriteableName() { + return NAME; + } + + private static ObjectParser CATEGORY_PARSER = new ObjectParser<>(NAME, null); static { - CATEGORY_PARSER.declareString(Builder::setCategory, new ParseField("context")); - CATEGORY_PARSER.declareInt(Builder::setBoost, new ParseField("boost")); - CATEGORY_PARSER.declareBoolean(Builder::setPrefix, new ParseField("prefix")); + CATEGORY_PARSER.declareString(Builder::setCategory, new ParseField(CONTEXT_VALUE)); + CATEGORY_PARSER.declareInt(Builder::setBoost, new ParseField(CONTEXT_BOOST)); + CATEGORY_PARSER.declareBoolean(Builder::setPrefix, new ParseField(CONTEXT_PREFIX)); } - public static CategoryQueryContext parse(XContentParser parser) throws IOException { + @Override + public CategoryQueryContext fromXContext(XContentParser parser) throws IOException { XContentParser.Token token = parser.currentToken(); Builder builder = builder(); if (token == XContentParser.Token.START_OBJECT) { @@ -140,4 +133,65 @@ public final class CategoryQueryContext implements ToXContent { builder.endObject(); return builder; } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(isPrefix); + out.writeVInt(boost); + out.writeString(category); + } + + @Override + public QueryContext readFrom(StreamInput in) throws IOException { + Builder builder = new Builder(); + builder.isPrefix = in.readBoolean(); + builder.boost = in.readVInt(); + builder.category = in.readString(); + return builder.build(); + } + + public static class Builder { + private String category; + private boolean isPrefix = false; + private int boost = 1; + + public Builder() { + } + + /** + * Sets the category of the category. 
+ * This is a required field + */ + public Builder setCategory(String category) { + Objects.requireNonNull(category, "category must not be null"); + this.category = category; + return this; + } + + /** + * Sets if the context should be treated as a prefix or not. + * Defaults to false + */ + public Builder setPrefix(boolean prefix) { + this.isPrefix = prefix; + return this; + } + + /** + * Sets the query-time boost of the context. + * Defaults to 1. + */ + public Builder setBoost(int boost) { + if (boost <= 0) { + throw new IllegalArgumentException("boost must be greater than 0"); + } + this.boost = boost; + return this; + } + + public CategoryQueryContext build() { + Objects.requireNonNull(category, "category must not be null"); + return new CategoryQueryContext(category, boost, isPrefix); + } + } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java index 9d4bed4f664c..ccd4b2d58480 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java @@ -43,7 +43,6 @@ import java.util.Set; import static org.elasticsearch.search.suggest.completion.context.ContextMapping.FIELD_NAME; import static org.elasticsearch.search.suggest.completion.context.ContextMapping.FIELD_TYPE; -import static org.elasticsearch.search.suggest.completion.context.ContextMapping.QueryContext; import static org.elasticsearch.search.suggest.completion.context.ContextMapping.Type; /** @@ -153,7 +152,7 @@ public class ContextMappings implements ToXContent { * @param queryContexts a map of context mapping name and collected query contexts * @return a context-enabled query */ - public ContextQuery toContextQuery(CompletionQuery query, Map> queryContexts) { + public ContextQuery toContextQuery(CompletionQuery query, Map> 
queryContexts) { ContextQuery typedContextQuery = new ContextQuery(query); if (queryContexts.isEmpty() == false) { CharsRefBuilder scratch = new CharsRefBuilder(); @@ -162,9 +161,9 @@ public class ContextMappings implements ToXContent { scratch.setCharAt(0, (char) typeId); scratch.setLength(1); ContextMapping mapping = contextMappings.get(typeId); - List queryContext = queryContexts.get(mapping.name()); + List queryContext = queryContexts.get(mapping.name()); if (queryContext != null) { - for (QueryContext context : queryContext) { + for (ContextMapping.QueryContext context : queryContext) { scratch.append(context.context); typedContextQuery.addContext(scratch.toCharsRef(), context.boost, !context.isPrefix); scratch.setLength(1); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java index f2f3d10215d5..2c90429302da 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java @@ -247,18 +247,15 @@ public class GeoContextMapping extends ContextMapping { List queryContexts = new ArrayList<>(); Token token = parser.nextToken(); if (token == Token.START_OBJECT || token == Token.VALUE_STRING) { - queryContexts.add(GeoQueryContext.parse(parser)); + queryContexts.add(GeoQueryContext.PROTOTYPE.fromXContext(parser)); } else if (token == Token.START_ARRAY) { while (parser.nextToken() != Token.END_ARRAY) { - queryContexts.add(GeoQueryContext.parse(parser)); + queryContexts.add(GeoQueryContext.PROTOTYPE.fromXContext(parser)); } } List queryContextList = new ArrayList<>(); for (GeoQueryContext queryContext : queryContexts) { - int minPrecision = this.precision; - if (queryContext.getPrecision() != -1) { - minPrecision = Math.min(minPrecision, queryContext.getPrecision()); - } + int 
minPrecision = Math.min(this.precision, queryContext.getPrecision()); GeoPoint point = queryContext.getGeoPoint(); final Collection locations = new HashSet<>(); String geoHash = GeoHashUtils.stringEncode(point.getLon(), point.getLat(), minPrecision); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoQueryContext.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoQueryContext.java index da9191bf2d59..5b406abc1d41 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoQueryContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoQueryContext.java @@ -23,14 +23,17 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Objects; import static org.elasticsearch.search.suggest.completion.context.GeoContextMapping.CONTEXT_BOOST; import static org.elasticsearch.search.suggest.completion.context.GeoContextMapping.CONTEXT_NEIGHBOURS; @@ -40,7 +43,10 @@ import static org.elasticsearch.search.suggest.completion.context.GeoContextMapp /** * Defines the query context for {@link GeoContextMapping} */ -public final class GeoQueryContext implements ToXContent { +public final class GeoQueryContext implements QueryContext { + public static final String NAME = "geo"; + public static final GeoQueryContext PROTOTYPE = new GeoQueryContext(null, 1, 12, 
Collections.emptyList()); + private final GeoPoint geoPoint; private final int boost; private final int precision; @@ -81,90 +87,52 @@ public final class GeoQueryContext implements ToXContent { return neighbours; } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + GeoQueryContext that = (GeoQueryContext) o; + + if (boost != that.boost) return false; + if (precision != that.precision) return false; + if (geoPoint != null ? !geoPoint.equals(that.geoPoint) : that.geoPoint != null) return false; + return neighbours != null ? neighbours.equals(that.neighbours) : that.neighbours == null; + + } + + @Override + public int hashCode() { + int result = geoPoint != null ? geoPoint.hashCode() : 0; + result = 31 * result + boost; + result = 31 * result + precision; + result = 31 * result + (neighbours != null ? neighbours.hashCode() : 0); + return result; + } + public static Builder builder() { return new Builder(); } - public static class Builder { - private GeoPoint geoPoint; - private int boost = 1; - private int precision = -1; - private List neighbours = Collections.emptyList(); - - public Builder() { - } - - /** - * Sets the query-time boost for the context - * Defaults to 1 - */ - public Builder setBoost(int boost) { - this.boost = boost; - return this; - } - - /** - * Sets the precision level for computing the geohash from the context geo point. - * Defaults to using index-time precision level - */ - public Builder setPrecision(int precision) { - this.precision = precision; - return this; - } - - /** - * Sets the precision levels at which geohash cells neighbours are considered. - * Defaults to only considering neighbours at the index-time precision level - */ - public Builder setNeighbours(List neighbours) { - this.neighbours = neighbours; - return this; - } - - /** - * Sets the geo point of the context. 
- * This is a required field - */ - public Builder setGeoPoint(GeoPoint geoPoint) { - this.geoPoint = geoPoint; - return this; - } - - private double lat = Double.NaN; - void setLat(double lat) { - this.lat = lat; - } - - private double lon = Double.NaN; - void setLon(double lon) { - this.lon = lon; - } - - public GeoQueryContext build() { - if (geoPoint == null) { - if (Double.isNaN(lat) == false && Double.isNaN(lon) == false) { - geoPoint = new GeoPoint(lat, lon); - } else { - throw new IllegalArgumentException("no geohash or geo point provided"); - } - } - return new GeoQueryContext(geoPoint, boost, precision, neighbours); - } + @Override + public String getWriteableName() { + return NAME; } - private static ObjectParser GEO_CONTEXT_PARSER = new ObjectParser<>("geo", null); + private static ObjectParser GEO_CONTEXT_PARSER = new ObjectParser<>(NAME, null); static { - GEO_CONTEXT_PARSER.declareField((parser, geoQueryContext, geoContextMapping) -> geoQueryContext.setGeoPoint(GeoUtils.parseGeoPoint(parser)), new ParseField("context"), ObjectParser.ValueType.OBJECT); - GEO_CONTEXT_PARSER.declareInt(GeoQueryContext.Builder::setBoost, new ParseField("boost")); + GEO_CONTEXT_PARSER.declareField((parser, geoQueryContext, geoContextMapping) -> geoQueryContext.setGeoPoint(GeoUtils.parseGeoPoint(parser)), new ParseField(CONTEXT_VALUE), ObjectParser.ValueType.OBJECT); + GEO_CONTEXT_PARSER.declareInt(GeoQueryContext.Builder::setBoost, new ParseField(CONTEXT_BOOST)); // TODO : add string support for precision for GeoUtils.geoHashLevelsForPrecision() - GEO_CONTEXT_PARSER.declareInt(GeoQueryContext.Builder::setPrecision, new ParseField("precision")); + GEO_CONTEXT_PARSER.declareInt(GeoQueryContext.Builder::setPrecision, new ParseField(CONTEXT_PRECISION)); // TODO : add string array support for precision for GeoUtils.geoHashLevelsForPrecision() - GEO_CONTEXT_PARSER.declareIntArray(GeoQueryContext.Builder::setNeighbours, new ParseField("neighbours")); + 
GEO_CONTEXT_PARSER.declareIntArray(GeoQueryContext.Builder::setNeighbours, new ParseField(CONTEXT_NEIGHBOURS)); GEO_CONTEXT_PARSER.declareDouble(GeoQueryContext.Builder::setLat, new ParseField("lat")); GEO_CONTEXT_PARSER.declareDouble(GeoQueryContext.Builder::setLon, new ParseField("lon")); } - public static GeoQueryContext parse(XContentParser parser) throws IOException { + @Override + public GeoQueryContext fromXContext(XContentParser parser) throws IOException { XContentParser.Token token = parser.currentToken(); GeoQueryContext.Builder builder = new Builder(); if (token == XContentParser.Token.START_OBJECT) { @@ -190,4 +158,109 @@ public final class GeoQueryContext implements ToXContent { builder.endObject(); return builder; } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeGeoPoint(geoPoint); + out.writeVInt(boost); + out.writeInt(precision); + out.writeVInt(neighbours.size()); + for (Integer neighbour : neighbours) { + out.writeVInt(neighbour); + } + } + + @Override + public QueryContext readFrom(StreamInput in) throws IOException { + Builder builder = new Builder(); + builder.geoPoint = in.readGeoPoint(); + builder.boost = in.readVInt(); + builder.precision = in.readInt(); + int nNeighbour = in.readVInt(); + if (nNeighbour != 0) { + builder.neighbours = new ArrayList<>(nNeighbour); + for (int i = 0; i < nNeighbour; i++) { + builder.neighbours.add(in.readVInt()); + } + } + return builder.build(); + } + + public static class Builder { + private GeoPoint geoPoint; + private int boost = 1; + private int precision = 12; + private List neighbours = Collections.emptyList(); + + public Builder() { + } + + /** + * Sets the query-time boost for the context + * Defaults to 1 + */ + public Builder setBoost(int boost) { + if (boost <= 0) { + throw new IllegalArgumentException("boost must be greater than 0"); + } + this.boost = boost; + return this; + } + + /** + * Sets the precision level for computing the geohash from the context 
geo point. + * Defaults to using index-time precision level + */ + public Builder setPrecision(int precision) { + if (precision < 1 || precision > 12) { + throw new IllegalArgumentException("precision must be between 1 and 12"); + } + this.precision = precision; + return this; + } + + /** + * Sets the precision levels at which geohash cells neighbours are considered. + * Defaults to only considering neighbours at the index-time precision level + */ + public Builder setNeighbours(List neighbours) { + for (int neighbour : neighbours) { + if (neighbour < 1 || neighbour > 12) { + throw new IllegalArgumentException("neighbour value must be between 1 and 12"); + } + } + this.neighbours = neighbours; + return this; + } + + /** + * Sets the geo point of the context. + * This is a required field + */ + public Builder setGeoPoint(GeoPoint geoPoint) { + Objects.requireNonNull(geoPoint, "geoPoint must not be null"); + this.geoPoint = geoPoint; + return this; + } + + private double lat = Double.NaN; + void setLat(double lat) { + this.lat = lat; + } + + private double lon = Double.NaN; + void setLon(double lon) { + this.lon = lon; + } + + public GeoQueryContext build() { + if (geoPoint == null) { + if (Double.isNaN(lat) == false && Double.isNaN(lon) == false) { + geoPoint = new GeoPoint(lat, lon); + } + } + Objects.requireNonNull(geoPoint, "geoPoint must not be null"); + return new GeoQueryContext(geoPoint, boost, precision, neighbours); + } + } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/QueryContext.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/QueryContext.java new file mode 100644 index 000000000000..ccfd4a8d3dac --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/QueryContext.java @@ -0,0 +1,34 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.suggest.completion.context; + +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; + +/** + * Interface for serializing/de-serializing completion query context + */ +public interface QueryContext extends ToXContent, NamedWriteable { + + QueryContext fromXContext(XContentParser parser) throws IOException; +} diff --git a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index 71431e70f3af..a7354f36cbcc 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -69,9 +69,6 @@ import java.nio.file.Path; import java.util.Collections; import java.util.Iterator; import java.util.Map; -import java.util.Map.Entry; -import java.util.function.Consumer; -import java.util.function.Supplier; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.hamcrest.Matchers.equalTo; @@ -276,9 
+273,9 @@ public abstract class AbstractSuggestionBuilderTestCase> iterator = buildSuggestSearchContext.suggestions().entrySet().iterator(); - for (Entry entry : parsedSuggestionSearchContext.suggestions().entrySet()) { - Entry other = iterator.next(); + Iterator> iterator = buildSuggestSearchContext.suggestions().entrySet().iterator(); + for (Map.Entry entry : parsedSuggestionSearchContext.suggestions().entrySet()) { + Map.Entry other = iterator.next(); assertEquals(entry.getKey(), other.getKey()); SuggestionContext oldSchoolContext = entry.getValue(); @@ -353,29 +350,9 @@ public abstract class AbstractSuggestionBuilderTestCase void maybeSet(Consumer consumer, T value) { - if (randomBoolean()) { - consumer.accept(value); - } - } - - /** - * helper to get a random value in a certain range that's different from the - * input - */ - protected static T randomValueOtherThan(T input, Supplier randomSupplier) { - T randomValue = null; - do { - randomValue = randomSupplier.get(); - } while (randomValue.equals(input)); - return randomValue; - } - } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 896d20895eca..d12b70c59a3a 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -47,7 +47,7 @@ import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.suggest.completion.CompletionStats; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; -import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder.FuzzyOptionsBuilder; +import org.elasticsearch.search.suggest.completion.FuzzyOptions; import org.elasticsearch.search.suggest.completion.context.CategoryContextMapping; 
import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.GeoContextMapping; @@ -56,6 +56,7 @@ import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; @@ -197,7 +198,8 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg").size(numDocs).payload("count"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg"). + size(numDocs).payload(Collections.singletonList("count")); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(prefix).execute().actionGet(); assertNoFailures(suggestResponse); CompletionSuggestion completionSuggestion = suggestResponse.getSuggest().getSuggestion("foo"); @@ -243,7 +245,8 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { client().prepareIndex(INDEX, TYPE, "2").setSource(FIELD, "suggestion") ); indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg").payload("test_field"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg") + .payload(Collections.singletonList("test_field")); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(prefix).execute().actionGet(); assertNoFailures(suggestResponse); CompletionSuggestion completionSuggestion = suggestResponse.getSuggest().getSuggestion("foo"); @@ -280,7 +283,8 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, 
"2").setSource(source)); indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg").payload("title", "count"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg") + .payload(Arrays.asList("title", "count")); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(prefix).execute().actionGet(); assertNoFailures(suggestResponse); CompletionSuggestion completionSuggestion = suggestResponse.getSuggest().getSuggestion("foo"); @@ -325,12 +329,13 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { int suggestionSize = randomIntBetween(1, numDocs); int numRequestedPayloadFields = randomIntBetween(2, numPayloadFields); - String[] payloadFields = new String[numRequestedPayloadFields]; + List payloadFields = new ArrayList<>(numRequestedPayloadFields); for (int i = 0; i < numRequestedPayloadFields; i++) { - payloadFields[i] = "test_field" + i; + payloadFields.add("test_field" + i); } - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg").size(suggestionSize).payload(payloadFields); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg") + .size(suggestionSize).payload(payloadFields); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(prefix).execute().actionGet(); assertNoFailures(suggestResponse); CompletionSuggestion completionSuggestion = suggestResponse.getSuggest().getSuggestion("foo"); @@ -702,7 +707,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { refresh(); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nriv", new FuzzyOptionsBuilder().setTranspositions(false)).size(10) + 
SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nriv", FuzzyOptions.builder().setTranspositions(false).build()).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo"); @@ -724,12 +729,12 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { refresh(); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nriva", new FuzzyOptionsBuilder().setFuzzyMinLength(6)).size(10) + SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nriva", FuzzyOptions.builder().setFuzzyMinLength(6).build()).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo"); suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nrivan", new FuzzyOptionsBuilder().setFuzzyMinLength(6)).size(10) + SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nrivan", FuzzyOptions.builder().setFuzzyMinLength(6).build()).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "Nirvana"); } @@ -746,12 +751,12 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { refresh(); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nirw", new FuzzyOptionsBuilder().setFuzzyPrefixLength(4)).size(10) + SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nirw", FuzzyOptions.builder().setFuzzyPrefixLength(4).build()).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo"); suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nirvo", new FuzzyOptionsBuilder().setFuzzyPrefixLength(4)).size(10) + SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nirvo", 
FuzzyOptions.builder().setFuzzyPrefixLength(4).build()).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "Nirvana"); } @@ -769,18 +774,18 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { // suggestion with a character, which needs unicode awareness org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder completionSuggestionBuilder = - SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("öööи", new FuzzyOptionsBuilder().setUnicodeAware(true)).size(10); + SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("öööи", FuzzyOptions.builder().setUnicodeAware(true).build()).size(10); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(completionSuggestionBuilder).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "ööööö"); // removing unicode awareness leads to no result - completionSuggestionBuilder = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("öööи", new FuzzyOptionsBuilder().setUnicodeAware(false)).size(10); + completionSuggestionBuilder = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("öööи", FuzzyOptions.builder().setUnicodeAware(false).build()).size(10); suggestResponse = client().prepareSuggest(INDEX).addSuggestion(completionSuggestionBuilder).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo"); // increasing edit distance instead of unicode awareness works again, as this is only a single character - completionSuggestionBuilder = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("öööи", new FuzzyOptionsBuilder().setUnicodeAware(false).setFuzziness(Fuzziness.TWO)).size(10); + completionSuggestionBuilder = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("öööи", FuzzyOptions.builder().setUnicodeAware(false).setFuzziness(Fuzziness.TWO).build()).size(10); suggestResponse = 
client().prepareSuggest(INDEX).addSuggestion(completionSuggestionBuilder).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "ööööö"); } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java index f4551b3de9a0..8e324330fe81 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java @@ -47,7 +47,7 @@ public class SuggestBuilderTests extends WritableTestCase { @Override - protected NamedWriteableRegistry provideNamedWritbaleRegistry() { + protected NamedWriteableRegistry provideNamedWritableRegistry() { NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, TermSuggestionBuilder.PROTOTYPE); namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, PhraseSuggestionBuilder.PROTOTYPE); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryQueryContextTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryQueryContextTests.java new file mode 100644 index 000000000000..b4d80ebba980 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryQueryContextTests.java @@ -0,0 +1,95 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.suggest.completion; + +import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext; + +import java.io.IOException; + +public class CategoryQueryContextTests extends QueryContextTestCase { + + public static CategoryQueryContext randomCategoryQueryContext() { + final CategoryQueryContext.Builder builder = CategoryQueryContext.builder(); + builder.setCategory(randomAsciiOfLength(10)); + maybeSet(builder::setBoost, randomIntBetween(1, 10)); + maybeSet(builder::setPrefix, randomBoolean()); + return builder.build(); + } + + @Override + protected CategoryQueryContext createTestModel() { + return randomCategoryQueryContext(); + } + + @Override + protected CategoryQueryContext createMutation(CategoryQueryContext original) throws IOException { + final CategoryQueryContext.Builder builder = CategoryQueryContext.builder(); + builder.setCategory(original.getCategory()).setBoost(original.getBoost()).setPrefix(original.isPrefix()); + switch (randomIntBetween(0, 2)) { + case 0: + builder.setCategory(randomValueOtherThan(original.getCategory(), () -> randomAsciiOfLength(10))); + break; + case 1: + builder.setBoost(randomValueOtherThan(original.getBoost(), () -> randomIntBetween(1, 5))); + break; + case 2: + builder.setPrefix(!original.isPrefix()); + break; + + } + return builder.build(); + } + + @Override + protected CategoryQueryContext prototype() { + return CategoryQueryContext.PROTOTYPE; + } + + public void testNullCategoryIsIllegal() { + final CategoryQueryContext categoryQueryContext = 
randomCategoryQueryContext(); + final CategoryQueryContext.Builder builder = CategoryQueryContext.builder() + .setBoost(categoryQueryContext.getBoost()) + .setPrefix(categoryQueryContext.isPrefix()); + try { + builder.build(); + fail("null category is illegal"); + } catch (NullPointerException e) { + assertEquals(e.getMessage(), "category must not be null"); + } + } + + public void testIllegalArguments() { + final CategoryQueryContext.Builder builder = CategoryQueryContext.builder(); + + try { + builder.setCategory(null); + fail("category must not be null"); + } catch (NullPointerException e) { + assertEquals(e.getMessage(), "category must not be null"); + } + + try { + builder.setBoost(-randomIntBetween(1, Integer.MAX_VALUE)); + fail("boost must be positive"); + } catch (IllegalArgumentException e) { + assertEquals(e.getMessage(), "boost must be greater than 0"); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java new file mode 100644 index 000000000000..9fc5988c07ce --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java @@ -0,0 +1,135 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.suggest.completion; + +import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase; +import org.elasticsearch.search.suggest.SuggestionSearchContext; +import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; +import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext; +import org.elasticsearch.search.suggest.completion.context.GeoQueryContext; +import org.elasticsearch.search.suggest.completion.context.QueryContext; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTestCase { + + @BeforeClass + public static void initQueryContexts() { + namedWriteableRegistry.registerPrototype(QueryContext.class, CategoryQueryContext.PROTOTYPE); + namedWriteableRegistry.registerPrototype(QueryContext.class, GeoQueryContext.PROTOTYPE); + } + + @Override + protected CompletionSuggestionBuilder randomSuggestionBuilder() { + CompletionSuggestionBuilder testBuilder = new CompletionSuggestionBuilder(randomAsciiOfLength(10)); + switch (randomIntBetween(0, 3)) { + case 0: + testBuilder.prefix(randomAsciiOfLength(10)); + break; + case 1: + testBuilder.prefix(randomAsciiOfLength(10), FuzzyOptionsTests.randomFuzzyOptions()); + break; + case 2: + testBuilder.prefix(randomAsciiOfLength(10), randomFrom(Fuzziness.ZERO, Fuzziness.ONE, Fuzziness.TWO)); + break; + case 3: + testBuilder.regex(randomAsciiOfLength(10), RegexOptionsTests.randomRegexOptions()); + break; + } + List payloads = new ArrayList<>(); + Collections.addAll(payloads, generateRandomStringArray(5, 10, false, false)); + maybeSet(testBuilder::payload, payloads); + if 
(randomBoolean()) { + int numContext = randomIntBetween(1, 5); + CategoryQueryContext[] contexts = new CategoryQueryContext[numContext]; + for (int i = 0; i < numContext; i++) { + contexts[i] = CategoryQueryContextTests.randomCategoryQueryContext(); + } + testBuilder.categoryContexts(randomAsciiOfLength(10), contexts); + } + if (randomBoolean()) { + int numContext = randomIntBetween(1, 5); + GeoQueryContext[] contexts = new GeoQueryContext[numContext]; + for (int i = 0; i < numContext; i++) { + contexts[i] = GeoQueryContextTests.randomGeoQueryContext(); + } + testBuilder.geoContexts(randomAsciiOfLength(10), contexts); + } + return testBuilder; + } + + @Override + protected void assertSuggestionContext(SuggestionContext oldSuggestion, SuggestionContext newSuggestion) { + + } + + @Override + public void testBuild() throws IOException { + // skip for now + } + + @Override + public void testFromXContent() throws IOException { + // skip for now + } + + protected void mutateSpecificParameters(CompletionSuggestionBuilder builder) throws IOException { + switch (randomIntBetween(0, 5)) { + case 0: + List payloads = new ArrayList<>(); + Collections.addAll(payloads, generateRandomStringArray(5, 10, false, false)); + builder.payload(payloads); + break; + case 1: + int numCategoryContext = randomIntBetween(1, 5); + CategoryQueryContext[] categoryContexts = new CategoryQueryContext[numCategoryContext]; + for (int i = 0; i < numCategoryContext; i++) { + categoryContexts[i] = CategoryQueryContextTests.randomCategoryQueryContext(); + } + builder.categoryContexts(randomAsciiOfLength(10), categoryContexts); + break; + case 2: + int numGeoContext = randomIntBetween(1, 5); + GeoQueryContext[] geoContexts = new GeoQueryContext[numGeoContext]; + for (int i = 0; i < numGeoContext; i++) { + geoContexts[i] = GeoQueryContextTests.randomGeoQueryContext(); + } + builder.geoContexts(randomAsciiOfLength(10), geoContexts); + break; + case 3: + builder.prefix(randomAsciiOfLength(10), 
FuzzyOptionsTests.randomFuzzyOptions()); + break; + case 4: + builder.prefix(randomAsciiOfLength(10), randomFrom(Fuzziness.ZERO, Fuzziness.ONE, Fuzziness.TWO)); + break; + case 5: + builder.regex(randomAsciiOfLength(10), RegexOptionsTests.randomRegexOptions()); + break; + default: + throw new IllegalStateException("should not through"); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/FuzzyOptionsTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/FuzzyOptionsTests.java new file mode 100644 index 000000000000..848a9088bc32 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/FuzzyOptionsTests.java @@ -0,0 +1,131 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.suggest.completion; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.unit.Fuzziness; + +import java.io.IOException; + +public class FuzzyOptionsTests extends WritableTestCase { + + public static FuzzyOptions randomFuzzyOptions() { + final FuzzyOptions.Builder builder = FuzzyOptions.builder(); + if (randomBoolean()) { + maybeSet(builder::setFuzziness, randomFrom(Fuzziness.ZERO, Fuzziness.ONE, Fuzziness.TWO)); + } else { + maybeSet(builder::setFuzziness, randomFrom(0, 1, 2)); + } + maybeSet(builder::setFuzzyMinLength, randomIntBetween(0, 10)); + maybeSet(builder::setFuzzyPrefixLength, randomIntBetween(0, 10)); + maybeSet(builder::setMaxDeterminizedStates, randomIntBetween(1, 1000)); + maybeSet(builder::setTranspositions, randomBoolean()); + maybeSet(builder::setUnicodeAware, randomBoolean()); + return builder.build(); + } + + @Override + protected FuzzyOptions createTestModel() { + return randomFuzzyOptions(); + } + + @Override + protected FuzzyOptions createMutation(FuzzyOptions original) throws IOException { + final FuzzyOptions.Builder builder = FuzzyOptions.builder(); + builder.setFuzziness(original.getEditDistance()) + .setFuzzyPrefixLength(original.getFuzzyPrefixLength()) + .setFuzzyMinLength(original.getFuzzyMinLength()) + .setMaxDeterminizedStates(original.getMaxDeterminizedStates()) + .setTranspositions(original.isTranspositions()) + .setUnicodeAware(original.isUnicodeAware()); + switch (randomIntBetween(0, 5)) { + case 0: + builder.setFuzziness(randomValueOtherThan(original.getEditDistance(), () -> randomFrom(0, 1, 2))); + break; + case 1: + builder.setFuzzyPrefixLength(randomValueOtherThan(original.getFuzzyPrefixLength(), () -> + randomIntBetween(1, 3))); + break; + case 2: + builder.setFuzzyMinLength(randomValueOtherThan(original.getFuzzyMinLength(), () -> + randomIntBetween(1, 3))); + break; + case 3: + 
builder.setMaxDeterminizedStates(randomValueOtherThan(original.getMaxDeterminizedStates(), () -> + randomIntBetween(1, 10))); + break; + case 4: + builder.setTranspositions(!original.isTranspositions()); + break; + case 5: + builder.setUnicodeAware(!original.isUnicodeAware()); + break; + } + return builder.build(); + } + + @Override + protected FuzzyOptions readFrom(StreamInput in) throws IOException { + return FuzzyOptions.readFuzzyOptions(in); + } + + public void testIllegalArguments() { + final FuzzyOptions.Builder builder = FuzzyOptions.builder(); + try { + builder.setFuzziness(-randomIntBetween(1, Integer.MAX_VALUE)); + fail("fuzziness must be > 0"); + } catch (IllegalArgumentException e) { + assertEquals(e.getMessage(), "fuzziness must be between 0 and 2"); + } + try { + builder.setFuzziness(randomIntBetween(3, Integer.MAX_VALUE)); + fail("fuzziness must be < 2"); + } catch (IllegalArgumentException e) { + assertEquals(e.getMessage(), "fuzziness must be between 0 and 2"); + } + try { + builder.setFuzziness(null); + fail("fuzziness must not be null"); + } catch (NullPointerException e) { + assertEquals(e.getMessage(), "fuzziness must not be null"); + } + + try { + builder.setFuzzyMinLength(-randomIntBetween(1, Integer.MAX_VALUE)); + fail("fuzzyMinLength must be >= 0"); + } catch (IllegalArgumentException e) { + assertEquals(e.getMessage(), "fuzzyMinLength must not be negative"); + } + + try { + builder.setFuzzyPrefixLength(-randomIntBetween(1, Integer.MAX_VALUE)); + fail("fuzzyPrefixLength must be >= 0"); + } catch (IllegalArgumentException e) { + assertEquals(e.getMessage(), "fuzzyPrefixLength must not be negative"); + } + + try { + builder.setMaxDeterminizedStates(-randomIntBetween(1, Integer.MAX_VALUE)); + fail("max determinized state must be >= 0"); + } catch (IllegalArgumentException e) { + assertEquals(e.getMessage(), "maxDeterminizedStates must not be negative"); + } + } +} diff --git 
a/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoQueryContextTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoQueryContextTests.java new file mode 100644 index 000000000000..d26be5036e2f --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoQueryContextTests.java @@ -0,0 +1,136 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.suggest.completion; + +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.search.suggest.completion.context.GeoQueryContext; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +public class GeoQueryContextTests extends QueryContextTestCase { + + public static GeoQueryContext randomGeoQueryContext() { + final GeoQueryContext.Builder builder = GeoQueryContext.builder(); + builder.setGeoPoint(new GeoPoint(randomDouble(), randomDouble())); + maybeSet(builder::setBoost, randomIntBetween(1, 10)); + maybeSet(builder::setPrecision, randomIntBetween(1, 12)); + List neighbours = new ArrayList<>(); + for (int i = 0; i < randomIntBetween(1, 12); i++) { + neighbours.add(randomIntBetween(1, 12)); + } + maybeSet(builder::setNeighbours, neighbours); + return builder.build(); + } + + @Override + protected GeoQueryContext createTestModel() { + return randomGeoQueryContext(); + } + + @Override + protected GeoQueryContext createMutation(GeoQueryContext original) throws IOException { + final GeoQueryContext.Builder builder = GeoQueryContext.builder(); + builder.setGeoPoint(original.getGeoPoint()).setBoost(original.getBoost()) + .setNeighbours(original.getNeighbours()).setPrecision(original.getPrecision()); + switch (randomIntBetween(0, 3)) { + case 0: + builder.setGeoPoint(randomValueOtherThan(original.getGeoPoint() ,() -> + new GeoPoint(randomDouble(), randomDouble()))); + break; + case 1: + builder.setBoost(randomValueOtherThan(original.getBoost() ,() -> randomIntBetween(1, 5))); + break; + case 2: + builder.setPrecision(randomValueOtherThan(original.getPrecision() ,() -> randomIntBetween(1, 12))); + break; + case 3: + builder.setNeighbours(randomValueOtherThan(original.getNeighbours(), () -> { + List newNeighbours = new ArrayList<>(); + for (int i = 0; i < randomIntBetween(1, 12); i++) { + 
newNeighbours.add(randomIntBetween(1, 12)); + } + return newNeighbours; + })); + break; + } + return builder.build(); + } + + @Override + protected GeoQueryContext prototype() { + return GeoQueryContext.PROTOTYPE; + } + + public void testNullGeoPointIsIllegal() { + final GeoQueryContext geoQueryContext = randomGeoQueryContext(); + final GeoQueryContext.Builder builder = GeoQueryContext.builder() + .setNeighbours(geoQueryContext.getNeighbours()) + .setPrecision(geoQueryContext.getPrecision()) + .setBoost(geoQueryContext.getBoost()); + try { + builder.build(); + fail("null geo point is illegal"); + } catch (NullPointerException e) { + assertThat(e.getMessage(), equalTo("geoPoint must not be null")); + } + } + + public void testIllegalArguments() { + final GeoQueryContext.Builder builder = GeoQueryContext.builder(); + + try { + builder.setGeoPoint(null); + fail("geoPoint must not be null"); + } catch (NullPointerException e) { + assertEquals(e.getMessage(), "geoPoint must not be null"); + } + try { + builder.setBoost(-randomIntBetween(1, Integer.MAX_VALUE)); + fail("boost must be positive"); + } catch (IllegalArgumentException e) { + assertEquals(e.getMessage(), "boost must be greater than 0"); + } + int precision = 0; + try { + do { + precision = randomInt(); + } while (precision >= 1 && precision <= 12); + builder.setPrecision(precision); + } catch (IllegalArgumentException e) { + assertEquals(e.getMessage(), "precision must be between 1 and 12"); + } + try { + List neighbours = new ArrayList<>(); + neighbours.add(precision); + for (int i = 1; i < randomIntBetween(1, 11); i++) { + neighbours.add(i); + } + Collections.shuffle(neighbours, random()); + builder.setNeighbours(neighbours); + } catch (IllegalArgumentException e) { + assertEquals(e.getMessage(), "neighbour value must be between 1 and 12"); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/QueryContextTestCase.java 
b/core/src/test/java/org/elasticsearch/search/suggest/completion/QueryContextTestCase.java new file mode 100644 index 000000000000..b4a6a5b1da0b --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/QueryContextTestCase.java @@ -0,0 +1,60 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.suggest.completion; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.suggest.completion.context.QueryContext; + +import java.io.IOException; + + +public abstract class QueryContextTestCase extends WritableTestCase { + + private static final int NUMBER_OF_RUNS = 20; + + /** + * query context prototype to read serialized format + */ + protected abstract QC prototype(); + + @Override + protected QC readFrom(StreamInput in) throws IOException { + return (QC) prototype().readFrom(in); + } + + public void testToXContext() throws IOException { + for (int i = 0; i < NUMBER_OF_RUNS; i++) { + QueryContext toXContent = createTestModel(); + XContentBuilder builder = XContentFactory.jsonBuilder(); + toXContent.toXContent(builder, ToXContent.EMPTY_PARAMS); + BytesReference bytesReference = builder.bytes(); + XContentParser parser = XContentFactory.xContent(bytesReference).createParser(bytesReference); + parser.nextToken(); + QueryContext fromXContext = prototype().fromXContext(parser); + assertEquals(toXContent, fromXContext); + assertEquals(toXContent.hashCode(), fromXContext.hashCode()); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/RegexOptionsTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/RegexOptionsTests.java new file mode 100644 index 000000000000..082e2bc2687c --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/RegexOptionsTests.java @@ -0,0 +1,71 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.suggest.completion; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.index.query.RegexpFlag; + +import java.io.IOException; + +public class RegexOptionsTests extends WritableTestCase { + + public static RegexOptions randomRegexOptions() { + final RegexOptions.Builder builder = RegexOptions.builder(); + maybeSet(builder::setMaxDeterminizedStates, randomIntBetween(1, 1000)); + StringBuilder sb = new StringBuilder(); + for (RegexpFlag regexpFlag : RegexpFlag.values()) { + if (randomBoolean()) { + if (sb.length() != 0) { + sb.append("|"); + } + sb.append(regexpFlag.name()); + } + } + maybeSet(builder::setFlags, sb.toString()); + return builder.build(); + } + + @Override + protected RegexOptions createTestModel() { + return randomRegexOptions(); + } + + @Override + protected RegexOptions createMutation(RegexOptions original) throws IOException { + final RegexOptions.Builder builder = RegexOptions.builder(); + builder.setMaxDeterminizedStates(randomValueOtherThan(original.getMaxDeterminizedStates(), () -> randomIntBetween(1, 10))); + return builder.build(); + } + + @Override + protected RegexOptions readFrom(StreamInput in) throws IOException { + return RegexOptions.readRegexOptions(in); + } + + 
public void testIllegalArgument() { + final RegexOptions.Builder builder = RegexOptions.builder(); + try { + builder.setMaxDeterminizedStates(-randomIntBetween(1, Integer.MAX_VALUE)); + fail("max determinized state must be positive"); + } catch (IllegalArgumentException e) { + assertEquals(e.getMessage(), "maxDeterminizedStates must not be negative"); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/WritableTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/WritableTestCase.java index 47b337334259..68cc30f8de42 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/WritableTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/WritableTestCase.java @@ -103,13 +103,13 @@ public abstract class WritableTestCase extends ESTestCase { private M copyModel(M original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), provideNamedWritbaleRegistry())) { + try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), provideNamedWritableRegistry())) { return readFrom(in); } } } - protected NamedWriteableRegistry provideNamedWritbaleRegistry() { + protected NamedWriteableRegistry provideNamedWritableRegistry() { return new NamedWriteableRegistry(); } } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java index 9bf8447f8d80..28c064788111 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java @@ -296,12 +296,6 @@ public class DirectCandidateGeneratorTests extends ESTestCase{ return generator; } 
- private static void maybeSet(Consumer consumer, T value) { - if (randomBoolean()) { - consumer.accept(value); - } - } - private static DirectCandidateGeneratorBuilder serializedCopy(DirectCandidateGeneratorBuilder original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index e80bb93aeb7d..1637247ccf7a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -75,6 +75,8 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.function.BooleanSupplier; +import java.util.function.Consumer; +import java.util.function.Supplier; import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList; import static org.hamcrest.Matchers.equalTo; @@ -396,6 +398,26 @@ public abstract class ESTestCase extends LuceneTestCase { return randomTimeValue(1, 1000); } + /** + * helper to randomly perform on consumer with value + */ + public static void maybeSet(Consumer consumer, T value) { + if (randomBoolean()) { + consumer.accept(value); + } + } + + /** + * helper to get a random value in a certain range that's different from the input + */ + public static T randomValueOtherThan(T input, Supplier randomSupplier) { + T randomValue = null; + do { + randomValue = randomSupplier.get(); + } while (randomValue.equals(input)); + return randomValue; + } + /** * Runs the code block for 10 seconds waiting for no assertion to trip. 
*/ From f759604783d2f2d242d52b9fc230cf906ee5e6c9 Mon Sep 17 00:00:00 2001 From: Rhommel Date: Thu, 11 Feb 2016 13:29:10 +1100 Subject: [PATCH 022/320] Adds ES_INCLUDE on rpm --- distribution/rpm/src/main/packaging/init.d/elasticsearch | 1 + 1 file changed, 1 insertion(+) diff --git a/distribution/rpm/src/main/packaging/init.d/elasticsearch b/distribution/rpm/src/main/packaging/init.d/elasticsearch index 12fed7dbc335..c56944b7c3ca 100644 --- a/distribution/rpm/src/main/packaging/init.d/elasticsearch +++ b/distribution/rpm/src/main/packaging/init.d/elasticsearch @@ -66,6 +66,7 @@ export ES_JAVA_OPTS export ES_GC_LOG_FILE export ES_STARTUP_SLEEP_TIME export JAVA_HOME +export ES_INCLUDE lockfile=/var/lock/subsys/$prog From ef8ef2afe01372c362cd90137ee5f163a15f07a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 11 Feb 2016 11:41:34 +0100 Subject: [PATCH 023/320] Fixed small issue in SuggestBuilder#getText --- .../java/org/elasticsearch/search/suggest/SuggestBuilder.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java index 2852204bb6fc..6cf5175709b5 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java @@ -72,7 +72,7 @@ public class SuggestBuilder extends ToXContentToBytes implements Writeable Date: Fri, 29 Jan 2016 18:05:18 +0100 Subject: [PATCH 024/320] Add filtering support within Setting class Now we have a nice Setting infra, we can define in Setting class if a setting should be filtered or not. So when we register a setting, setting filtering would be automatically done. 
Instead of writing: ```java Setting KEY_SETTING = Setting.simpleString("cloud.aws.access_key", false, Setting.Scope.CLUSTER); settingsModule.registerSetting(AwsEc2Service.KEY_SETTING, false); settingsModule.registerSettingsFilterIfMissing(AwsEc2Service.KEY_SETTING.getKey()); ``` We could simply write: ```java Setting KEY_SETTING = Setting.simpleString("cloud.aws.access_key", false, Setting.Scope.CLUSTER, true); settingsModule.registerSettingsFilterIfMissing(AwsEc2Service.KEY_SETTING.getKey()); ``` It also removes `settingsModule.registerSettingsFilterIfMissing` method. The plan would be to remove as well `settingsModule.registerSettingsFilter` method but it still used with wildcards. For example in Azure Repository plugin: ```java module.registerSettingsFilter(AzureStorageService.Storage.PREFIX + "*.account"); module.registerSettingsFilter(AzureStorageService.Storage.PREFIX + "*.key"); ``` Closes #16598. --- .../common/settings/Setting.java | 70 ++++++++++++++++--- .../common/settings/SettingsModule.java | 11 ++- .../cluster/settings/SettingsFilteringIT.java | 14 ++-- .../common/settings/SettingsModuleTests.java | 3 +- .../DedicatedClusterSnapshotRestoreIT.java | 4 +- .../snapshots/mockstore/MockRepository.java | 10 ++- .../azure/management/AzureComputeService.java | 25 ++++--- .../discovery/azure/AzureDiscoveryPlugin.java | 5 -- .../cloud/aws/AwsEc2Service.java | 12 ++-- .../discovery/ec2/Ec2DiscoveryPlugin.java | 8 --- .../azure/storage/AzureStorageService.java | 33 ++++++--- .../azure/AzureRepositoryPlugin.java | 4 +- .../elasticsearch/cloud/aws/AwsS3Service.java | 12 ++-- .../repository/s3/S3RepositoryPlugin.java | 10 --- .../repositories/s3/S3Repository.java | 4 +- 15 files changed, 141 insertions(+), 84 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 7f64c0111335..e1349cc2a009 100644 --- 
a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -71,22 +71,50 @@ public class Setting extends ToXContentToBytes { private final Function parser; private final boolean dynamic; private final Scope scope; + private final boolean filtered; + + /** + * Creates a new Setting instance, unfiltered + * @param key the settings key for this setting. + * @param defaultValue a default value function that returns the default values string representation. + * @param parser a parser that parses the string rep into a complex datatype. + * @param dynamic true if this setting can be dynamically updateable + * @param scope the scope of this setting + */ + public Setting(String key, Function defaultValue, Function parser, boolean dynamic, Scope scope) { + this(key, defaultValue, parser, dynamic, scope, false); + } /** * Creates a new Setting instance * @param key the settings key for this setting. * @param defaultValue a default value function that returns the default values string representation. * @param parser a parser that parses the string rep into a complex datatype. - * @param dynamic true iff this setting can be dynamically updateable + * @param dynamic true if this setting can be dynamically updateable * @param scope the scope of this setting + * @param filtered true if this setting should be filtered */ - public Setting(String key, Function defaultValue, Function parser, boolean dynamic, Scope scope) { + public Setting(String key, Function defaultValue, Function parser, boolean dynamic, Scope scope, + boolean filtered) { assert parser.apply(defaultValue.apply(Settings.EMPTY)) != null || this.isGroupSetting(): "parser returned null"; this.key = key; this.defaultValue = defaultValue; this.parser = parser; this.dynamic = dynamic; this.scope = scope; + this.filtered = filtered; + } + + /** + * Creates a new Setting instance, unfiltered + * @param key the settings key for this setting. 
+ * @param fallBackSetting a setting to fall back to if the current setting is not set. + * @param parser a parser that parses the string rep into a complex datatype. + * @param dynamic true iff this setting can be dynamically updateable + * @param scope the scope of this setting + */ + public Setting(String key, Setting fallBackSetting, Function parser, boolean dynamic, Scope scope) { + this(key, fallBackSetting, parser, dynamic, scope, false); } /** @@ -96,9 +124,10 @@ public class Setting extends ToXContentToBytes { * @param parser a parser that parses the string rep into a complex datatype. * @param dynamic true iff this setting can be dynamically updateable * @param scope the scope of this setting + * @param filtered true if this setting should be filtered */ - public Setting(String key, Setting fallBackSetting, Function parser, boolean dynamic, Scope scope) { - this(key, fallBackSetting::getRaw, parser, dynamic, scope); + public Setting(String key, Setting fallBackSetting, Function parser, boolean dynamic, Scope scope, boolean filtered) { + this(key, fallBackSetting::getRaw, parser, dynamic, scope, filtered); } /** @@ -113,7 +142,7 @@ public class Setting extends ToXContentToBytes { } /** - * Returns true iff this setting is dynamically updateable, otherwise false + * Returns true if this setting is dynamically updateable, otherwise false */ public final boolean isDynamic() { return dynamic; @@ -126,6 +155,13 @@ public class Setting extends ToXContentToBytes { return scope; } + /** + * Returns true if this setting must be filtered, otherwise false + */ + public boolean isFiltered() { + return filtered; + } + /** * Returns true iff this setting is a group setting. Group settings represent a set of settings * rather than a single value. The key, see {@link #getKey()}, in contrast to non-group settings is a prefix like cluster.store. 
@@ -331,7 +367,11 @@ public class Setting extends ToXContentToBytes { public Setting(String key, String defaultValue, Function parser, boolean dynamic, Scope scope) { - this(key, (s) -> defaultValue, parser, dynamic, scope); + this(key, defaultValue, parser, dynamic, scope, false); + } + + public Setting(String key, String defaultValue, Function parser, boolean dynamic, Scope scope, boolean filtered) { + this(key, (s) -> defaultValue, parser, dynamic, scope, filtered); } public static Setting floatSetting(String key, float defaultValue, boolean dynamic, Scope scope) { @@ -357,11 +397,19 @@ public class Setting extends ToXContentToBytes { } public static Setting longSetting(String key, long defaultValue, long minValue, boolean dynamic, Scope scope) { - return new Setting<>(key, (s) -> Long.toString(defaultValue), (s) -> parseLong(s, minValue, key), dynamic, scope); + return longSetting(key, defaultValue, minValue, dynamic, scope, false); + } + + public static Setting longSetting(String key, long defaultValue, long minValue, boolean dynamic, Scope scope, boolean filtered) { + return new Setting<>(key, (s) -> Long.toString(defaultValue), (s) -> parseLong(s, minValue, key), dynamic, scope, filtered); } public static Setting simpleString(String key, boolean dynamic, Scope scope) { - return new Setting<>(key, "", Function.identity(), dynamic, scope); + return simpleString(key, dynamic, scope, false); + } + + public static Setting simpleString(String key, boolean dynamic, Scope scope, boolean filtered) { + return new Setting<>(key, s -> "", Function.identity(), dynamic, scope, filtered); } public static int parseInt(String s, int minValue, String key) { @@ -392,7 +440,11 @@ public class Setting extends ToXContentToBytes { } public static Setting boolSetting(String key, boolean defaultValue, boolean dynamic, Scope scope) { - return new Setting<>(key, (s) -> Boolean.toString(defaultValue), Booleans::parseBooleanExact, dynamic, scope); + return boolSetting(key, defaultValue, 
dynamic, scope, false); + } + + public static Setting boolSetting(String key, boolean defaultValue, boolean dynamic, Scope scope, boolean filtered) { + return new Setting<>(key, (s) -> Boolean.toString(defaultValue), Booleans::parseBooleanExact, dynamic, scope, filtered); } public static Setting boolSetting(String key, Setting fallbackSetting, boolean dynamic, Scope scope) { diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java index b06f53459c87..027e6e7cafeb 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java @@ -71,6 +71,11 @@ public class SettingsModule extends AbstractModule { * the setting during startup. */ public void registerSetting(Setting setting) { + if (setting.isFiltered()) { + if (settingsFilterPattern.contains(setting.getKey()) == false) { + registerSettingsFilter(setting.getKey()); + } + } switch (setting.getScope()) { case CLUSTER: if (clusterSettings.containsKey(setting.getKey())) { @@ -101,12 +106,6 @@ public class SettingsModule extends AbstractModule { settingsFilterPattern.add(filter); } - public void registerSettingsFilterIfMissing(String filter) { - if (settingsFilterPattern.contains(filter) == false) { - registerSettingsFilter(filter); - } - } - /** * Check if a setting has already been registered */ diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java index ed03c918c31b..dbf502d58055 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java @@ -50,6 +50,11 @@ public class SettingsFilteringIT extends ESIntegTestCase { } public static class SettingsFilteringPlugin extends Plugin { + public 
static final Setting SOME_NODE_SETTING = + Setting.boolSetting("some.node.setting", false, false, Setting.Scope.CLUSTER, true); + public static final Setting SOME_OTHER_NODE_SETTING = + Setting.boolSetting("some.other.node.setting", false, false, Setting.Scope.CLUSTER); + /** * The name of the plugin. */ @@ -72,10 +77,9 @@ public class SettingsFilteringIT extends ESIntegTestCase { } public void onModule(SettingsModule module) { + module.registerSetting(SOME_NODE_SETTING); + module.registerSetting(SOME_OTHER_NODE_SETTING); module.registerSetting(Setting.groupSetting("index.filter_test.", false, Setting.Scope.INDEX)); - module.registerSetting(Setting.boolSetting("some.node.setting", false, false, Setting.Scope.CLUSTER)); - module.registerSetting(Setting.boolSetting("some.other.node.setting", false, false, Setting.Scope.CLUSTER)); - module.registerSettingsFilter("some.node.setting"); module.registerSettingsFilter("index.filter_test.foo"); module.registerSettingsFilter("index.filter_test.bar*"); } @@ -104,8 +108,8 @@ public class SettingsFilteringIT extends ESIntegTestCase { for(NodeInfo info : nodeInfos.getNodes()) { Settings settings = info.getSettings(); assertNotNull(settings); - assertNull(settings.get("some.node.setting")); - assertTrue(settings.getAsBoolean("some.other.node.setting", false)); + assertNull(settings.get(SettingsFilteringPlugin.SOME_NODE_SETTING.getKey())); + assertTrue(settings.getAsBoolean(SettingsFilteringPlugin.SOME_OTHER_NODE_SETTING.getKey(), false)); assertEquals(settings.get("node.name"), info.getNode().getName()); } } diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java index 4f790c2d3a97..ce32be6c935e 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java @@ -132,11 +132,10 @@ public class 
SettingsModuleTests extends ModuleTestCase { Settings settings = Settings.builder().put("foo.bar", "false").put("bar.foo", false).put("bar.baz", false).build(); SettingsModule module = new SettingsModule(settings); module.registerSetting(Setting.boolSetting("foo.bar", true, false, Setting.Scope.CLUSTER)); - module.registerSetting(Setting.boolSetting("bar.foo", true, false, Setting.Scope.CLUSTER)); + module.registerSetting(Setting.boolSetting("bar.foo", true, false, Setting.Scope.CLUSTER, true)); module.registerSetting(Setting.boolSetting("bar.baz", true, false, Setting.Scope.CLUSTER)); module.registerSettingsFilter("foo.*"); - module.registerSettingsFilterIfMissing("bar.foo"); try { module.registerSettingsFilter("bar.foo"); fail(); diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index bd6c2533652d..51ae133f39ef 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -632,8 +632,8 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest client().admin().cluster().preparePutRepository("test-repo") .setType("mock").setSettings(Settings.settingsBuilder() .put("location", randomRepoPath()) - .put("secret.mock.username", "notsecretusername") - .put("secret.mock.password", "verysecretpassword") + .put(MockRepository.Plugin.USERNAME_SETTING.getKey(), "notsecretusername") + .put(MockRepository.Plugin.PASSWORD_SETTING.getKey(), "verysecretpassword") ).get(); RestGetRepositoriesAction getRepoAction = internalCluster().getInstance(RestGetRepositoriesAction.class); diff --git a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java index e92a28db86bb..1e3832907533 100644 --- 
a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java +++ b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; @@ -63,6 +64,11 @@ public class MockRepository extends FsRepository { public static class Plugin extends org.elasticsearch.plugins.Plugin { + public static final Setting USERNAME_SETTING = + Setting.simpleString("secret.mock.username", false, Setting.Scope.CLUSTER); + public static final Setting PASSWORD_SETTING = + Setting.simpleString("secret.mock.password", false, Setting.Scope.CLUSTER, true); + @Override public String name() { return "mock-repository"; @@ -78,8 +84,8 @@ public class MockRepository extends FsRepository { } public void onModule(SettingsModule module) { - module.registerSettingsFilter("secret.mock.password"); - + module.registerSetting(USERNAME_SETTING); + module.registerSetting(PASSWORD_SETTING); } } diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java index 0c665c138b8b..027caaccebc0 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java @@ -22,26 +22,35 @@ package org.elasticsearch.cloud.azure.management; import com.microsoft.windowsazure.core.utils.KeyStoreType; import 
com.microsoft.windowsazure.management.compute.models.HostedServiceGetDetailedResponse; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Scope; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.azure.AzureUnicastHostsProvider; public interface AzureComputeService { final class Management { - public static final Setting SUBSCRIPTION_ID_SETTING = Setting.simpleString("cloud.azure.management.subscription.id", false, Setting.Scope.CLUSTER); - public static final Setting SERVICE_NAME_SETTING = Setting.simpleString("cloud.azure.management.cloud.service.name", false, Setting.Scope.CLUSTER); + public static final Setting SUBSCRIPTION_ID_SETTING = + Setting.simpleString("cloud.azure.management.subscription.id", false, Scope.CLUSTER, true); + public static final Setting SERVICE_NAME_SETTING = + Setting.simpleString("cloud.azure.management.cloud.service.name", false, Scope.CLUSTER); // Keystore settings - public static final Setting KEYSTORE_PATH_SETTING = Setting.simpleString("cloud.azure.management.keystore.path", false, Setting.Scope.CLUSTER); - public static final Setting KEYSTORE_PASSWORD_SETTING = Setting.simpleString("cloud.azure.management.keystore.password", false, Setting.Scope.CLUSTER); - public static final Setting KEYSTORE_TYPE_SETTING = new Setting<>("cloud.azure.management.keystore.type", KeyStoreType.pkcs12.name(), KeyStoreType::fromString, false, Setting.Scope.CLUSTER); + public static final Setting KEYSTORE_PATH_SETTING = + Setting.simpleString("cloud.azure.management.keystore.path", false, Scope.CLUSTER, true); + public static final Setting KEYSTORE_PASSWORD_SETTING = + Setting.simpleString("cloud.azure.management.keystore.password", false, Scope.CLUSTER, true); + public static final Setting KEYSTORE_TYPE_SETTING = + new Setting<>("cloud.azure.management.keystore.type", KeyStoreType.pkcs12.name(), KeyStoreType::fromString, false, + Scope.CLUSTER, false); } final class 
Discovery { - public static final Setting REFRESH_SETTING = Setting.positiveTimeSetting("discovery.azure.refresh_interval", TimeValue.timeValueSeconds(0), false, Setting.Scope.CLUSTER); + public static final Setting REFRESH_SETTING = + Setting.positiveTimeSetting("discovery.azure.refresh_interval", TimeValue.timeValueSeconds(0), false, Scope.CLUSTER); - public static final Setting HOST_TYPE_SETTING = new Setting<>("discovery.azure.host.type", - AzureUnicastHostsProvider.HostType.PRIVATE_IP.name(), AzureUnicastHostsProvider.HostType::fromString, false, Setting.Scope.CLUSTER); + public static final Setting HOST_TYPE_SETTING = + new Setting<>("discovery.azure.host.type", AzureUnicastHostsProvider.HostType.PRIVATE_IP.name(), + AzureUnicastHostsProvider.HostType::fromString, false, Scope.CLUSTER); public static final String ENDPOINT_NAME = "discovery.azure.endpoint.name"; public static final String DEPLOYMENT_NAME = "discovery.azure.deployment.name"; diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/plugin/discovery/azure/AzureDiscoveryPlugin.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/plugin/discovery/azure/AzureDiscoveryPlugin.java index a8282dc9561e..a0b29fad024d 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/plugin/discovery/azure/AzureDiscoveryPlugin.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/plugin/discovery/azure/AzureDiscoveryPlugin.java @@ -74,10 +74,5 @@ public class AzureDiscoveryPlugin extends Plugin { settingsModule.registerSetting(AzureComputeService.Management.SUBSCRIPTION_ID_SETTING); settingsModule.registerSetting(AzureComputeService.Management.SERVICE_NAME_SETTING); settingsModule.registerSetting(AzureComputeService.Discovery.HOST_TYPE_SETTING); - // Cloud management API settings we need to hide - settingsModule.registerSettingsFilter(AzureComputeService.Management.KEYSTORE_PATH_SETTING.getKey()); - 
settingsModule.registerSettingsFilter(AzureComputeService.Management.KEYSTORE_PASSWORD_SETTING.getKey()); - settingsModule.registerSettingsFilter(AzureComputeService.Management.KEYSTORE_TYPE_SETTING.getKey()); - settingsModule.registerSettingsFilter(AzureComputeService.Management.SUBSCRIPTION_ID_SETTING.getKey()); } } diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java index a90d35734689..ce34dd61f40b 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java @@ -40,11 +40,11 @@ public interface AwsEc2Service { /** * cloud.aws.access_key: AWS Access key. Shared with repository-s3 plugin */ - Setting KEY_SETTING = Setting.simpleString("cloud.aws.access_key", false, Setting.Scope.CLUSTER); + Setting KEY_SETTING = Setting.simpleString("cloud.aws.access_key", false, Setting.Scope.CLUSTER, true); /** * cloud.aws.secret_key: AWS Secret key. Shared with repository-s3 plugin */ - Setting SECRET_SETTING = Setting.simpleString("cloud.aws.secret_key", false, Setting.Scope.CLUSTER); + Setting SECRET_SETTING = Setting.simpleString("cloud.aws.secret_key", false, Setting.Scope.CLUSTER, true); /** * cloud.aws.protocol: Protocol for AWS API: http or https. Defaults to https. Shared with repository-s3 plugin */ @@ -65,7 +65,7 @@ public interface AwsEc2Service { /** * cloud.aws.proxy.password: In case of proxy with auth, define the password. Shared with repository-s3 plugin */ - Setting PROXY_PASSWORD_SETTING = Setting.simpleString("cloud.aws.proxy.password", false, Setting.Scope.CLUSTER); + Setting PROXY_PASSWORD_SETTING = Setting.simpleString("cloud.aws.proxy.password", false, Setting.Scope.CLUSTER, true); /** * cloud.aws.signer: If you are using an old AWS API version, you can define a Signer. 
Shared with repository-s3 plugin */ @@ -84,13 +84,13 @@ public interface AwsEc2Service { * @see AwsEc2Service#KEY_SETTING */ Setting KEY_SETTING = new Setting<>("cloud.aws.ec2.access_key", AwsEc2Service.KEY_SETTING, Function.identity(), false, - Setting.Scope.CLUSTER); + Setting.Scope.CLUSTER, true); /** * cloud.aws.ec2.secret_key: AWS Secret key specific for EC2 API calls. Defaults to cloud.aws.secret_key. * @see AwsEc2Service#SECRET_SETTING */ Setting SECRET_SETTING = new Setting<>("cloud.aws.ec2.secret_key", AwsEc2Service.SECRET_SETTING, Function.identity(), false, - Setting.Scope.CLUSTER); + Setting.Scope.CLUSTER, true); /** * cloud.aws.ec2.protocol: Protocol for AWS API specific for EC2 API calls: http or https. Defaults to cloud.aws.protocol. * @see AwsEc2Service#PROTOCOL_SETTING @@ -122,7 +122,7 @@ public interface AwsEc2Service { * @see AwsEc2Service#PROXY_PASSWORD_SETTING */ Setting PROXY_PASSWORD_SETTING = new Setting<>("cloud.aws.ec2.proxy.password", AwsEc2Service.PROXY_PASSWORD_SETTING, - Function.identity(), false, Setting.Scope.CLUSTER); + Function.identity(), false, Setting.Scope.CLUSTER, true); /** * cloud.aws.ec2.signer: If you are using an old AWS API version, you can define a Signer. Specific for EC2 API calls. * Defaults to cloud.aws.signer. 
diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java index baad869a0aa3..211597ee454b 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java @@ -134,14 +134,6 @@ public class Ec2DiscoveryPlugin extends Plugin { settingsModule.registerSetting(AwsEc2Service.DISCOVERY_EC2.GROUPS_SETTING); settingsModule.registerSetting(AwsEc2Service.DISCOVERY_EC2.AVAILABILITY_ZONES_SETTING); settingsModule.registerSetting(AwsEc2Service.DISCOVERY_EC2.NODE_CACHE_TIME_SETTING); - - // Filter global settings - settingsModule.registerSettingsFilterIfMissing(AwsEc2Service.KEY_SETTING.getKey()); - settingsModule.registerSettingsFilterIfMissing(AwsEc2Service.SECRET_SETTING.getKey()); - settingsModule.registerSettingsFilterIfMissing(AwsEc2Service.PROXY_PASSWORD_SETTING.getKey()); - settingsModule.registerSettingsFilterIfMissing(AwsEc2Service.CLOUD_EC2.KEY_SETTING.getKey()); - settingsModule.registerSettingsFilterIfMissing(AwsEc2Service.CLOUD_EC2.SECRET_SETTING.getKey()); - settingsModule.registerSettingsFilterIfMissing(AwsEc2Service.CLOUD_EC2.PROXY_PASSWORD_SETTING.getKey()); } /** diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java index 657c292db310..2c5521887d8b 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java @@ -41,13 +41,20 @@ public interface AzureStorageService { final class Storage { public static final String PREFIX = 
"cloud.azure.storage."; - public static final Setting TIMEOUT_SETTING = Setting.timeSetting("cloud.azure.storage.timeout", TimeValue.timeValueMinutes(5), false, Setting.Scope.CLUSTER); - public static final Setting ACCOUNT_SETTING = Setting.simpleString("repositories.azure.account", false, Setting.Scope.CLUSTER); - public static final Setting CONTAINER_SETTING = Setting.simpleString("repositories.azure.container", false, Setting.Scope.CLUSTER); - public static final Setting BASE_PATH_SETTING = Setting.simpleString("repositories.azure.base_path", false, Setting.Scope.CLUSTER); - public static final Setting LOCATION_MODE_SETTING = Setting.simpleString("repositories.azure.location_mode", false, Setting.Scope.CLUSTER); - public static final Setting CHUNK_SIZE_SETTING = Setting.byteSizeSetting("repositories.azure.chunk_size", new ByteSizeValue(-1), false, Setting.Scope.CLUSTER); - public static final Setting COMPRESS_SETTING = Setting.boolSetting("repositories.azure.compress", false, false, Setting.Scope.CLUSTER); + public static final Setting TIMEOUT_SETTING = + Setting.timeSetting("cloud.azure.storage.timeout", TimeValue.timeValueMinutes(5), false, Setting.Scope.CLUSTER); + public static final Setting ACCOUNT_SETTING = + Setting.simpleString("repositories.azure.account", false, Setting.Scope.CLUSTER, true); + public static final Setting CONTAINER_SETTING = + Setting.simpleString("repositories.azure.container", false, Setting.Scope.CLUSTER); + public static final Setting BASE_PATH_SETTING = + Setting.simpleString("repositories.azure.base_path", false, Setting.Scope.CLUSTER); + public static final Setting LOCATION_MODE_SETTING = + Setting.simpleString("repositories.azure.location_mode", false, Setting.Scope.CLUSTER); + public static final Setting CHUNK_SIZE_SETTING = + Setting.byteSizeSetting("repositories.azure.chunk_size", new ByteSizeValue(-1), false, Setting.Scope.CLUSTER); + public static final Setting COMPRESS_SETTING = + 
Setting.boolSetting("repositories.azure.compress", false, false, Setting.Scope.CLUSTER); } boolean doesContainerExist(String account, LocationMode mode, String container); @@ -62,13 +69,17 @@ public interface AzureStorageService { void deleteBlob(String account, LocationMode mode, String container, String blob) throws URISyntaxException, StorageException; - InputStream getInputStream(String account, LocationMode mode, String container, String blob) throws URISyntaxException, StorageException; + InputStream getInputStream(String account, LocationMode mode, String container, String blob) + throws URISyntaxException, StorageException; - OutputStream getOutputStream(String account, LocationMode mode, String container, String blob) throws URISyntaxException, StorageException; + OutputStream getOutputStream(String account, LocationMode mode, String container, String blob) + throws URISyntaxException, StorageException; - Map listBlobsByPrefix(String account, LocationMode mode, String container, String keyPath, String prefix) throws URISyntaxException, StorageException; + Map listBlobsByPrefix(String account, LocationMode mode, String container, String keyPath, String prefix) + throws URISyntaxException, StorageException; - void moveBlob(String account, LocationMode mode, String container, String sourceBlob, String targetBlob) throws URISyntaxException, StorageException; + void moveBlob(String account, LocationMode mode, String container, String sourceBlob, String targetBlob) + throws URISyntaxException, StorageException; AzureStorageService start(); } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java b/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java index 616b150f9542..3ce043500ae3 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java +++ 
b/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java @@ -74,9 +74,9 @@ public class AzureRepositoryPlugin extends Plugin { module.registerSetting(AzureStorageService.Storage.BASE_PATH_SETTING); module.registerSetting(AzureStorageService.Storage.CHUNK_SIZE_SETTING); module.registerSetting(AzureStorageService.Storage.LOCATION_MODE_SETTING); - // Cloud storage API settings needed to be hidden + + // Cloud storage API settings using a pattern needed to be hidden module.registerSettingsFilter(AzureStorageService.Storage.PREFIX + "*.account"); module.registerSettingsFilter(AzureStorageService.Storage.PREFIX + "*.key"); - module.registerSettingsFilter(AzureStorageService.Storage.ACCOUNT_SETTING.getKey()); } } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java index 3ccd6d7987f8..3af9446fbe9f 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java @@ -38,11 +38,11 @@ public interface AwsS3Service extends LifecycleComponent { /** * cloud.aws.access_key: AWS Access key. Shared with discovery-ec2 plugin */ - Setting KEY_SETTING = Setting.simpleString("cloud.aws.access_key", false, Setting.Scope.CLUSTER); + Setting KEY_SETTING = Setting.simpleString("cloud.aws.access_key", false, Setting.Scope.CLUSTER, true); /** * cloud.aws.secret_key: AWS Secret key. Shared with discovery-ec2 plugin */ - Setting SECRET_SETTING = Setting.simpleString("cloud.aws.secret_key", false, Setting.Scope.CLUSTER); + Setting SECRET_SETTING = Setting.simpleString("cloud.aws.secret_key", false, Setting.Scope.CLUSTER, true); /** * cloud.aws.protocol: Protocol for AWS API: http or https. Defaults to https. 
Shared with discovery-ec2 plugin */ @@ -63,7 +63,7 @@ public interface AwsS3Service extends LifecycleComponent { /** * cloud.aws.proxy.password: In case of proxy with auth, define the password. Shared with discovery-ec2 plugin */ - Setting PROXY_PASSWORD_SETTING = Setting.simpleString("cloud.aws.proxy.password", false, Setting.Scope.CLUSTER); + Setting PROXY_PASSWORD_SETTING = Setting.simpleString("cloud.aws.proxy.password", false, Setting.Scope.CLUSTER, true); /** * cloud.aws.signer: If you are using an old AWS API version, you can define a Signer. Shared with discovery-ec2 plugin */ @@ -82,13 +82,13 @@ public interface AwsS3Service extends LifecycleComponent { * @see AwsS3Service#KEY_SETTING */ Setting KEY_SETTING = - new Setting<>("cloud.aws.s3.access_key", AwsS3Service.KEY_SETTING, Function.identity(), false, Setting.Scope.CLUSTER); + new Setting<>("cloud.aws.s3.access_key", AwsS3Service.KEY_SETTING, Function.identity(), false, Setting.Scope.CLUSTER, true); /** * cloud.aws.s3.secret_key: AWS Secret key specific for S3 API calls. Defaults to cloud.aws.secret_key. * @see AwsS3Service#SECRET_SETTING */ Setting SECRET_SETTING = - new Setting<>("cloud.aws.s3.secret_key", AwsS3Service.SECRET_SETTING, Function.identity(), false, Setting.Scope.CLUSTER); + new Setting<>("cloud.aws.s3.secret_key", AwsS3Service.SECRET_SETTING, Function.identity(), false, Setting.Scope.CLUSTER, true); /** * cloud.aws.s3.protocol: Protocol for AWS API specific for S3 API calls: http or https. Defaults to cloud.aws.protocol. * @see AwsS3Service#PROTOCOL_SETTING @@ -124,7 +124,7 @@ public interface AwsS3Service extends LifecycleComponent { */ Setting PROXY_PASSWORD_SETTING = new Setting<>("cloud.aws.s3.proxy.password", AwsS3Service.PROXY_PASSWORD_SETTING, Function.identity(), false, - Setting.Scope.CLUSTER); + Setting.Scope.CLUSTER, true); /** * cloud.aws.s3.signer: If you are using an old AWS API version, you can define a Signer. Specific for S3 API calls. * Defaults to cloud.aws.signer. 
diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java b/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java index 5d21bb4e2acc..d07d8c174c51 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java @@ -144,16 +144,6 @@ public class S3RepositoryPlugin extends Plugin { settingsModule.registerSetting(S3Repository.Repository.STORAGE_CLASS_SETTING); settingsModule.registerSetting(S3Repository.Repository.CANNED_ACL_SETTING); settingsModule.registerSetting(S3Repository.Repository.BASE_PATH_SETTING); - - // Filter global settings - settingsModule.registerSettingsFilterIfMissing(AwsS3Service.KEY_SETTING.getKey()); - settingsModule.registerSettingsFilterIfMissing(AwsS3Service.SECRET_SETTING.getKey()); - settingsModule.registerSettingsFilterIfMissing(AwsS3Service.PROXY_PASSWORD_SETTING.getKey()); - settingsModule.registerSettingsFilterIfMissing(AwsS3Service.CLOUD_S3.KEY_SETTING.getKey()); - settingsModule.registerSettingsFilterIfMissing(AwsS3Service.CLOUD_S3.SECRET_SETTING.getKey()); - settingsModule.registerSettingsFilterIfMissing(AwsS3Service.CLOUD_S3.PROXY_PASSWORD_SETTING.getKey()); - settingsModule.registerSettingsFilter(S3Repository.Repository.KEY_SETTING.getKey()); - settingsModule.registerSettingsFilter(S3Repository.Repository.SECRET_SETTING.getKey()); } /** diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index 3edead0765e2..6b3b5bf943a4 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -140,12 +140,12 @@ public class 
S3Repository extends BlobStoreRepository { * access_key * @see Repositories#KEY_SETTING */ - Setting KEY_SETTING = Setting.simpleString("access_key", false, Setting.Scope.CLUSTER); + Setting KEY_SETTING = Setting.simpleString("access_key", false, Setting.Scope.CLUSTER, true); /** * secret_key * @see Repositories#SECRET_SETTING */ - Setting SECRET_SETTING = Setting.simpleString("secret_key", false, Setting.Scope.CLUSTER); + Setting SECRET_SETTING = Setting.simpleString("secret_key", false, Setting.Scope.CLUSTER, true); /** * bucket * @see Repositories#BUCKET_SETTING From 764efd5b680b509f43e3e919dd34af29c7a09a8b Mon Sep 17 00:00:00 2001 From: "George P. Stathis" Date: Wed, 10 Feb 2016 23:16:45 -0500 Subject: [PATCH 025/320] Issue #16594: prevents built-in similarities from being redefined, allows users to define a "default" similarity type or falls back to "classic" if none is defined. --- .../index/similarity/SimilarityService.java | 5 ++- .../similarity/SimilarityIT.java | 43 +++++++++++++++++++ 2 files changed, 47 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java index e950ebda1b38..f5557b6ad4dd 100644 --- a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java +++ b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java @@ -63,6 +63,8 @@ public final class SimilarityService extends AbstractIndexComponent { Map similaritySettings = this.indexSettings.getSettings().getGroups(IndexModule.SIMILARITY_SETTINGS_PREFIX); for (Map.Entry entry : similaritySettings.entrySet()) { String name = entry.getKey(); + if(BUILT_IN.containsKey(name)) + throw new IllegalArgumentException("Cannot redefine built-in Similarity [" + name + "]"); Settings settings = entry.getValue(); String typeName = settings.get("type"); if (typeName == null) { @@ -78,7 +80,8 @@ public final class SimilarityService 
extends AbstractIndexComponent { } addSimilarities(similaritySettings, providers, DEFAULTS); this.similarities = providers; - defaultSimilarity = providers.get(SimilarityService.DEFAULT_SIMILARITY).get(); + defaultSimilarity = (providers.get("default") != null) ? providers.get("default").get() + : providers.get(SimilarityService.DEFAULT_SIMILARITY).get(); // Expert users can configure the base type as being different to default, but out-of-box we use default. baseSimilarity = (providers.get("base") != null) ? providers.get("base").get() : defaultSimilarity; diff --git a/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java b/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java index f6fa1fc621fa..badae75ae445 100644 --- a/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java +++ b/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java @@ -72,4 +72,47 @@ public class SimilarityIT extends ESIntegTestCase { assertThat(bm25Score, not(equalTo(defaultScore))); } + + // Tests #16594 + public void testCustomBM25SimilarityAsDefault() throws Exception { + try { + client().admin().indices().prepareDelete("test").execute().actionGet(); + } catch (Exception e) { + // ignore + } + + client().admin().indices().prepareCreate("test") + .addMapping("type1", jsonBuilder().startObject() + .startObject("type1") + .startObject("properties") + .startObject("field1") + .field("type", "string") + .endObject() + .startObject("field2") + .field("similarity", "custom") + .field("type", "string") + .endObject() + .endObject() + .endObject()) + .setSettings(Settings.settingsBuilder() + .put("index.number_of_shards", 1) + .put("index.number_of_replicas", 0) + .put("similarity.default.type", "BM25") + .put("similarity.custom.type", "classic") + ).execute().actionGet(); + + client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox jumped over the lazy dog", + "field2", "the quick brown fox jumped over the lazy dog") + 
.setRefresh(true).execute().actionGet(); + + SearchResponse bm25SearchResponse = client().prepareSearch().setQuery(matchQuery("field1", "quick brown fox")).execute().actionGet(); + assertThat(bm25SearchResponse.getHits().totalHits(), equalTo(1L)); + float bm25Score = bm25SearchResponse.getHits().hits()[0].score(); + + SearchResponse defaultSearchResponse = client().prepareSearch().setQuery(matchQuery("field2", "quick brown fox")).execute().actionGet(); + assertThat(defaultSearchResponse.getHits().totalHits(), equalTo(1L)); + float defaultScore = defaultSearchResponse.getHits().hits()[0].score(); + + assertThat(bm25Score, not(equalTo(defaultScore))); + } } From 551e6bd66f542f99d261b22e01011066ec007e83 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 23 Feb 2016 07:22:33 -0800 Subject: [PATCH 026/320] Mapping: Moved dynamic field handling in doc parsing to end of parsing Currently dynamic mappings propgate through call semantics, where deeper dynamic mappings are merged into higher level mappings through return values of recursive method calls. This makese it tricky to handle multiple updates in the same method, for example when trying to create parent object mappers dynamically for a field name that contains dots. This change makes the api for adding mappers a simple list of new mappers, and moves construction of the root level mapping update to the end of doc parsing. 
--- .../index/mapper/DocumentParser.java | 237 ++++++++++-------- .../elasticsearch/index/mapper/Mapper.java | 1 + .../index/mapper/ParseContext.java | 33 ++- .../index/mapper/DocumentParserTests.java | 124 +++++++++ .../index/mapper/DynamicMappingTests.java | 5 +- .../index/mapper/FieldTypeLookupTests.java | 115 +++------ .../object/SimpleObjectMappingTests.java | 34 ++- .../index/mapper/MockFieldMapper.java | 78 ++++++ 8 files changed, 411 insertions(+), 216 deletions(-) create mode 100644 test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index b0cdb993b78e..8f0ed3106f70 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -19,12 +19,21 @@ package org.elasticsearch.index.mapper; +import java.io.Closeable; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.CloseableThreadLocal; import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.FormatDateTimeFormatter; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.IndexSettings; @@ -48,15 +57,8 @@ import org.elasticsearch.index.mapper.object.ArrayValueMapperParser; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.RootObjectMapper; -import java.io.Closeable; -import java.io.IOException; -import java.util.Collections; -import 
java.util.HashSet; -import java.util.List; -import java.util.Set; - /** A parser for documents, given mappings from a DocumentMapper */ -class DocumentParser implements Closeable { +final class DocumentParser implements Closeable { private CloseableThreadLocal cache = new CloseableThreadLocal() { @Override @@ -120,10 +122,7 @@ class DocumentParser implements Closeable { // entire type is disabled parser.skipChildren(); } else if (emptyDoc == false) { - Mapper update = parseObject(context, mapping.root, true); - if (update != null) { - context.addDynamicMappingsUpdate(update); - } + parseObjectOrNested(context, mapping.root, true); } for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) { @@ -178,12 +177,7 @@ class DocumentParser implements Closeable { } } - Mapper rootDynamicUpdate = context.dynamicMappingsUpdate(); - Mapping update = null; - if (rootDynamicUpdate != null) { - update = mapping.mappingUpdate(rootDynamicUpdate); - } - + Mapping update = createDynamicUpdate(mapping, docMapper, context.getDynamicMappers()); ParsedDocument doc = new ParsedDocument(context.uid(), context.version(), context.id(), context.type(), source.routing(), source.timestamp(), source.ttl(), context.docs(), context.source(), update).parent(source.parent()); // reset the context to free up memory @@ -191,10 +185,89 @@ class DocumentParser implements Closeable { return doc; } - static ObjectMapper parseObject(ParseContext context, ObjectMapper mapper, boolean atRoot) throws IOException { + /** Creates a Mapping containing any dynamically added fields, or returns null if there were no dynamic mappings. */ + static Mapping createDynamicUpdate(Mapping mapping, DocumentMapper docMapper, List dynamicMappers) { + if (dynamicMappers.isEmpty()) { + return null; + } + // We build a mapping by first sorting the mappers, so that all mappers containing a common prefix + // will be processed in a contiguous block. 
When the prefix is no longer seen, we pop the extra elements + // off the stack, merging them upwards into the existing mappers. + Collections.sort(dynamicMappers, (Mapper o1, Mapper o2) -> o1.name().compareTo(o2.name())); + List parentMappers = new ArrayList<>(); + // create an empty root object which updates will be propagated into + RootObjectMapper.Builder rootBuilder = new RootObjectMapper.Builder(docMapper.type()); + RootObjectMapper.BuilderContext context = new RootObjectMapper.BuilderContext(Settings.EMPTY, new ContentPath()); + parentMappers.add(rootBuilder.build(context)); + Mapper previousMapper = null; + for (Mapper newMapper : dynamicMappers) { + if (previousMapper != null && newMapper.name().equals(previousMapper.name())) { + // We can see the same mapper more than once, for example, if we had foo.bar and foo.baz, where + // foo did not yet exist. This will create 2 copies in dynamic mappings, which should be identical. + // Here we just skip over the duplicates, but we merge them to ensure there are no conflicts. + newMapper.merge(previousMapper, false); + continue; + } + previousMapper = newMapper; + String[] nameParts = newMapper.name().split("\\."); + // find common elements with the previously processed dynamic mapper + int keepBefore = 1; + while (keepBefore < parentMappers.size() && + parentMappers.get(keepBefore).simpleName().equals(nameParts[keepBefore - 1])) { + ++keepBefore; + } + popMappers(parentMappers, keepBefore); + + // Add parent mappers that don't exist in dynamic mappers + while (keepBefore < nameParts.length) { + ObjectMapper parent = parentMappers.get(parentMappers.size() - 1); + Mapper newLast = parent.getMapper(nameParts[keepBefore - 1]); + if (newLast == null) { + String objectName = nameParts[keepBefore - 1]; + if (keepBefore > 1) { + // only prefix with parent mapper if the parent mapper isn't the root (which has a fake name) + objectName = parent.name() + '.' 
+ objectName; + } + newLast = docMapper.objectMappers().get(objectName); + } + assert newLast instanceof ObjectMapper; + parentMappers.add((ObjectMapper)newLast); + ++keepBefore; + } + + if (newMapper instanceof ObjectMapper) { + parentMappers.add((ObjectMapper)newMapper); + } else { + addToLastMapper(parentMappers, newMapper); + } + } + popMappers(parentMappers, 1); + assert parentMappers.size() == 1; + + return mapping.mappingUpdate(parentMappers.get(0)); + } + + private static void popMappers(List parentMappers, int keepBefore) { + assert keepBefore >= 1; // never remove the root mapper + // pop off parent mappers not needed by the current mapper, + // merging them backwards since they are immutable + for (int i = parentMappers.size() - 1; i >= keepBefore; --i) { + addToLastMapper(parentMappers, parentMappers.remove(i)); + } + } + + private static void addToLastMapper(List parentMappers, Mapper mapper) { + assert parentMappers.size() >= 1; + int lastIndex = parentMappers.size() - 1; + ObjectMapper withNewMapper = parentMappers.get(lastIndex).mappingUpdate(mapper); + ObjectMapper merged = parentMappers.get(lastIndex).merge(withNewMapper, false); + parentMappers.set(lastIndex, merged); + } + + static void parseObjectOrNested(ParseContext context, ObjectMapper mapper, boolean atRoot) throws IOException { if (mapper.isEnabled() == false) { context.parser().skipChildren(); - return null; + return; } XContentParser parser = context.parser(); @@ -205,7 +278,7 @@ class DocumentParser implements Closeable { XContentParser.Token token = parser.currentToken(); if (token == XContentParser.Token.VALUE_NULL) { // the object is null ("obj1" : null), simply bail - return null; + return; } if (token.isValue()) { @@ -245,9 +318,9 @@ class DocumentParser implements Closeable { while (token != XContentParser.Token.END_OBJECT) { ObjectMapper newUpdate = null; if (token == XContentParser.Token.START_OBJECT) { - newUpdate = parseObject(context, mapper, currentFieldName); + 
parseObject(context, mapper, currentFieldName); } else if (token == XContentParser.Token.START_ARRAY) { - newUpdate = parseArray(context, mapper, currentFieldName); + parseArray(context, mapper, currentFieldName); } else if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.VALUE_NULL) { @@ -255,7 +328,7 @@ class DocumentParser implements Closeable { } else if (token == null) { throw new MapperParsingException("object mapping for [" + mapper.name() + "] tried to parse field [" + currentFieldName + "] as object, but got EOF, has a concrete value been provided to it?"); } else if (token.isValue()) { - newUpdate = parseValue(context, mapper, currentFieldName, token); + parseValue(context, mapper, currentFieldName, token); } token = parser.nextToken(); if (newUpdate != null) { @@ -293,36 +366,31 @@ class DocumentParser implements Closeable { } } } - return update; } - private static Mapper parseObjectOrField(ParseContext context, Mapper mapper) throws IOException { + private static void parseObjectOrField(ParseContext context, Mapper mapper) throws IOException { if (mapper instanceof ObjectMapper) { - return parseObject(context, (ObjectMapper) mapper, false); + parseObjectOrNested(context, (ObjectMapper) mapper, false); } else { FieldMapper fieldMapper = (FieldMapper)mapper; Mapper update = fieldMapper.parse(context); + if (update != null) { + context.addDynamicMapper(update); + } if (fieldMapper.copyTo() != null) { parseCopyFields(context, fieldMapper, fieldMapper.copyTo().copyToFields()); } - return update; } } private static ObjectMapper parseObject(final ParseContext context, ObjectMapper mapper, String currentFieldName) throws IOException { - if (currentFieldName == null) { - throw new MapperParsingException("object mapping [" + mapper.name() + "] trying to serialize an object with no field associated with it, current value [" + context.parser().textOrNull() + "]"); - } + assert 
currentFieldName != null; context.path().add(currentFieldName); ObjectMapper update = null; Mapper objectMapper = mapper.getMapper(currentFieldName); if (objectMapper != null) { - final Mapper subUpdate = parseObjectOrField(context, objectMapper); - if (subUpdate != null) { - // propagate mapping update - update = mapper.mappingUpdate(subUpdate); - } + parseObjectOrField(context, objectMapper); } else { ObjectMapper.Dynamic dynamic = mapper.dynamic(); if (dynamic == null) { @@ -343,8 +411,9 @@ class DocumentParser implements Closeable { } Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path()); objectMapper = builder.build(builderContext); + context.addDynamicMapper(objectMapper); context.path().add(currentFieldName); - update = mapper.mappingUpdate(parseAndMergeUpdate(objectMapper, context)); + parseObjectOrField(context, objectMapper); } else { // not dynamic, read everything up to end object context.parser().skipChildren(); @@ -355,7 +424,7 @@ class DocumentParser implements Closeable { return update; } - private static ObjectMapper parseArray(ParseContext context, ObjectMapper parentMapper, String lastFieldName) throws IOException { + private static void parseArray(ParseContext context, ObjectMapper parentMapper, String lastFieldName) throws IOException { String arrayFieldName = lastFieldName; Mapper mapper = parentMapper.getMapper(lastFieldName); if (mapper != null) { @@ -363,15 +432,9 @@ class DocumentParser implements Closeable { // expects an array, if so we pass the context straight to the mapper and if not // we serialize the array components if (mapper instanceof ArrayValueMapperParser) { - final Mapper subUpdate = parseObjectOrField(context, mapper); - if (subUpdate != null) { - // propagate the mapping update - return parentMapper.mappingUpdate(subUpdate); - } else { - return null; - } + parseObjectOrField(context, mapper); } else { - return parseNonDynamicArray(context, parentMapper, lastFieldName, 
arrayFieldName); + parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName); } } else { @@ -384,31 +447,35 @@ class DocumentParser implements Closeable { } else if (dynamic == ObjectMapper.Dynamic.TRUE) { Mapper.Builder builder = context.root().findTemplateBuilder(context, arrayFieldName, "object"); if (builder == null) { - return parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName); + // TODO: shouldn't this create a default object mapper builder? + parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName); + return; } Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path()); mapper = builder.build(builderContext); - if (mapper != null && mapper instanceof ArrayValueMapperParser) { + assert mapper != null; + if (mapper instanceof ArrayValueMapperParser) { + context.addDynamicMapper(mapper); context.path().add(arrayFieldName); - mapper = parseAndMergeUpdate(mapper, context); - return parentMapper.mappingUpdate(mapper); + parseObjectOrField(context, mapper); } else { - return parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName); + parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName); } } else { - return parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName); + // TODO: shouldn't this skip, not parse? 
+ parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName); } } } - private static ObjectMapper parseNonDynamicArray(ParseContext context, ObjectMapper mapper, String lastFieldName, String arrayFieldName) throws IOException { + private static void parseNonDynamicArray(ParseContext context, ObjectMapper mapper, String lastFieldName, String arrayFieldName) throws IOException { XContentParser parser = context.parser(); XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.START_OBJECT) { - return parseObject(context, mapper, lastFieldName); + parseObject(context, mapper, lastFieldName); } else if (token == XContentParser.Token.START_ARRAY) { - return parseArray(context, mapper, lastFieldName); + parseArray(context, mapper, lastFieldName); } else if (token == XContentParser.Token.FIELD_NAME) { lastFieldName = parser.currentName(); } else if (token == XContentParser.Token.VALUE_NULL) { @@ -416,25 +483,20 @@ class DocumentParser implements Closeable { } else if (token == null) { throw new MapperParsingException("object mapping for [" + mapper.name() + "] with array for [" + arrayFieldName + "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?"); } else { - return parseValue(context, mapper, lastFieldName, token); + parseValue(context, mapper, lastFieldName, token); } } - return null; } - private static ObjectMapper parseValue(final ParseContext context, ObjectMapper parentMapper, String currentFieldName, XContentParser.Token token) throws IOException { + private static void parseValue(final ParseContext context, ObjectMapper parentMapper, String currentFieldName, XContentParser.Token token) throws IOException { if (currentFieldName == null) { throw new MapperParsingException("object mapping [" + parentMapper.name() + "] trying to serialize a value with no field associated with it, current value [" + context.parser().textOrNull() + "]"); 
} Mapper mapper = parentMapper.getMapper(currentFieldName); if (mapper != null) { - Mapper subUpdate = parseObjectOrField(context, mapper); - if (subUpdate == null) { - return null; - } - return parentMapper.mappingUpdate(subUpdate); + parseObjectOrField(context, mapper); } else { - return parseDynamicValue(context, parentMapper, currentFieldName, token); + parseDynamicValue(context, parentMapper, currentFieldName, token); } } @@ -602,7 +664,7 @@ class DocumentParser implements Closeable { throw new IllegalStateException("Can't handle serializing a dynamic type with content token [" + token + "] and field name [" + currentFieldName + "]"); } - private static ObjectMapper parseDynamicValue(final ParseContext context, ObjectMapper parentMapper, String currentFieldName, XContentParser.Token token) throws IOException { + private static void parseDynamicValue(final ParseContext context, ObjectMapper parentMapper, String currentFieldName, XContentParser.Token token) throws IOException { ObjectMapper.Dynamic dynamic = parentMapper.dynamic(); if (dynamic == null) { dynamic = dynamicOrDefault(context.root().dynamic()); @@ -611,7 +673,7 @@ class DocumentParser implements Closeable { throw new StrictDynamicMappingException(parentMapper.fullPath(), currentFieldName); } if (dynamic == ObjectMapper.Dynamic.FALSE) { - return null; + return; } final String path = context.path().pathAsText(currentFieldName); final Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path()); @@ -629,14 +691,9 @@ class DocumentParser implements Closeable { // try to not introduce a conflict mapper = mapper.updateFieldType(Collections.singletonMap(path, existingFieldType)); } + context.addDynamicMapper(mapper); - mapper = parseAndMergeUpdate(mapper, context); - - ObjectMapper update = null; - if (mapper != null) { - update = parentMapper.mappingUpdate(mapper); - } - return update; + parseObjectOrField(context, mapper); } /** Creates instances of the fields 
that the current field should be copied to */ @@ -674,8 +731,9 @@ class DocumentParser implements Closeable { // The path of the dest field might be completely different from the current one so we need to reset it context = context.overridePath(new ContentPath(0)); - String[] paths = Strings.splitStringToArray(field, '.'); - String fieldName = paths[paths.length-1]; + // TODO: why Strings.splitStringToArray instead of String.split? + final String[] paths = Strings.splitStringToArray(field, '.'); + final String fieldName = paths[paths.length-1]; ObjectMapper mapper = context.root(); ObjectMapper[] mappers = new ObjectMapper[paths.length-1]; if (paths.length > 1) { @@ -706,6 +764,7 @@ class DocumentParser implements Closeable { if (mapper.nested() != ObjectMapper.Nested.NO) { throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().pathAsText(paths[i]) + "]) through `copy_to`"); } + context.addDynamicMapper(mapper); break; case FALSE: // Maybe we should log something to tell the user that the copy_to is ignored in this case. @@ -720,36 +779,10 @@ class DocumentParser implements Closeable { parent = mapper; } } - ObjectMapper update = parseDynamicValue(context, mapper, fieldName, context.parser().currentToken()); - assert update != null; // we are parsing a dynamic value so we necessarily created a new mapping - - if (paths.length > 1) { - for (int i = paths.length - 2; i >= 0; i--) { - ObjectMapper parent = context.root(); - if (i > 0) { - parent = mappers[i-1]; - } - assert parent != null; - update = parent.mappingUpdate(update); - } - } - context.addDynamicMappingsUpdate(update); + parseDynamicValue(context, mapper, fieldName, context.parser().currentToken()); } } - /** - * Parse the given {@code context} with the given {@code mapper} and apply - * the potential mapping update in-place. This method is useful when - * composing mapping updates. 
- */ - private static M parseAndMergeUpdate(M mapper, ParseContext context) throws IOException { - final Mapper update = parseObjectOrField(context, mapper); - if (update != null) { - mapper = (M) mapper.merge(update, false); - } - return mapper; - } - private static ObjectMapper.Dynamic dynamicOrDefault(ObjectMapper.Dynamic dynamic) { return dynamic == null ? ObjectMapper.Dynamic.TRUE : dynamic; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java b/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java index 4dd43db05176..6a9a402a5ff4 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java @@ -76,6 +76,7 @@ public abstract class Mapper implements ToXContent, Iterable { return this.name; } + /** Returns a newly built mapper. */ public abstract Y build(BuilderContext context); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java b/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java index 938dd778b0e5..4b5372271b67 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java @@ -337,13 +337,13 @@ public abstract class ParseContext { } @Override - public void addDynamicMappingsUpdate(Mapper update) { - in.addDynamicMappingsUpdate(update); + public void addDynamicMapper(Mapper update) { + in.addDynamicMapper(update); } @Override - public Mapper dynamicMappingsUpdate() { - return in.dynamicMappingsUpdate(); + public List getDynamicMappers() { + return in.getDynamicMappers(); } } @@ -377,7 +377,7 @@ public abstract class ParseContext { private float docBoost = 1.0f; - private Mapper dynamicMappingsUpdate = null; + private List dynamicMappers = new ArrayList<>(); public InternalParseContext(@Nullable Settings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper, ContentPath path) { 
this.indexSettings = indexSettings; @@ -403,7 +403,7 @@ public abstract class ParseContext { this.path.reset(); this.allEntries = new AllEntries(); this.docBoost = 1.0f; - this.dynamicMappingsUpdate = null; + this.dynamicMappers = new ArrayList<>(); } @Override @@ -555,18 +555,13 @@ public abstract class ParseContext { } @Override - public void addDynamicMappingsUpdate(Mapper mapper) { - assert mapper instanceof RootObjectMapper : mapper; - if (dynamicMappingsUpdate == null) { - dynamicMappingsUpdate = mapper; - } else { - dynamicMappingsUpdate = dynamicMappingsUpdate.merge(mapper, false); - } + public void addDynamicMapper(Mapper mapper) { + dynamicMappers.add(mapper); } @Override - public Mapper dynamicMappingsUpdate() { - return dynamicMappingsUpdate; + public List getDynamicMappers() { + return dynamicMappers; } } @@ -770,12 +765,12 @@ public abstract class ParseContext { public abstract StringBuilder stringBuilder(); /** - * Add a dynamic update to the root object mapper. + * Add a new mapper dynamically created while parsing. */ - public abstract void addDynamicMappingsUpdate(Mapper update); + public abstract void addDynamicMapper(Mapper update); /** - * Get dynamic updates to the root object mapper. + * Get dynamic mappers created while parsing. 
*/ - public abstract Mapper dynamicMappingsUpdate(); + public abstract List getDynamicMappers(); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java index 3206a5e87aee..48684d50399a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -19,12 +19,20 @@ package org.elasticsearch.index.mapper; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.internal.UidFieldMapper; +import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.test.ESSingleNodeTestCase; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; + // TODO: make this a real unit test public class DocumentParserTests extends ESSingleNodeTestCase { @@ -61,4 +69,120 @@ public class DocumentParserTests extends ESSingleNodeTestCase { assertNotNull(doc.rootDoc().getField("bar")); assertNotNull(doc.rootDoc().getField(UidFieldMapper.NAME)); } + + public void testDotsAsObject() throws Exception { + DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + .startObject("foo").startObject("properties") + .startObject("bar").startObject("properties") + .startObject("baz").field("type", "integer") + .endObject().endObject().endObject().endObject().endObject().endObject().endObject().endObject().string(); + DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); + + 
BytesReference bytes = XContentFactory.jsonBuilder() + .startObject() + .field("foo.bar.baz", 123) + .startObject("foo") + .field("bar.baz", 456) + .endObject() + .startObject("foo.bar") + .field("baz", 789) + .endObject() + .endObject().bytes(); + ParsedDocument doc = mapper.parse("test", "type", "1", bytes); + String[] values = doc.rootDoc().getValues("foo.bar.baz"); + assertEquals(3, values.length); + assertEquals("123", values[0]); + assertEquals("456", values[1]); + assertEquals("789", values[2]); + } + + DocumentMapper createDummyMapping(MapperService mapperService) throws Exception { + String mapping = jsonBuilder().startObject().startObject("type").startObject("properties") + .startObject("a").startObject("properties") + .startObject("b").field("type", "object") + .endObject().endObject().endObject().endObject().endObject().endObject().string(); + + DocumentMapper defaultMapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping)); + return defaultMapper; + } + + // creates an object mapper, which is about 100x harder than it should be.... 
+ ObjectMapper createObjectMapper(MapperService mapperService, String name) throws Exception { + String[] nameParts = name.split("\\."); + ContentPath path = new ContentPath(); + for (int i = 0; i < nameParts.length - 1; ++i) { + path.add(nameParts[i]); + } + ParseContext context = new ParseContext.InternalParseContext(Settings.EMPTY, + mapperService.documentMapperParser(), mapperService.documentMapper("type"), path); + Mapper.Builder builder = new ObjectMapper.Builder(nameParts[nameParts.length - 1]).enabled(true); + Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path()); + return (ObjectMapper)builder.build(builderContext); + } + + public void testEmptyMappingUpdate() throws Exception { + DocumentMapper docMapper = createDummyMapping(createIndex("test").mapperService()); + assertNull(DocumentParser.createDynamicUpdate(docMapper.mapping(), docMapper, Collections.emptyList())); + } + + public void testSingleMappingUpdate() throws Exception { + DocumentMapper docMapper = createDummyMapping(createIndex("test").mapperService()); + List updates = Collections.singletonList(new MockFieldMapper("foo")); + Mapping mapping = DocumentParser.createDynamicUpdate(docMapper.mapping(), docMapper, updates); + assertNotNull(mapping.root().getMapper("foo")); + } + + public void testSubfieldMappingUpdate() throws Exception { + DocumentMapper docMapper = createDummyMapping(createIndex("test").mapperService()); + List updates = Collections.singletonList(new MockFieldMapper("a.foo")); + Mapping mapping = DocumentParser.createDynamicUpdate(docMapper.mapping(), docMapper, updates); + Mapper aMapper = mapping.root().getMapper("a"); + assertNotNull(aMapper); + assertTrue(aMapper instanceof ObjectMapper); + assertNotNull(((ObjectMapper)aMapper).getMapper("foo")); + } + + public void testMultipleSubfieldMappingUpdate() throws Exception { + DocumentMapper docMapper = createDummyMapping(createIndex("test").mapperService()); + List updates = new 
ArrayList<>(); + updates.add(new MockFieldMapper("a.foo")); + updates.add(new MockFieldMapper("a.bar")); + Mapping mapping = DocumentParser.createDynamicUpdate(docMapper.mapping(), docMapper, updates); + Mapper aMapper = mapping.root().getMapper("a"); + assertNotNull(aMapper); + assertTrue(aMapper instanceof ObjectMapper); + assertNotNull(((ObjectMapper)aMapper).getMapper("foo")); + assertNotNull(((ObjectMapper)aMapper).getMapper("bar")); + } + + public void testDeepSubfieldMappingUpdate() throws Exception { + DocumentMapper docMapper = createDummyMapping(createIndex("test").mapperService()); + List updates = Collections.singletonList(new MockFieldMapper("a.b.foo")); + Mapping mapping = DocumentParser.createDynamicUpdate(docMapper.mapping(), docMapper, updates); + Mapper aMapper = mapping.root().getMapper("a"); + assertNotNull(aMapper); + assertTrue(aMapper instanceof ObjectMapper); + Mapper bMapper = ((ObjectMapper)aMapper).getMapper("b"); + assertTrue(bMapper instanceof ObjectMapper); + assertNotNull(((ObjectMapper)bMapper).getMapper("foo")); + } + + public void testObjectMappingUpdate() throws Exception { + MapperService mapperService = createIndex("test").mapperService(); + DocumentMapper docMapper = createDummyMapping(mapperService); + List updates = new ArrayList<>(); + updates.add(createObjectMapper(mapperService, "foo")); + updates.add(createObjectMapper(mapperService, "foo.bar")); + updates.add(new MockFieldMapper("foo.bar.baz")); + updates.add(new MockFieldMapper("foo.field")); + Mapping mapping = DocumentParser.createDynamicUpdate(docMapper.mapping(), docMapper, updates); + Mapper fooMapper = mapping.root().getMapper("foo"); + assertNotNull(fooMapper); + assertTrue(fooMapper instanceof ObjectMapper); + assertNotNull(((ObjectMapper)fooMapper).getMapper("field")); + Mapper barMapper = ((ObjectMapper)fooMapper).getMapper("bar"); + assertTrue(barMapper instanceof ObjectMapper); + assertNotNull(((ObjectMapper)barMapper).getMapper("baz")); + } } diff --git 
a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index 0931120c177c..825487ce419f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -42,6 +42,7 @@ import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; +import java.util.List; import static java.util.Collections.emptyMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -211,7 +212,9 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { ctx.reset(XContentHelper.createParser(source.source()), new ParseContext.Document(), source); assertEquals(XContentParser.Token.START_OBJECT, ctx.parser().nextToken()); ctx.parser().nextToken(); - return DocumentParser.parseObject(ctx, mapper.root(), true); + DocumentParser.parseObjectOrNested(ctx, mapper.root(), true); + Mapping mapping = DocumentParser.createDynamicUpdate(mapper.mapping(), mapper, ctx.getDynamicMappers()); + return mapping == null ? 
null : mapping.root(); } public void testDynamicMappingsNotNeeded() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java index c5dbd653bfe3..cb9a64d357cd 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java @@ -19,12 +19,8 @@ package org.elasticsearch.index.mapper; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -59,7 +55,7 @@ public class FieldTypeLookupTests extends ESTestCase { public void testAddNewField() { FieldTypeLookup lookup = new FieldTypeLookup(); - FakeFieldMapper f = new FakeFieldMapper("foo"); + MockFieldMapper f = new MockFieldMapper("foo"); FieldTypeLookup lookup2 = lookup.copyAndAddAll("type", newList(f), randomBoolean()); assertNull(lookup.get("foo")); assertNull(lookup.get("bar")); @@ -73,8 +69,8 @@ public class FieldTypeLookupTests extends ESTestCase { } public void testAddExistingField() { - FakeFieldMapper f = new FakeFieldMapper("foo"); - FakeFieldMapper f2 = new FakeFieldMapper("foo"); + MockFieldMapper f = new MockFieldMapper("foo"); + MockFieldMapper f2 = new MockFieldMapper("foo"); FieldTypeLookup lookup = new FieldTypeLookup(); lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean()); FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean()); @@ -84,8 +80,8 @@ public class FieldTypeLookupTests extends ESTestCase { } public void testAddExistingIndexName() { - FakeFieldMapper f = new FakeFieldMapper("foo"); - FakeFieldMapper f2 = new FakeFieldMapper("bar"); + MockFieldMapper f = new 
MockFieldMapper("foo"); + MockFieldMapper f2 = new MockFieldMapper("bar"); FieldTypeLookup lookup = new FieldTypeLookup(); lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean()); FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean()); @@ -96,8 +92,8 @@ public class FieldTypeLookupTests extends ESTestCase { } public void testAddExistingFullName() { - FakeFieldMapper f = new FakeFieldMapper("foo"); - FakeFieldMapper f2 = new FakeFieldMapper("foo"); + MockFieldMapper f = new MockFieldMapper("foo"); + MockFieldMapper f2 = new MockFieldMapper("foo"); FieldTypeLookup lookup = new FieldTypeLookup(); try { lookup.copyAndAddAll("type2", newList(f2), randomBoolean()); @@ -107,12 +103,13 @@ public class FieldTypeLookupTests extends ESTestCase { } public void testCheckCompatibilityMismatchedTypes() { - FieldMapper f1 = new FakeFieldMapper("foo"); + FieldMapper f1 = new MockFieldMapper("foo"); FieldTypeLookup lookup = new FieldTypeLookup(); lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean()); - MappedFieldType ft2 = FakeFieldMapper.makeOtherFieldType("foo"); - FieldMapper f2 = new FakeFieldMapper("foo", ft2); + OtherFakeFieldType ft2 = new OtherFakeFieldType(); + ft2.setName("foo"); + FieldMapper f2 = new MockFieldMapper("foo", ft2); try { lookup.copyAndAddAll("type2", newList(f2), false); fail("expected type mismatch"); @@ -129,13 +126,14 @@ public class FieldTypeLookupTests extends ESTestCase { } public void testCheckCompatibilityConflict() { - FieldMapper f1 = new FakeFieldMapper("foo"); + FieldMapper f1 = new MockFieldMapper("foo"); FieldTypeLookup lookup = new FieldTypeLookup(); lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean()); - MappedFieldType ft2 = FakeFieldMapper.makeFieldType("foo"); + MappedFieldType ft2 = new MockFieldMapper.FakeFieldType(); + ft2.setName("foo"); ft2.setBoost(2.0f); - FieldMapper f2 = new FakeFieldMapper("foo", ft2); + FieldMapper f2 = new MockFieldMapper("foo", ft2); try 
{ // different type lookup.copyAndAddAll("type2", newList(f2), false); @@ -146,9 +144,10 @@ public class FieldTypeLookupTests extends ESTestCase { lookup.copyAndAddAll("type", newList(f2), false); // boost is updateable, so ok since we are implicitly updating all types lookup.copyAndAddAll("type2", newList(f2), true); // boost is updateable, so ok if forcing // now with a non changeable setting - MappedFieldType ft3 = FakeFieldMapper.makeFieldType("foo"); + MappedFieldType ft3 = new MockFieldMapper.FakeFieldType(); + ft3.setName("foo"); ft3.setStored(true); - FieldMapper f3 = new FakeFieldMapper("foo", ft3); + FieldMapper f3 = new MockFieldMapper("foo", ft3); try { lookup.copyAndAddAll("type2", newList(f3), false); fail("expected conflict"); @@ -165,8 +164,8 @@ public class FieldTypeLookupTests extends ESTestCase { } public void testSimpleMatchFullNames() { - FakeFieldMapper f1 = new FakeFieldMapper("foo"); - FakeFieldMapper f2 = new FakeFieldMapper("bar"); + MockFieldMapper f1 = new MockFieldMapper("foo"); + MockFieldMapper f2 = new MockFieldMapper("bar"); FieldTypeLookup lookup = new FieldTypeLookup(); lookup = lookup.copyAndAddAll("type", newList(f1, f2), randomBoolean()); Collection names = lookup.simpleMatchToFullName("b*"); @@ -175,7 +174,7 @@ public class FieldTypeLookupTests extends ESTestCase { } public void testIteratorImmutable() { - FakeFieldMapper f1 = new FakeFieldMapper("foo"); + MockFieldMapper f1 = new MockFieldMapper("foo"); FieldTypeLookup lookup = new FieldTypeLookup(); lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean()); @@ -194,59 +193,6 @@ public class FieldTypeLookupTests extends ESTestCase { return Arrays.asList(mapper); } - // this sucks how much must be overridden just do get a dummy field mapper... 
- static class FakeFieldMapper extends FieldMapper { - static Settings dummySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); - public FakeFieldMapper(String fullName) { - super(fullName, makeFieldType(fullName), makeFieldType(fullName), dummySettings, null, null); - } - public FakeFieldMapper(String fullName, MappedFieldType fieldType) { - super(fullName, fieldType, fieldType, dummySettings, null, null); - } - static MappedFieldType makeFieldType(String fullName) { - FakeFieldType fieldType = new FakeFieldType(); - fieldType.setName(fullName); - return fieldType; - } - static MappedFieldType makeOtherFieldType(String fullName) { - OtherFakeFieldType fieldType = new OtherFakeFieldType(); - fieldType.setName(fullName); - return fieldType; - } - static class FakeFieldType extends MappedFieldType { - public FakeFieldType() {} - protected FakeFieldType(FakeFieldType ref) { - super(ref); - } - @Override - public MappedFieldType clone() { - return new FakeFieldType(this); - } - @Override - public String typeName() { - return "faketype"; - } - } - static class OtherFakeFieldType extends MappedFieldType { - public OtherFakeFieldType() {} - protected OtherFakeFieldType(OtherFakeFieldType ref) { - super(ref); - } - @Override - public MappedFieldType clone() { - return new OtherFakeFieldType(this); - } - @Override - public String typeName() { - return "otherfaketype"; - } - } - @Override - protected String contentType() { return null; } - @Override - protected void parseCreateField(ParseContext context, List list) throws IOException {} - } - private int size(Iterator iterator) { if (iterator == null) { throw new NullPointerException("iterator"); @@ -258,4 +204,23 @@ public class FieldTypeLookupTests extends ESTestCase { } return count; } + + static class OtherFakeFieldType extends MappedFieldType { + public OtherFakeFieldType() { + } + + protected OtherFakeFieldType(OtherFakeFieldType ref) { + super(ref); + } + + @Override + 
public MappedFieldType clone() { + return new OtherFakeFieldType(this); + } + + @Override + public String typeName() { + return "otherfaketype"; + } + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/object/SimpleObjectMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/object/SimpleObjectMappingTests.java index 96d5559f4576..907616712a20 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/object/SimpleObjectMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/object/SimpleObjectMappingTests.java @@ -28,32 +28,28 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import static org.hamcrest.Matchers.containsString; -/** - */ public class SimpleObjectMappingTests extends ESSingleNodeTestCase { public void testDifferentInnerObjectTokenFailure() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .endObject().endObject().string(); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - try { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { defaultMapper.parse("test", "type", "1", new BytesArray(" {\n" + - " \"object\": {\n" + - " \"array\":[\n" + - " {\n" + - " \"object\": { \"value\": \"value\" }\n" + - " },\n" + - " {\n" + - " \"object\":\"value\"\n" + - " }\n" + - " ]\n" + - " },\n" + - " \"value\":\"value\"\n" + - " }")); - fail(); - } catch (MapperParsingException e) { - // all is well - } + " \"object\": {\n" + + " \"array\":[\n" + + " {\n" + + " \"object\": { \"value\": \"value\" }\n" + + " },\n" + + " {\n" + + " \"object\":\"value\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"value\":\"value\"\n" + + " }")); + }); + assertTrue(e.getMessage(), e.getMessage().contains("different type")); } public void testEmptyArrayProperties() throws Exception { diff --git 
a/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java new file mode 100644 index 000000000000..fec3f312e5c5 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper; + +import java.io.IOException; +import java.util.List; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Settings; + +// this sucks how much must be overridden just do get a dummy field mapper... 
+public class MockFieldMapper extends FieldMapper { + static Settings dummySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + + public MockFieldMapper(String fullName) { + this(fullName, new FakeFieldType()); + } + + public MockFieldMapper(String fullName, MappedFieldType fieldType) { + super(findSimpleName(fullName), setName(fullName, fieldType), setName(fullName, fieldType), dummySettings, null, null); + } + + static MappedFieldType setName(String fullName, MappedFieldType fieldType) { + fieldType.setName(fullName); + return fieldType; + } + + static String findSimpleName(String fullName) { + int ndx = fullName.lastIndexOf('.'); + return fullName.substring(ndx + 1); + } + + static class FakeFieldType extends MappedFieldType { + public FakeFieldType() { + } + + protected FakeFieldType(FakeFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new FakeFieldType(this); + } + + @Override + public String typeName() { + return "faketype"; + } + } + + @Override + protected String contentType() { + return null; + } + + @Override + protected void parseCreateField(ParseContext context, List list) throws IOException { + } +} From 08905be2cacca70bc4d4ae85408c0b07b45c4e56 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Wed, 24 Feb 2016 14:05:09 -0800 Subject: [PATCH 027/320] Revert javadoc change: iff is correct --- .../main/java/org/elasticsearch/common/settings/Setting.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index e1349cc2a009..1e0e2190fcc9 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -78,7 +78,7 @@ public class Setting extends ToXContentToBytes { * @param key the settings key for this setting. 
* @param defaultValue a default value function that returns the default values string representation. * @param parser a parser that parses the string rep into a complex datatype. - * @param dynamic true if this setting can be dynamically updateable + * @param dynamic true iff this setting can be dynamically updateable * @param scope the scope of this setting */ public Setting(String key, Function defaultValue, Function parser, boolean dynamic, Scope scope) { @@ -90,7 +90,7 @@ public class Setting extends ToXContentToBytes { * @param key the settings key for this setting. * @param defaultValue a default value function that returns the default values string representation. * @param parser a parser that parses the string rep into a complex datatype. - * @param dynamic true if this setting can be dynamically updateable + * @param dynamic true iff this setting can be dynamically updateable * @param scope the scope of this setting * @param filtered true if this setting should be filtered */ From f953d34cba0fdba7a59a92ae3ff76fce0c64c1b2 Mon Sep 17 00:00:00 2001 From: "George P. Stathis" Date: Thu, 25 Feb 2016 01:35:16 -0500 Subject: [PATCH 028/320] Adds unit tests for #16594 and removes prior integration tests. Throws exception on redefining built-in similarities only for indices created on or after v3. 
--- .../index/similarity/SimilarityService.java | 9 +++- .../similarity/SimilarityServiceTests.java | 49 +++++++++++++++++++ .../similarity/SimilarityIT.java | 43 ---------------- 3 files changed, 57 insertions(+), 44 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java diff --git a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java index f5557b6ad4dd..7ad7dfad3a06 100644 --- a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java +++ b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.similarity; import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper; import org.apache.lucene.search.similarities.Similarity; +import org.elasticsearch.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexModule; @@ -63,8 +64,10 @@ public final class SimilarityService extends AbstractIndexComponent { Map similaritySettings = this.indexSettings.getSettings().getGroups(IndexModule.SIMILARITY_SETTINGS_PREFIX); for (Map.Entry entry : similaritySettings.entrySet()) { String name = entry.getKey(); - if(BUILT_IN.containsKey(name)) + // Starting with v3.0 indices, it should no longer be possible to redefine built-in similarities + if(BUILT_IN.containsKey(name) && indexSettings.getIndexVersionCreated().onOrAfter(Version.V_3_0_0)) { throw new IllegalArgumentException("Cannot redefine built-in Similarity [" + name + "]"); + } Settings settings = entry.getValue(); String typeName = settings.get("type"); if (typeName == null) { @@ -109,6 +112,10 @@ public final class SimilarityService extends AbstractIndexComponent { return similarities.get(name); } + public SimilarityProvider getDefaultSimilarity() { + return 
similarities.get("default"); + } + static class PerFieldSimilarity extends PerFieldSimilarityWrapper { private final Similarity defaultSimilarity; diff --git a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java new file mode 100644 index 000000000000..0ec7dc5d64de --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java @@ -0,0 +1,49 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.index.similarity; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.IndexSettingsModule; + +import java.util.Collections; + +public class SimilarityServiceTests extends ESTestCase { + + // Tests #16594 + public void testOverrideBuiltInSimilarity() { + Settings settings = Settings.builder().put("index.similarity.BM25.type", "classic").build(); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", settings); + try { + new SimilarityService(indexSettings, Collections.emptyMap()); + fail("can't override bm25"); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Cannot redefine built-in Similarity [BM25]"); + } + } + + // Tests #16594 + public void testDefaultSimilarity() { + Settings settings = Settings.builder().put("index.similarity.default.type", "BM25").build(); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", settings); + SimilarityService service = new SimilarityService(indexSettings, Collections.emptyMap()); + assertTrue(service.getDefaultSimilarity() instanceof BM25SimilarityProvider); + } +} diff --git a/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java b/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java index badae75ae445..f6fa1fc621fa 100644 --- a/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java +++ b/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java @@ -72,47 +72,4 @@ public class SimilarityIT extends ESIntegTestCase { assertThat(bm25Score, not(equalTo(defaultScore))); } - - // Tests #16594 - public void testCustomBM25SimilarityAsDefault() throws Exception { - try { - client().admin().indices().prepareDelete("test").execute().actionGet(); - } catch (Exception e) { - // ignore - } - - client().admin().indices().prepareCreate("test") - .addMapping("type1", 
jsonBuilder().startObject() - .startObject("type1") - .startObject("properties") - .startObject("field1") - .field("type", "string") - .endObject() - .startObject("field2") - .field("similarity", "custom") - .field("type", "string") - .endObject() - .endObject() - .endObject()) - .setSettings(Settings.settingsBuilder() - .put("index.number_of_shards", 1) - .put("index.number_of_replicas", 0) - .put("similarity.default.type", "BM25") - .put("similarity.custom.type", "classic") - ).execute().actionGet(); - - client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox jumped over the lazy dog", - "field2", "the quick brown fox jumped over the lazy dog") - .setRefresh(true).execute().actionGet(); - - SearchResponse bm25SearchResponse = client().prepareSearch().setQuery(matchQuery("field1", "quick brown fox")).execute().actionGet(); - assertThat(bm25SearchResponse.getHits().totalHits(), equalTo(1L)); - float bm25Score = bm25SearchResponse.getHits().hits()[0].score(); - - SearchResponse defaultSearchResponse = client().prepareSearch().setQuery(matchQuery("field2", "quick brown fox")).execute().actionGet(); - assertThat(defaultSearchResponse.getHits().totalHits(), equalTo(1L)); - float defaultScore = defaultSearchResponse.getHits().hits()[0].score(); - - assertThat(bm25Score, not(equalTo(defaultScore))); - } } From 31b5e0888f4414b6ba863778436e177b6e82eea5 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Sun, 28 Feb 2016 00:40:00 +0100 Subject: [PATCH 029/320] Use an SettingsProperty enumSet Instead of modifying methods each time we need to add a new behavior for settings, we can simply pass `SettingsProperty... properties` instead. `SettingsProperty` could be defined then: ``` public enum SettingsProperty { Filtered, Dynamic, ClusterScope, NodeScope, IndexScope // HereGoesYours; } ``` Then in setting code, it become much more flexible. 
TODO: Note that we need to validate SettingsProperty which are added to a Setting as some of them might be mutually exclusive. --- .../close/TransportCloseIndexAction.java | 4 +- .../action/support/AutoCreateIndex.java | 4 +- .../action/support/DestructiveOperations.java | 4 +- .../master/TransportMasterNodeReadAction.java | 4 +- .../bootstrap/BootstrapSettings.java | 13 +- .../cache/recycler/PageCacheRecycler.java | 19 +- .../java/org/elasticsearch/client/Client.java | 3 +- .../TransportClientNodesService.java | 13 +- .../elasticsearch/cluster/ClusterModule.java | 4 +- .../elasticsearch/cluster/ClusterName.java | 3 +- .../cluster/InternalClusterInfoService.java | 8 +- .../action/index/MappingUpdatedAction.java | 4 +- .../cluster/metadata/AutoExpandReplicas.java | 3 +- .../cluster/metadata/IndexMetaData.java | 43 ++-- .../cluster/metadata/MetaData.java | 4 +- .../cluster/routing/UnassignedInfo.java | 5 +- .../allocator/BalancedShardsAllocator.java | 10 +- .../decider/AwarenessAllocationDecider.java | 8 +- .../ClusterRebalanceAllocationDecider.java | 5 +- .../ConcurrentRebalanceAllocationDecider.java | 4 +- .../decider/DiskThresholdDecider.java | 17 +- .../decider/EnableAllocationDecider.java | 13 +- .../decider/FilterAllocationDecider.java | 10 +- .../decider/ShardsLimitAllocationDecider.java | 7 +- .../SnapshotInProgressAllocationDecider.java | 4 +- .../decider/ThrottlingAllocationDecider.java | 22 +- .../service/InternalClusterService.java | 9 +- .../common/logging/ESLoggerFactory.java | 7 +- .../common/network/NetworkModule.java | 12 +- .../common/network/NetworkService.java | 42 ++-- .../settings/AbstractScopedSettings.java | 16 +- .../common/settings/ClusterSettings.java | 3 +- .../common/settings/IndexScopedSettings.java | 9 +- .../common/settings/Setting.java | 233 +++++++++--------- .../common/settings/SettingsModule.java | 34 ++- .../common/util/concurrent/EsExecutors.java | 4 +- .../common/util/concurrent/ThreadContext.java | 4 +- 
.../discovery/DiscoveryModule.java | 10 +- .../discovery/DiscoveryService.java | 8 +- .../discovery/DiscoverySettings.java | 15 +- .../discovery/zen/ZenDiscovery.java | 34 ++- .../zen/elect/ElectMasterService.java | 4 +- .../discovery/zen/fd/FaultDetection.java | 17 +- .../zen/ping/unicast/UnicastZenPing.java | 8 +- .../org/elasticsearch/env/Environment.java | 22 +- .../elasticsearch/env/NodeEnvironment.java | 8 +- .../elasticsearch/gateway/GatewayService.java | 29 +-- .../gateway/PrimaryShardAllocator.java | 9 +- .../http/HttpTransportSettings.java | 65 +++-- .../http/netty/NettyHttpServerTransport.java | 56 ++--- .../org/elasticsearch/index/IndexModule.java | 10 +- .../elasticsearch/index/IndexSettings.java | 48 ++-- .../org/elasticsearch/index/IndexWarmer.java | 3 +- .../elasticsearch/index/IndexingSlowLog.java | 26 +- .../index/MergePolicyConfig.java | 29 ++- .../index/MergeSchedulerConfig.java | 15 +- .../elasticsearch/index/SearchSlowLog.java | 40 ++- .../index/analysis/NamedAnalyzer.java | 2 +- .../index/analysis/NumericDoubleAnalyzer.java | 2 +- .../index/analysis/NumericFloatAnalyzer.java | 2 +- .../index/analysis/NumericLongAnalyzer.java | 2 +- .../index/cache/bitset/BitsetFilterCache.java | 4 +- .../index/engine/EngineConfig.java | 3 +- .../fielddata/IndexFieldDataService.java | 3 +- .../index/mapper/FieldMapper.java | 7 +- .../index/mapper/MapperService.java | 7 +- .../index/mapper/core/NumberFieldMapper.java | 5 +- .../percolator/PercolatorQueriesRegistry.java | 4 +- .../index/store/FsDirectoryService.java | 3 +- .../elasticsearch/index/store/IndexStore.java | 7 +- .../index/store/IndexStoreConfig.java | 8 +- .../org/elasticsearch/index/store/Store.java | 4 +- .../indices/IndicesQueryCache.java | 7 +- .../indices/IndicesRequestCache.java | 13 +- .../elasticsearch/indices/IndicesService.java | 4 +- .../indices/analysis/HunspellService.java | 10 +- .../HierarchyCircuitBreakerService.java | 22 +- .../cache/IndicesFieldDataCache.java | 4 +- 
.../indices/recovery/RecoverySettings.java | 27 +- .../indices/store/IndicesStore.java | 5 +- .../indices/ttl/IndicesTTLService.java | 4 +- .../elasticsearch/monitor/fs/FsService.java | 4 +- .../monitor/jvm/JvmGcMonitorService.java | 10 +- .../elasticsearch/monitor/jvm/JvmService.java | 4 +- .../elasticsearch/monitor/os/OsService.java | 4 +- .../monitor/process/ProcessService.java | 4 +- .../java/org/elasticsearch/node/Node.java | 25 +- .../internal/InternalSettingsPreparer.java | 4 +- .../elasticsearch/plugins/PluginsService.java | 4 +- .../repositories/fs/FsRepository.java | 18 +- .../repositories/uri/URLRepository.java | 26 +- .../elasticsearch/rest/BaseRestHandler.java | 4 +- .../elasticsearch/script/ScriptService.java | 10 +- .../elasticsearch/script/ScriptSettings.java | 9 +- .../elasticsearch/search/SearchService.java | 10 +- .../elasticsearch/threadpool/ThreadPool.java | 4 +- .../elasticsearch/transport/Transport.java | 3 +- .../transport/TransportService.java | 11 +- .../transport/TransportSettings.java | 20 +- .../transport/netty/NettyTransport.java | 81 +++--- .../org/elasticsearch/tribe/TribeService.java | 28 ++- .../cluster/ClusterModuleTests.java | 5 +- .../cluster/settings/SettingsFilteringIT.java | 12 +- .../common/settings/ScopedSettingsTests.java | 26 +- .../common/settings/SettingTests.java | 58 +++-- .../common/settings/SettingsModuleTests.java | 11 +- .../elasticsearch/index/IndexModuleTests.java | 5 +- .../index/IndexSettingsTests.java | 9 +- .../index/SettingsListenerIT.java | 5 +- .../indices/IndicesOptionsIntegrationIT.java | 10 +- .../RandomExceptionCircuitBreakerIT.java | 7 +- .../basic/SearchWithRandomExceptionsIT.java | 7 +- .../snapshots/mockstore/MockRepository.java | 10 +- .../azure/management/AzureComputeService.java | 18 +- .../cloud/aws/AwsEc2Service.java | 58 +++-- .../mapper/attachments/AttachmentMapper.java | 10 +- .../azure/storage/AzureStorageService.java | 16 +- .../repositories/azure/AzureRepository.java | 18 +- 
.../elasticsearch/cloud/aws/AwsS3Service.java | 46 ++-- .../repositories/s3/S3Repository.java | 70 +++--- .../elasticsearch/test/ESIntegTestCase.java | 4 +- .../test/InternalSettingsPlugin.java | 10 +- .../test/MockIndexEventListener.java | 3 +- .../test/engine/MockEngineSupport.java | 7 +- .../test/store/MockFSDirectoryService.java | 18 +- .../test/store/MockFSIndexStore.java | 4 +- .../test/tasks/MockTaskManager.java | 4 +- .../transport/AssertingLocalTransport.java | 11 +- 128 files changed, 1228 insertions(+), 743 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java index f8bbebf7db88..82602a10c008 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java @@ -33,6 +33,7 @@ import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -46,7 +47,8 @@ public class TransportCloseIndexAction extends TransportMasterNodeAction CLUSTER_INDICES_CLOSE_ENABLE_SETTING = Setting.boolSetting("cluster.indices.close.enable", true, true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_INDICES_CLOSE_ENABLE_SETTING = + Setting.boolSetting("cluster.indices.close.enable", true, true, SettingsProperty.ClusterScope); @Inject public TransportCloseIndexAction(Settings settings, TransportService transportService, ClusterService clusterService, diff --git 
a/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java b/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java index d5574755346f..ee304dd05f2b 100644 --- a/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java +++ b/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.MapperService; @@ -39,7 +40,8 @@ import java.util.List; */ public final class AutoCreateIndex { - public static final Setting AUTO_CREATE_INDEX_SETTING = new Setting<>("action.auto_create_index", "true", AutoCreate::new, false, Setting.Scope.CLUSTER); + public static final Setting AUTO_CREATE_INDEX_SETTING = + new Setting<>("action.auto_create_index", "true", AutoCreate::new, false, SettingsProperty.ClusterScope); private final boolean dynamicMappingDisabled; private final IndexNameExpressionResolver resolver; diff --git a/core/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java b/core/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java index 5f2fb33e043a..cfdd45cdfa1e 100644 --- a/core/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java +++ b/core/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; /** @@ -33,7 +34,8 @@ public final 
class DestructiveOperations extends AbstractComponent { /** * Setting which controls whether wildcard usage (*, prefix*, _all) is allowed. */ - public static final Setting REQUIRES_NAME_SETTING = Setting.boolSetting("action.destructive_requires_name", false, true, Setting.Scope.CLUSTER); + public static final Setting REQUIRES_NAME_SETTING = + Setting.boolSetting("action.destructive_requires_name", false, true, SettingsProperty.ClusterScope); private volatile boolean destructiveRequiresName; @Inject diff --git a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java index ed08e5bdba35..5c15acbbdca4 100644 --- a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -37,7 +38,8 @@ import java.util.function.Supplier; public abstract class TransportMasterNodeReadAction, Response extends ActionResponse> extends TransportMasterNodeAction { - public static final Setting FORCE_LOCAL_SETTING = Setting.boolSetting("action.master.force_local", false, false, Setting.Scope.CLUSTER); + public static final Setting FORCE_LOCAL_SETTING = + Setting.boolSetting("action.master.force_local", false, false, SettingsProperty.ClusterScope); private final boolean forceLocal; diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapSettings.java 
b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapSettings.java index a20ff9bb059a..9c0bdcbd2c96 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapSettings.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapSettings.java @@ -20,7 +20,7 @@ package org.elasticsearch.bootstrap; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Scope; +import org.elasticsearch.common.settings.Setting.SettingsProperty; public final class BootstrapSettings { @@ -29,10 +29,13 @@ public final class BootstrapSettings { // TODO: remove this hack when insecure defaults are removed from java public static final Setting SECURITY_FILTER_BAD_DEFAULTS_SETTING = - Setting.boolSetting("security.manager.filter_bad_defaults", true, false, Scope.CLUSTER); + Setting.boolSetting("security.manager.filter_bad_defaults", true, false, SettingsProperty.ClusterScope); - public static final Setting MLOCKALL_SETTING = Setting.boolSetting("bootstrap.mlockall", false, false, Scope.CLUSTER); - public static final Setting SECCOMP_SETTING = Setting.boolSetting("bootstrap.seccomp", true, false, Scope.CLUSTER); - public static final Setting CTRLHANDLER_SETTING = Setting.boolSetting("bootstrap.ctrlhandler", true, false, Scope.CLUSTER); + public static final Setting MLOCKALL_SETTING = + Setting.boolSetting("bootstrap.mlockall", false, false, SettingsProperty.ClusterScope); + public static final Setting SECCOMP_SETTING = + Setting.boolSetting("bootstrap.seccomp", true, false, SettingsProperty.ClusterScope); + public static final Setting CTRLHANDLER_SETTING = + Setting.boolSetting("bootstrap.ctrlhandler", true, false, SettingsProperty.ClusterScope); } diff --git a/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java b/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java index a293428192b6..9cec74115f60 100644 --- a/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java 
+++ b/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.recycler.AbstractRecyclerC; import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; @@ -43,13 +44,19 @@ import static org.elasticsearch.common.recycler.Recyclers.none; /** A recycler of fixed-size pages. */ public class PageCacheRecycler extends AbstractComponent implements Releasable { - public static final Setting TYPE_SETTING = new Setting<>("cache.recycler.page.type", Type.CONCURRENT.name(), Type::parse, false, Setting.Scope.CLUSTER); - public static final Setting LIMIT_HEAP_SETTING = Setting.byteSizeSetting("cache.recycler.page.limit.heap", "10%", false, Setting.Scope.CLUSTER); - public static final Setting WEIGHT_BYTES_SETTING = Setting.doubleSetting("cache.recycler.page.weight.bytes", 1d, 0d, false, Setting.Scope.CLUSTER); - public static final Setting WEIGHT_LONG_SETTING = Setting.doubleSetting("cache.recycler.page.weight.longs", 1d, 0d, false, Setting.Scope.CLUSTER); - public static final Setting WEIGHT_INT_SETTING = Setting.doubleSetting("cache.recycler.page.weight.ints", 1d, 0d, false, Setting.Scope.CLUSTER); + public static final Setting TYPE_SETTING = + new Setting<>("cache.recycler.page.type", Type.CONCURRENT.name(), Type::parse, false, SettingsProperty.ClusterScope); + public static final Setting LIMIT_HEAP_SETTING = + Setting.byteSizeSetting("cache.recycler.page.limit.heap", "10%", false, SettingsProperty.ClusterScope); + public static final Setting WEIGHT_BYTES_SETTING = + Setting.doubleSetting("cache.recycler.page.weight.bytes", 1d, 0d, false, SettingsProperty.ClusterScope); + public static final Setting 
WEIGHT_LONG_SETTING = + Setting.doubleSetting("cache.recycler.page.weight.longs", 1d, 0d, false, SettingsProperty.ClusterScope); + public static final Setting WEIGHT_INT_SETTING = + Setting.doubleSetting("cache.recycler.page.weight.ints", 1d, 0d, false, SettingsProperty.ClusterScope); // object pages are less useful to us so we give them a lower weight by default - public static final Setting WEIGHT_OBJECTS_SETTING = Setting.doubleSetting("cache.recycler.page.weight.objects", 0.1d, 0d, false, Setting.Scope.CLUSTER); + public static final Setting WEIGHT_OBJECTS_SETTING = + Setting.doubleSetting("cache.recycler.page.weight.objects", 0.1d, 0d, false, SettingsProperty.ClusterScope); private final Recycler bytePage; private final Recycler intPage; diff --git a/core/src/main/java/org/elasticsearch/client/Client.java b/core/src/main/java/org/elasticsearch/client/Client.java index f81ba9eb1b12..859a15e2c5b0 100644 --- a/core/src/main/java/org/elasticsearch/client/Client.java +++ b/core/src/main/java/org/elasticsearch/client/Client.java @@ -87,6 +87,7 @@ import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import java.util.Map; @@ -114,7 +115,7 @@ public interface Client extends ElasticsearchClient, Releasable { default: throw new IllegalArgumentException("Can't parse [client.type] must be one of [node, transport]"); } - }, false, Setting.Scope.CLUSTER); + }, false, SettingsProperty.ClusterScope); /** * The admin client that can be used to perform administrative operations. 
diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java index 1e605b9de065..2e4956333299 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java @@ -34,6 +34,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; @@ -100,10 +101,14 @@ public class TransportClientNodesService extends AbstractComponent { private volatile boolean closed; - public static final Setting CLIENT_TRANSPORT_NODES_SAMPLER_INTERVAL = Setting.positiveTimeSetting("client.transport.nodes_sampler_interval", timeValueSeconds(5), false, Setting.Scope.CLUSTER); - public static final Setting CLIENT_TRANSPORT_PING_TIMEOUT = Setting.positiveTimeSetting("client.transport.ping_timeout", timeValueSeconds(5), false, Setting.Scope.CLUSTER); - public static final Setting CLIENT_TRANSPORT_IGNORE_CLUSTER_NAME = Setting.boolSetting("client.transport.ignore_cluster_name", false, false, Setting.Scope.CLUSTER); - public static final Setting CLIENT_TRANSPORT_SNIFF = Setting.boolSetting("client.transport.sniff", false, false, Setting.Scope.CLUSTER); + public static final Setting CLIENT_TRANSPORT_NODES_SAMPLER_INTERVAL = + Setting.positiveTimeSetting("client.transport.nodes_sampler_interval", timeValueSeconds(5), false, SettingsProperty.ClusterScope); + public static final Setting CLIENT_TRANSPORT_PING_TIMEOUT = + 
Setting.positiveTimeSetting("client.transport.ping_timeout", timeValueSeconds(5), false, SettingsProperty.ClusterScope); + public static final Setting CLIENT_TRANSPORT_IGNORE_CLUSTER_NAME = + Setting.boolSetting("client.transport.ignore_cluster_name", false, false, SettingsProperty.ClusterScope); + public static final Setting CLIENT_TRANSPORT_SNIFF = + Setting.boolSetting("client.transport.sniff", false, false, SettingsProperty.ClusterScope); @Inject public TransportClientNodesService(Settings settings, ClusterName clusterName, TransportService transportService, diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index 3e668191ff39..ec27ed3a4d40 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -58,6 +58,7 @@ import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.gateway.GatewayAllocator; @@ -74,7 +75,8 @@ public class ClusterModule extends AbstractModule { public static final String EVEN_SHARD_COUNT_ALLOCATOR = "even_shard"; public static final String BALANCED_ALLOCATOR = "balanced"; // default - public static final Setting SHARDS_ALLOCATOR_TYPE_SETTING = new Setting<>("cluster.routing.allocation.type", BALANCED_ALLOCATOR, Function.identity(), false, Setting.Scope.CLUSTER); + public static final Setting SHARDS_ALLOCATOR_TYPE_SETTING = + new Setting<>("cluster.routing.allocation.type", BALANCED_ALLOCATOR, Function.identity(), false, SettingsProperty.ClusterScope); public static final List> DEFAULT_ALLOCATION_DECIDERS = 
Collections.unmodifiableList(Arrays.asList( SameShardAllocationDecider.class, diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterName.java b/core/src/main/java/org/elasticsearch/cluster/ClusterName.java index daf3000d710b..9012b9b0278b 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterName.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterName.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import java.io.IOException; @@ -37,7 +38,7 @@ public class ClusterName implements Streamable { throw new IllegalArgumentException("[cluster.name] must not be empty"); } return s; - }, false, Setting.Scope.CLUSTER); + }, false, SettingsProperty.ClusterScope); public static final ClusterName DEFAULT = new ClusterName(CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY).intern()); diff --git a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index 5107b4495aba..e6d9c27c1c25 100644 --- a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; @@ -64,8 +65,11 @@ import 
java.util.concurrent.TimeUnit; */ public class InternalClusterInfoService extends AbstractComponent implements ClusterInfoService, LocalNodeMasterListener, ClusterStateListener { - public static final Setting INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING = Setting.timeSetting("cluster.info.update.interval", TimeValue.timeValueSeconds(30), TimeValue.timeValueSeconds(10), true, Setting.Scope.CLUSTER); - public static final Setting INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING = Setting.positiveTimeSetting("cluster.info.update.timeout", TimeValue.timeValueSeconds(15), true, Setting.Scope.CLUSTER); + public static final Setting INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING = + Setting.timeSetting("cluster.info.update.interval", TimeValue.timeValueSeconds(30), TimeValue.timeValueSeconds(10), true, + SettingsProperty.ClusterScope); + public static final Setting INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING = + Setting.positiveTimeSetting("cluster.info.update.timeout", TimeValue.timeValueSeconds(15), true, SettingsProperty.ClusterScope); private volatile TimeValue updateFrequency; diff --git a/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java b/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java index 0e61712b010a..647f5df1cd47 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.mapper.MapperService; @@ -41,7 +42,8 @@ import java.util.concurrent.TimeoutException; */ 
public class MappingUpdatedAction extends AbstractComponent { - public static final Setting INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING = Setting.positiveTimeSetting("indices.mapping.dynamic_timeout", TimeValue.timeValueSeconds(30), true, Setting.Scope.CLUSTER); + public static final Setting INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING = + Setting.positiveTimeSetting("indices.mapping.dynamic_timeout", TimeValue.timeValueSeconds(30), true, SettingsProperty.ClusterScope); private IndicesAdminClient client; private volatile TimeValue dynamicMappingUpdateTimeout; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java b/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java index d9b288bb8978..dac44814a92d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java @@ -20,6 +20,7 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; /** * This class acts as a functional wrapper around the index.auto_expand_replicas setting. 
@@ -56,7 +57,7 @@ final class AutoExpandReplicas { } } return new AutoExpandReplicas(min, max, true); - }, true, Setting.Scope.INDEX); + }, true, SettingsProperty.IndexScope); private final int minReplicas; private final int maxReplicas; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index f8822ceb2810..f9982384d6e2 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.FromXContentBuilder; @@ -152,28 +153,36 @@ public class IndexMetaData implements Diffable, FromXContentBuild public static final String INDEX_SETTING_PREFIX = "index."; public static final String SETTING_NUMBER_OF_SHARDS = "index.number_of_shards"; - public static final Setting INDEX_NUMBER_OF_SHARDS_SETTING = Setting.intSetting(SETTING_NUMBER_OF_SHARDS, 5, 1, false, Setting.Scope.INDEX); + public static final Setting INDEX_NUMBER_OF_SHARDS_SETTING = + Setting.intSetting(SETTING_NUMBER_OF_SHARDS, 5, 1, false, SettingsProperty.IndexScope); public static final String SETTING_NUMBER_OF_REPLICAS = "index.number_of_replicas"; - public static final Setting INDEX_NUMBER_OF_REPLICAS_SETTING = Setting.intSetting(SETTING_NUMBER_OF_REPLICAS, 1, 0, true, Setting.Scope.INDEX); + public static final Setting INDEX_NUMBER_OF_REPLICAS_SETTING = + Setting.intSetting(SETTING_NUMBER_OF_REPLICAS, 1, 0, true, SettingsProperty.IndexScope); public 
static final String SETTING_SHADOW_REPLICAS = "index.shadow_replicas"; - public static final Setting INDEX_SHADOW_REPLICAS_SETTING = Setting.boolSetting(SETTING_SHADOW_REPLICAS, false, false, Setting.Scope.INDEX); + public static final Setting INDEX_SHADOW_REPLICAS_SETTING = + Setting.boolSetting(SETTING_SHADOW_REPLICAS, false, false, SettingsProperty.IndexScope); public static final String SETTING_SHARED_FILESYSTEM = "index.shared_filesystem"; - public static final Setting INDEX_SHARED_FILESYSTEM_SETTING = Setting.boolSetting(SETTING_SHARED_FILESYSTEM, false, false, Setting.Scope.INDEX); + public static final Setting INDEX_SHARED_FILESYSTEM_SETTING = + Setting.boolSetting(SETTING_SHARED_FILESYSTEM, false, false, SettingsProperty.IndexScope); public static final String SETTING_AUTO_EXPAND_REPLICAS = "index.auto_expand_replicas"; public static final Setting INDEX_AUTO_EXPAND_REPLICAS_SETTING = AutoExpandReplicas.SETTING; public static final String SETTING_READ_ONLY = "index.blocks.read_only"; - public static final Setting INDEX_READ_ONLY_SETTING = Setting.boolSetting(SETTING_READ_ONLY, false, true, Setting.Scope.INDEX); + public static final Setting INDEX_READ_ONLY_SETTING = + Setting.boolSetting(SETTING_READ_ONLY, false, true, SettingsProperty.IndexScope); public static final String SETTING_BLOCKS_READ = "index.blocks.read"; - public static final Setting INDEX_BLOCKS_READ_SETTING = Setting.boolSetting(SETTING_BLOCKS_READ, false, true, Setting.Scope.INDEX); + public static final Setting INDEX_BLOCKS_READ_SETTING = + Setting.boolSetting(SETTING_BLOCKS_READ, false, true, SettingsProperty.IndexScope); public static final String SETTING_BLOCKS_WRITE = "index.blocks.write"; - public static final Setting INDEX_BLOCKS_WRITE_SETTING = Setting.boolSetting(SETTING_BLOCKS_WRITE, false, true, Setting.Scope.INDEX); + public static final Setting INDEX_BLOCKS_WRITE_SETTING = + Setting.boolSetting(SETTING_BLOCKS_WRITE, false, true, SettingsProperty.IndexScope); public static final 
String SETTING_BLOCKS_METADATA = "index.blocks.metadata"; - public static final Setting INDEX_BLOCKS_METADATA_SETTING = Setting.boolSetting(SETTING_BLOCKS_METADATA, false, true, Setting.Scope.INDEX); + public static final Setting INDEX_BLOCKS_METADATA_SETTING = + Setting.boolSetting(SETTING_BLOCKS_METADATA, false, true, SettingsProperty.IndexScope); public static final String SETTING_VERSION_CREATED = "index.version.created"; public static final String SETTING_VERSION_CREATED_STRING = "index.version.created_string"; @@ -182,18 +191,24 @@ public class IndexMetaData implements Diffable, FromXContentBuild public static final String SETTING_VERSION_MINIMUM_COMPATIBLE = "index.version.minimum_compatible"; public static final String SETTING_CREATION_DATE = "index.creation_date"; public static final String SETTING_PRIORITY = "index.priority"; - public static final Setting INDEX_PRIORITY_SETTING = Setting.intSetting("index.priority", 1, 0, true, Setting.Scope.INDEX); + public static final Setting INDEX_PRIORITY_SETTING = + Setting.intSetting("index.priority", 1, 0, true, SettingsProperty.IndexScope); public static final String SETTING_CREATION_DATE_STRING = "index.creation_date_string"; public static final String SETTING_INDEX_UUID = "index.uuid"; public static final String SETTING_DATA_PATH = "index.data_path"; - public static final Setting INDEX_DATA_PATH_SETTING = new Setting<>(SETTING_DATA_PATH, "", Function.identity(), false, Setting.Scope.INDEX); + public static final Setting INDEX_DATA_PATH_SETTING = + new Setting<>(SETTING_DATA_PATH, "", Function.identity(), false, SettingsProperty.IndexScope); public static final String SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE = "index.shared_filesystem.recover_on_any_node"; - public static final Setting INDEX_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE_SETTING = Setting.boolSetting(SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false, true, Setting.Scope.INDEX); + public static final Setting 
INDEX_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE_SETTING = + Setting.boolSetting(SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false, true, SettingsProperty.IndexScope); public static final String INDEX_UUID_NA_VALUE = "_na_"; - public static final Setting INDEX_ROUTING_REQUIRE_GROUP_SETTING = Setting.groupSetting("index.routing.allocation.require.", true, Setting.Scope.INDEX); - public static final Setting INDEX_ROUTING_INCLUDE_GROUP_SETTING = Setting.groupSetting("index.routing.allocation.include.", true, Setting.Scope.INDEX); - public static final Setting INDEX_ROUTING_EXCLUDE_GROUP_SETTING = Setting.groupSetting("index.routing.allocation.exclude.", true, Setting.Scope.INDEX); + public static final Setting INDEX_ROUTING_REQUIRE_GROUP_SETTING = + Setting.groupSetting("index.routing.allocation.require.", true, SettingsProperty.IndexScope); + public static final Setting INDEX_ROUTING_INCLUDE_GROUP_SETTING = + Setting.groupSetting("index.routing.allocation.include.", true, SettingsProperty.IndexScope); + public static final Setting INDEX_ROUTING_EXCLUDE_GROUP_SETTING = + Setting.groupSetting("index.routing.allocation.exclude.", true, SettingsProperty.IndexScope); public static final IndexMetaData PROTO = IndexMetaData.builder("") .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 8ba78979f3f3..f729cc4cabcb 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.FromXContentBuilder; @@ -139,7 +140,8 @@ public class MetaData implements Iterable, Diffable, Fr } - public static final Setting SETTING_READ_ONLY_SETTING = Setting.boolSetting("cluster.blocks.read_only", false, true, Setting.Scope.CLUSTER); + public static final Setting SETTING_READ_ONLY_SETTING = + Setting.boolSetting("cluster.blocks.read_only", false, true, SettingsProperty.ClusterScope); public static final ClusterBlock CLUSTER_READ_ONLY_BLOCK = new ClusterBlock(6, "cluster read-only (api)", false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA_WRITE)); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java b/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java index 714c1e4913a2..d7cfe1a39d95 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; @@ -44,7 +45,9 @@ public class UnassignedInfo implements ToXContent, Writeable { public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("dateOptionalTime"); private static final TimeValue DEFAULT_DELAYED_NODE_LEFT_TIMEOUT = TimeValue.timeValueMinutes(1); - public static final Setting INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING = Setting.timeSetting("index.unassigned.node_left.delayed_timeout", 
DEFAULT_DELAYED_NODE_LEFT_TIMEOUT, true, Setting.Scope.INDEX); + public static final Setting INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING = + Setting.timeSetting("index.unassigned.node_left.delayed_timeout", DEFAULT_DELAYED_NODE_LEFT_TIMEOUT, true, + SettingsProperty.IndexScope); /** * Reason why the shard is in unassigned state. diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index e12020cfa742..248d5aa25c93 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.PriorityComparator; @@ -72,9 +73,12 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; */ public class BalancedShardsAllocator extends AbstractComponent implements ShardsAllocator { - public static final Setting INDEX_BALANCE_FACTOR_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.index", 0.55f, true, Setting.Scope.CLUSTER); - public static final Setting SHARD_BALANCE_FACTOR_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.shard", 0.45f, true, Setting.Scope.CLUSTER); - public static final Setting THRESHOLD_SETTING = Setting.floatSetting("cluster.routing.allocation.balance.threshold", 1.0f, 0.0f, true, Setting.Scope.CLUSTER); + public static final Setting INDEX_BALANCE_FACTOR_SETTING = + 
Setting.floatSetting("cluster.routing.allocation.balance.index", 0.55f, true, SettingsProperty.ClusterScope); + public static final Setting SHARD_BALANCE_FACTOR_SETTING = + Setting.floatSetting("cluster.routing.allocation.balance.shard", 0.45f, true, SettingsProperty.ClusterScope); + public static final Setting THRESHOLD_SETTING = + Setting.floatSetting("cluster.routing.allocation.balance.threshold", 1.0f, 0.0f, true, SettingsProperty.ClusterScope); private volatile WeightFunction weightFunction; private volatile float threshold; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java index 9859a9b65844..792f670dcf2c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import java.util.HashMap; @@ -77,8 +78,11 @@ public class AwarenessAllocationDecider extends AllocationDecider { public static final String NAME = "awareness"; - public static final Setting CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING = new Setting<>("cluster.routing.allocation.awareness.attributes", "", Strings::splitStringByCommaToArray , true, Setting.Scope.CLUSTER); - public static final Setting CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.awareness.force.", true, Setting.Scope.CLUSTER); + public static final Setting 
CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING = + new Setting<>("cluster.routing.allocation.awareness.attributes", "", Strings::splitStringByCommaToArray , true, + SettingsProperty.ClusterScope); + public static final Setting CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING = + Setting.groupSetting("cluster.routing.allocation.awareness.force.", true, SettingsProperty.ClusterScope); private String[] awarenessAttributes; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java index 11fce397b267..2c59fee3af6f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import java.util.Locale; @@ -48,7 +49,9 @@ import java.util.Locale; public class ClusterRebalanceAllocationDecider extends AllocationDecider { public static final String NAME = "cluster_rebalance"; - public static final Setting CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING = new Setting<>("cluster.routing.allocation.allow_rebalance", ClusterRebalanceType.INDICES_ALL_ACTIVE.name().toLowerCase(Locale.ROOT), ClusterRebalanceType::parseString, true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING = + new Setting<>("cluster.routing.allocation.allow_rebalance", 
ClusterRebalanceType.INDICES_ALL_ACTIVE.name().toLowerCase(Locale.ROOT), + ClusterRebalanceType::parseString, true, SettingsProperty.ClusterScope); /** * An enum representation for the configured re-balance type. diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java index d39b96040667..cda5e628dece 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; /** @@ -42,7 +43,8 @@ public class ConcurrentRebalanceAllocationDecider extends AllocationDecider { public static final String NAME = "concurrent_rebalance"; - public static final Setting CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING = Setting.intSetting("cluster.routing.allocation.cluster_concurrent_rebalance", 2, -1, true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING = + Setting.intSetting("cluster.routing.allocation.cluster_concurrent_rebalance", 2, -1, true, SettingsProperty.ClusterScope); private volatile int clusterConcurrentRebalance; @Inject diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index 821fa55d7043..051eab81ec8f 100644 
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.RatioValue; @@ -81,11 +82,17 @@ public class DiskThresholdDecider extends AllocationDecider { private volatile boolean enabled; private volatile TimeValue rerouteInterval; - public static final Setting CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING = Setting.boolSetting("cluster.routing.allocation.disk.threshold_enabled", true, true, Setting.Scope.CLUSTER); - public static final Setting CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.low", "85%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.low"), true, Setting.Scope.CLUSTER); - public static final Setting CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.high", "90%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.high"), true, Setting.Scope.CLUSTER); - public static final Setting CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING = Setting.boolSetting("cluster.routing.allocation.disk.include_relocations", true, true, Setting.Scope.CLUSTER);; - public static final Setting CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING = Setting.positiveTimeSetting("cluster.routing.allocation.disk.reroute_interval", TimeValue.timeValueSeconds(60), true, Setting.Scope.CLUSTER); + public static 
final Setting CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING = + Setting.boolSetting("cluster.routing.allocation.disk.threshold_enabled", true, true, SettingsProperty.ClusterScope); + public static final Setting CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING = + new Setting<>("cluster.routing.allocation.disk.watermark.low", "85%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.low"), true, SettingsProperty.ClusterScope); + public static final Setting CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING = + new Setting<>("cluster.routing.allocation.disk.watermark.high", "90%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.high"), true, SettingsProperty.ClusterScope); + public static final Setting CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING = + Setting.boolSetting("cluster.routing.allocation.disk.include_relocations", true, true, SettingsProperty.ClusterScope);; + public static final Setting CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING = + Setting.positiveTimeSetting("cluster.routing.allocation.disk.reroute_interval", TimeValue.timeValueSeconds(60), true, + SettingsProperty.ClusterScope); /** * Listens for a node to go over the high watermark and kicks off an empty diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java index 9131355876ba..edece247c8b3 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import 
org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import java.util.Locale; @@ -60,11 +61,15 @@ public class EnableAllocationDecider extends AllocationDecider { public static final String NAME = "enable"; - public static final Setting CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING = new Setting<>("cluster.routing.allocation.enable", Allocation.ALL.name(), Allocation::parse, true, Setting.Scope.CLUSTER); - public static final Setting INDEX_ROUTING_ALLOCATION_ENABLE_SETTING = new Setting<>("index.routing.allocation.enable", Allocation.ALL.name(), Allocation::parse, true, Setting.Scope.INDEX); + public static final Setting CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING = + new Setting<>("cluster.routing.allocation.enable", Allocation.ALL.name(), Allocation::parse, true, SettingsProperty.ClusterScope); + public static final Setting INDEX_ROUTING_ALLOCATION_ENABLE_SETTING = + new Setting<>("index.routing.allocation.enable", Allocation.ALL.name(), Allocation::parse, true, SettingsProperty.IndexScope); - public static final Setting CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING = new Setting<>("cluster.routing.rebalance.enable", Rebalance.ALL.name(), Rebalance::parse, true, Setting.Scope.CLUSTER); - public static final Setting INDEX_ROUTING_REBALANCE_ENABLE_SETTING = new Setting<>("index.routing.rebalance.enable", Rebalance.ALL.name(), Rebalance::parse, true, Setting.Scope.INDEX); + public static final Setting CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING = + new Setting<>("cluster.routing.rebalance.enable", Rebalance.ALL.name(), Rebalance::parse, true, SettingsProperty.ClusterScope); + public static final Setting INDEX_ROUTING_REBALANCE_ENABLE_SETTING = + new Setting<>("index.routing.rebalance.enable", Rebalance.ALL.name(), Rebalance::parse, true, SettingsProperty.IndexScope); private volatile Rebalance enableRebalance; private volatile Allocation enableAllocation; diff --git 
a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java index f8ff5f37aed7..59f6ec1531a0 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.AND; @@ -60,9 +61,12 @@ public class FilterAllocationDecider extends AllocationDecider { public static final String NAME = "filter"; - public static final Setting CLUSTER_ROUTING_REQUIRE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.require.", true, Setting.Scope.CLUSTER); - public static final Setting CLUSTER_ROUTING_INCLUDE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.include.", true, Setting.Scope.CLUSTER); - public static final Setting CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.exclude.", true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_REQUIRE_GROUP_SETTING = + Setting.groupSetting("cluster.routing.allocation.require.", true, SettingsProperty.ClusterScope); + public static final Setting CLUSTER_ROUTING_INCLUDE_GROUP_SETTING = + Setting.groupSetting("cluster.routing.allocation.include.", true, SettingsProperty.ClusterScope); + public static final Setting CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING = + Setting.groupSetting("cluster.routing.allocation.exclude.", true, 
SettingsProperty.ClusterScope); private volatile DiscoveryNodeFilters clusterRequireFilters; private volatile DiscoveryNodeFilters clusterIncludeFilters; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java index e766b4c49aa3..be6c98d147ba 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; /** @@ -59,13 +60,15 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { * Controls the maximum number of shards per index on a single Elasticsearch * node. Negative values are interpreted as unlimited. */ - public static final Setting INDEX_TOTAL_SHARDS_PER_NODE_SETTING = Setting.intSetting("index.routing.allocation.total_shards_per_node", -1, -1, true, Setting.Scope.INDEX); + public static final Setting INDEX_TOTAL_SHARDS_PER_NODE_SETTING = + Setting.intSetting("index.routing.allocation.total_shards_per_node", -1, -1, true, SettingsProperty.IndexScope); /** * Controls the maximum number of shards per node on a global level. * Negative values are interpreted as unlimited. 
*/ - public static final Setting CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING = Setting.intSetting("cluster.routing.allocation.total_shards_per_node", -1, -1, true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING = + Setting.intSetting("cluster.routing.allocation.total_shards_per_node", -1, -1, true, SettingsProperty.ClusterScope); @Inject diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java index cf889cde6ad2..b4927b6c5c7f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; /** @@ -39,7 +40,8 @@ public class SnapshotInProgressAllocationDecider extends AllocationDecider { /** * Disables relocation of shards that are currently being snapshotted. 
*/ - public static final Setting CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING = Setting.boolSetting("cluster.routing.allocation.snapshot.relocation_enabled", false, true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING = + Setting.boolSetting("cluster.routing.allocation.snapshot.relocation_enabled", false, true, SettingsProperty.ClusterScope); private volatile boolean enableRelocation = false; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java index 25f43f576101..1e12eb406b83 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; /** @@ -50,10 +51,23 @@ public class ThrottlingAllocationDecider extends AllocationDecider { public static final int DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES = 2; public static final int DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES = 4; public static final String NAME = "throttling"; - public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_recoveries", Integer.toString(DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES), (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_recoveries"), 
true, Setting.Scope.CLUSTER); - public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING = Setting.intSetting("cluster.routing.allocation.node_initial_primaries_recoveries", DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, 0, true, Setting.Scope.CLUSTER); - public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_incoming_recoveries", (s) -> CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getRaw(s), (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_incoming_recoveries"), true, Setting.Scope.CLUSTER); - public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_outgoing_recoveries", (s) -> CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getRaw(s), (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_outgoing_recoveries"), true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING = + new Setting<>("cluster.routing.allocation.node_concurrent_recoveries", + Integer.toString(DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES), + (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_recoveries"), true, SettingsProperty.ClusterScope); + public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING = + Setting.intSetting("cluster.routing.allocation.node_initial_primaries_recoveries", + DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, 0, true, SettingsProperty.ClusterScope); + public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING = + new Setting<>("cluster.routing.allocation.node_concurrent_incoming_recoveries", + (s) -> 
CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getRaw(s), + (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_incoming_recoveries"), true, + SettingsProperty.ClusterScope); + public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING = + new Setting<>("cluster.routing.allocation.node_concurrent_outgoing_recoveries", + (s) -> CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getRaw(s), + (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_outgoing_recoveries"), true, + SettingsProperty.ClusterScope); private volatile int primariesInitialRecoveries; diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index 1ee3dddf77c7..0b0fd5e2b994 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -51,6 +51,7 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.transport.TransportAddress; @@ -93,8 +94,12 @@ import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadF */ public class InternalClusterService extends AbstractLifecycleComponent implements ClusterService { - public static final Setting CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING = Setting.positiveTimeSetting("cluster.service.slow_task_logging_threshold", TimeValue.timeValueSeconds(30), true, Setting.Scope.CLUSTER); - public static final Setting 
CLUSTER_SERVICE_RECONNECT_INTERVAL_SETTING = Setting.positiveTimeSetting("cluster.service.reconnect_interval", TimeValue.timeValueSeconds(10), false, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING = + Setting.positiveTimeSetting("cluster.service.slow_task_logging_threshold", TimeValue.timeValueSeconds(30), true, + SettingsProperty.ClusterScope); + public static final Setting CLUSTER_SERVICE_RECONNECT_INTERVAL_SETTING = + Setting.positiveTimeSetting("cluster.service.reconnect_interval", TimeValue.timeValueSeconds(10), false, + SettingsProperty.ClusterScope); public static final String UPDATE_THREAD_NAME = "clusterService#updateTask"; private final ThreadPool threadPool; diff --git a/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java b/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java index 4fdde3db8953..98d75c864828 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java +++ b/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.logging.log4j.Log4jESLoggerFactory; import org.elasticsearch.common.logging.slf4j.Slf4jESLoggerFactory; import org.elasticsearch.common.settings.AbstractScopedSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import java.util.Locale; @@ -36,8 +37,10 @@ import java.util.regex.Pattern; */ public abstract class ESLoggerFactory { - public static final Setting LOG_DEFAULT_LEVEL_SETTING = new Setting<>("logger.level", LogLevel.INFO.name(), LogLevel::parse, false, Setting.Scope.CLUSTER); - public static final Setting LOG_LEVEL_SETTING = Setting.dynamicKeySetting("logger.", LogLevel.INFO.name(), LogLevel::parse, true, Setting.Scope.CLUSTER); + public static final Setting LOG_DEFAULT_LEVEL_SETTING = + new 
Setting<>("logger.level", LogLevel.INFO.name(), LogLevel::parse, false, SettingsProperty.ClusterScope); + public static final Setting LOG_LEVEL_SETTING = + Setting.dynamicKeySetting("logger.", LogLevel.INFO.name(), LogLevel::parse, true, SettingsProperty.ClusterScope); private static volatile ESLoggerFactory defaultFactory = new JdkESLoggerFactory(); diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java index b0598469d3ad..ea2cc1b42676 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -28,8 +28,8 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.Setting.Scope; import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.http.HttpServer; import org.elasticsearch.http.HttpServerTransport; @@ -155,10 +155,12 @@ public class NetworkModule extends AbstractModule { public static final String LOCAL_TRANSPORT = "local"; public static final String NETTY_TRANSPORT = "netty"; - public static final Setting HTTP_TYPE_SETTING = Setting.simpleString("http.type", false, Scope.CLUSTER); - public static final Setting HTTP_ENABLED = Setting.boolSetting("http.enabled", true, false, Scope.CLUSTER); - public static final Setting TRANSPORT_SERVICE_TYPE_SETTING = Setting.simpleString("transport.service.type", false, Scope.CLUSTER); - public static final Setting TRANSPORT_TYPE_SETTING = Setting.simpleString("transport.type", false, Scope.CLUSTER); + public static final Setting HTTP_TYPE_SETTING = 
Setting.simpleString("http.type", false, SettingsProperty.ClusterScope); + public static final Setting HTTP_ENABLED = Setting.boolSetting("http.enabled", true, false, SettingsProperty.ClusterScope); + public static final Setting TRANSPORT_SERVICE_TYPE_SETTING = + Setting.simpleString("transport.service.type", false, SettingsProperty.ClusterScope); + public static final Setting TRANSPORT_TYPE_SETTING = + Setting.simpleString("transport.type", false, SettingsProperty.ClusterScope); diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java index 5e8dbc4dcad5..abb7795f12a7 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java @@ -22,6 +22,7 @@ package org.elasticsearch.common.network; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -43,24 +44,33 @@ public class NetworkService extends AbstractComponent { /** By default, we bind to loopback interfaces */ public static final String DEFAULT_NETWORK_HOST = "_local_"; - public static final Setting> GLOBAL_NETWORK_HOST_SETTING = Setting.listSetting("network.host", Arrays.asList(DEFAULT_NETWORK_HOST), - s -> s, false, Setting.Scope.CLUSTER); - public static final Setting> GLOBAL_NETWORK_BINDHOST_SETTING = Setting.listSetting("network.bind_host", GLOBAL_NETWORK_HOST_SETTING, - s -> s, false, Setting.Scope.CLUSTER); - public static final Setting> GLOBAL_NETWORK_PUBLISHHOST_SETTING = Setting.listSetting("network.publish_host", GLOBAL_NETWORK_HOST_SETTING, - s -> s, false, Setting.Scope.CLUSTER); - public 
static final Setting NETWORK_SERVER = Setting.boolSetting("network.server", true, false, Setting.Scope.CLUSTER); + public static final Setting> GLOBAL_NETWORK_HOST_SETTING = + Setting.listSetting("network.host", Arrays.asList(DEFAULT_NETWORK_HOST), s -> s, false, SettingsProperty.ClusterScope); + public static final Setting> GLOBAL_NETWORK_BINDHOST_SETTING = + Setting.listSetting("network.bind_host", GLOBAL_NETWORK_HOST_SETTING, s -> s, false, SettingsProperty.ClusterScope); + public static final Setting> GLOBAL_NETWORK_PUBLISHHOST_SETTING = + Setting.listSetting("network.publish_host", GLOBAL_NETWORK_HOST_SETTING, s -> s, false, SettingsProperty.ClusterScope); + public static final Setting NETWORK_SERVER = Setting.boolSetting("network.server", true, false, SettingsProperty.ClusterScope); public static final class TcpSettings { - public static final Setting TCP_NO_DELAY = Setting.boolSetting("network.tcp.no_delay", true, false, Setting.Scope.CLUSTER); - public static final Setting TCP_KEEP_ALIVE = Setting.boolSetting("network.tcp.keep_alive", true, false, Setting.Scope.CLUSTER); - public static final Setting TCP_REUSE_ADDRESS = Setting.boolSetting("network.tcp.reuse_address", NetworkUtils.defaultReuseAddress(), false, Setting.Scope.CLUSTER); - public static final Setting TCP_SEND_BUFFER_SIZE = Setting.byteSizeSetting("network.tcp.send_buffer_size", new ByteSizeValue(-1), false, Setting.Scope.CLUSTER); - public static final Setting TCP_RECEIVE_BUFFER_SIZE = Setting.byteSizeSetting("network.tcp.receive_buffer_size", new ByteSizeValue(-1), false, Setting.Scope.CLUSTER); - public static final Setting TCP_BLOCKING = Setting.boolSetting("network.tcp.blocking", false, false, Setting.Scope.CLUSTER); - public static final Setting TCP_BLOCKING_SERVER = Setting.boolSetting("network.tcp.blocking_server", TCP_BLOCKING, false, Setting.Scope.CLUSTER); - public static final Setting TCP_BLOCKING_CLIENT = Setting.boolSetting("network.tcp.blocking_client", TCP_BLOCKING, false, 
Setting.Scope.CLUSTER); - public static final Setting TCP_CONNECT_TIMEOUT = Setting.timeSetting("network.tcp.connect_timeout", new TimeValue(30, TimeUnit.SECONDS), false, Setting.Scope.CLUSTER); + public static final Setting TCP_NO_DELAY = + Setting.boolSetting("network.tcp.no_delay", true, false, SettingsProperty.ClusterScope); + public static final Setting TCP_KEEP_ALIVE = + Setting.boolSetting("network.tcp.keep_alive", true, false, SettingsProperty.ClusterScope); + public static final Setting TCP_REUSE_ADDRESS = + Setting.boolSetting("network.tcp.reuse_address", NetworkUtils.defaultReuseAddress(), false, SettingsProperty.ClusterScope); + public static final Setting TCP_SEND_BUFFER_SIZE = + Setting.byteSizeSetting("network.tcp.send_buffer_size", new ByteSizeValue(-1), false, SettingsProperty.ClusterScope); + public static final Setting TCP_RECEIVE_BUFFER_SIZE = + Setting.byteSizeSetting("network.tcp.receive_buffer_size", new ByteSizeValue(-1), false, SettingsProperty.ClusterScope); + public static final Setting TCP_BLOCKING = + Setting.boolSetting("network.tcp.blocking", false, false, SettingsProperty.ClusterScope); + public static final Setting TCP_BLOCKING_SERVER = + Setting.boolSetting("network.tcp.blocking_server", TCP_BLOCKING, false, SettingsProperty.ClusterScope); + public static final Setting TCP_BLOCKING_CLIENT = + Setting.boolSetting("network.tcp.blocking_client", TCP_BLOCKING, false, SettingsProperty.ClusterScope); + public static final Setting TCP_CONNECT_TIMEOUT = + Setting.timeSetting("network.tcp.connect_timeout", new TimeValue(30, TimeUnit.SECONDS), false, SettingsProperty.ClusterScope); } /** diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index b30178857e1d..5345ab03b63d 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ 
b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -22,7 +22,6 @@ package org.elasticsearch.common.settings; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.util.set.Sets; import java.util.ArrayList; import java.util.Collections; @@ -45,19 +44,19 @@ public abstract class AbstractScopedSettings extends AbstractComponent { private final List> settingUpdaters = new CopyOnWriteArrayList<>(); private final Map> complexMatchers; private final Map> keySettings; - private final Setting.Scope scope; + private final Setting.SettingsProperty scope; private static final Pattern KEY_PATTERN = Pattern.compile("^(?:[-\\w]+[.])*[-\\w]+$"); private static final Pattern GROUP_KEY_PATTERN = Pattern.compile("^(?:[-\\w]+[.])+$"); - protected AbstractScopedSettings(Settings settings, Set> settingsSet, Setting.Scope scope) { + protected AbstractScopedSettings(Settings settings, Set> settingsSet, Setting.SettingsProperty scope) { super(settings); this.lastSettingsApplied = Settings.EMPTY; this.scope = scope; Map> complexMatchers = new HashMap<>(); Map> keySettings = new HashMap<>(); for (Setting setting : settingsSet) { - if (setting.getScope() != scope) { - throw new IllegalArgumentException("Setting must be a " + scope + " setting but was: " + setting.getScope()); + if (setting.getProperties().contains(scope) == false) { + throw new IllegalArgumentException("Setting must be a " + scope + " setting but has: " + setting.getProperties()); } if (isValidKey(setting.getKey()) == false && (setting.isGroupSetting() && isValidGroupKey(setting.getKey())) == false) { throw new IllegalArgumentException("illegal settings key: [" + setting.getKey() + "]"); @@ -92,7 +91,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { return GROUP_KEY_PATTERN.matcher(key).matches(); } - public Setting.Scope getScope() { + 
public Setting.SettingsProperty getScope() { return this.scope; } @@ -325,8 +324,9 @@ public abstract class AbstractScopedSettings extends AbstractComponent { * Returns the value for the given setting. */ public T get(Setting setting) { - if (setting.getScope() != scope) { - throw new IllegalArgumentException("settings scope doesn't match the setting scope [" + this.scope + "] != [" + setting.getScope() + "]"); + if (setting.getProperties().contains(scope) == false) { + throw new IllegalArgumentException("settings scope doesn't match the setting scope [" + this.scope + "] not in [" + + setting.getProperties() + "]"); } if (get(setting.getKey()) == null) { throw new IllegalArgumentException("setting " + setting.getKey() + " has not been registered"); diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index d68e43ea1cb4..36a066678575 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -45,6 +45,7 @@ import org.elasticsearch.cluster.service.InternalClusterService; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.discovery.DiscoveryModule; @@ -102,7 +103,7 @@ import java.util.function.Predicate; */ public final class ClusterSettings extends AbstractScopedSettings { public ClusterSettings(Settings nodeSettings, Set> settingsSet) { - super(nodeSettings, settingsSet, Setting.Scope.CLUSTER); + super(nodeSettings, settingsSet, SettingsProperty.ClusterScope); addSettingsUpdater(new LoggingSettingUpdater(nodeSettings)); 
} diff --git a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index 69ef795812dc..a8f06fd9cf52 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -22,6 +22,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.gateway.PrimaryShardAllocator; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; @@ -49,7 +50,7 @@ import java.util.function.Predicate; /** * Encapsulates all valid index level settings. 
- * @see org.elasticsearch.common.settings.Setting.Scope#INDEX + * @see org.elasticsearch.common.settings.Setting.SettingsProperty#IndexScope */ public final class IndexScopedSettings extends AbstractScopedSettings { @@ -134,15 +135,15 @@ public final class IndexScopedSettings extends AbstractScopedSettings { EngineConfig.INDEX_CODEC_SETTING, IndexWarmer.INDEX_NORMS_LOADING_SETTING, // this sucks but we can't really validate all the analyzers/similarity in here - Setting.groupSetting("index.similarity.", false, Setting.Scope.INDEX), // this allows similarity settings to be passed - Setting.groupSetting("index.analysis.", false, Setting.Scope.INDEX) // this allows analysis settings to be passed + Setting.groupSetting("index.similarity.", false, SettingsProperty.IndexScope), // this allows similarity settings to be passed + Setting.groupSetting("index.analysis.", false, SettingsProperty.IndexScope) // this allows analysis settings to be passed ))); public static final IndexScopedSettings DEFAULT_SCOPED_SETTINGS = new IndexScopedSettings(Settings.EMPTY, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); public IndexScopedSettings(Settings settings, Set> settingsSet) { - super(settings, settingsSet, Setting.Scope.INDEX); + super(settings, settingsSet, SettingsProperty.IndexScope); } private IndexScopedSettings(Settings settings, IndexScopedSettings other, IndexMetaData metaData) { diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 1e0e2190fcc9..ce66eda1766d 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -35,6 +35,8 @@ import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; +import java.util.EnumSet; import java.util.List; import java.util.Objects; import java.util.function.BiConsumer; @@ 
-50,7 +52,7 @@ import java.util.stream.Collectors; * together with {@link AbstractScopedSettings}. This class contains several utility methods that makes it straight forward * to add settings for the majority of the cases. For instance a simple boolean settings can be defined like this: *
      {@code
      - * public static final Setting; MY_BOOLEAN = Setting.boolSetting("my.bool.setting", true, false, Scope.CLUSTER);}
      + * public static final Setting; MY_BOOLEAN = Setting.boolSetting("my.bool.setting", true, false, SettingsProperty.ClusterScope);}
        * 
      * To retrieve the value of the setting a {@link Settings} object can be passed directly to the {@link Setting#get(Settings)} method. *
      @@ -61,29 +63,26 @@ import java.util.stream.Collectors;
        * public enum Color {
        *     RED, GREEN, BLUE;
        * }
      - * public static final Setting MY_BOOLEAN = new Setting<>("my.color.setting", Color.RED.toString(), Color::valueOf, false, Scope.CLUSTER);
      + * public static final Setting MY_BOOLEAN =
      + *     new Setting<>("my.color.setting", Color.RED.toString(), Color::valueOf, false, SettingsProperty.ClusterScope);
        * }
        * 
      */ public class Setting extends ToXContentToBytes { + + public enum SettingsProperty { + Filtered, + Dynamic, + ClusterScope, + NodeScope, + IndexScope; + } + private final String key; protected final Function defaultValue; private final Function parser; private final boolean dynamic; - private final Scope scope; - private final boolean filtered; - - /** - * Creates a new Setting instance, unfiltered - * @param key the settings key for this setting. - * @param defaultValue a default value function that returns the default values string representation. - * @param parser a parser that parses the string rep into a complex datatype. - * @param dynamic true iff this setting can be dynamically updateable - * @param scope the scope of this setting - */ - public Setting(String key, Function defaultValue, Function parser, boolean dynamic, Scope scope) { - this(key, defaultValue, parser, dynamic, scope, false); - } + private final EnumSet properties; /** * Creates a new Setting instance @@ -91,30 +90,32 @@ public class Setting extends ToXContentToBytes { * @param defaultValue a default value function that returns the default values string representation. * @param parser a parser that parses the string rep into a complex datatype. * @param dynamic true iff this setting can be dynamically updateable - * @param scope the scope of this setting - * @param filtered true if this setting should be filtered + * @param properties properties for this setting like scope, filtering... */ - public Setting(String key, Function defaultValue, Function parser, boolean dynamic, Scope scope, - boolean filtered) { + public Setting(String key, Function defaultValue, Function parser, boolean dynamic, + SettingsProperty... 
properties) { assert parser.apply(defaultValue.apply(Settings.EMPTY)) != null || this.isGroupSetting(): "parser returned null"; this.key = key; this.defaultValue = defaultValue; this.parser = parser; this.dynamic = dynamic; - this.scope = scope; - this.filtered = filtered; + if (properties.length == 0) { + this.properties = EnumSet.of(SettingsProperty.NodeScope); + } else { + this.properties = EnumSet.copyOf(Arrays.asList(properties)); + } } /** - * Creates a new Setting instance, unfiltered + * Creates a new Setting instance * @param key the settings key for this setting. - * @param fallBackSetting a setting to fall back to if the current setting is not set. + * @param defaultValue a default value. * @param parser a parser that parses the string rep into a complex datatype. * @param dynamic true iff this setting can be dynamically updateable - * @param scope the scope of this setting + * @param properties properties for this setting like scope, filtering... */ - public Setting(String key, Setting fallBackSetting, Function parser, boolean dynamic, Scope scope) { - this(key, fallBackSetting, parser, dynamic, scope, false); + public Setting(String key, String defaultValue, Function parser, boolean dynamic, SettingsProperty... properties) { + this(key, s -> defaultValue, parser, dynamic, properties); } /** @@ -123,11 +124,10 @@ public class Setting extends ToXContentToBytes { * @param fallBackSetting a setting to fall back to if the current setting is not set. * @param parser a parser that parses the string rep into a complex datatype. * @param dynamic true iff this setting can be dynamically updateable - * @param scope the scope of this setting - * @param filtered true if this setting should be filtered + * @param properties properties for this setting like scope, filtering... 
*/ - public Setting(String key, Setting fallBackSetting, Function parser, boolean dynamic, Scope scope, boolean filtered) { - this(key, fallBackSetting::getRaw, parser, dynamic, scope, filtered); + public Setting(String key, Setting fallBackSetting, Function parser, boolean dynamic, SettingsProperty... properties) { + this(key, fallBackSetting::getRaw, parser, dynamic, properties); } /** @@ -149,17 +149,39 @@ public class Setting extends ToXContentToBytes { } /** - * Returns the settings scope + * Returns the setting properties + * @see SettingsProperty */ - public final Scope getScope() { - return scope; + public EnumSet getProperties() { + return properties; } /** * Returns true if this setting must be filtered, otherwise false */ public boolean isFiltered() { - return filtered; + return properties.contains(SettingsProperty.Filtered); + } + + /** + * Returns true if this setting has a cluster scope, otherwise false + */ + public boolean hasClusterScope() { + return properties.contains(SettingsProperty.ClusterScope); + } + + /** + * Returns true if this setting has an index scope, otherwise false + */ + public boolean hasIndexScope() { + return properties.contains(SettingsProperty.IndexScope); + } + + /** + * Returns true if this setting has an index scope, otherwise false + */ + public boolean hasNodeScope() { + return properties.contains(SettingsProperty.NodeScope); } /** @@ -238,7 +260,7 @@ public class Setting extends ToXContentToBytes { public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("key", key); - builder.field("type", scope.name()); + builder.field("properties", properties); builder.field("dynamic", dynamic); builder.field("is_group_setting", isGroupSetting()); builder.field("default", defaultValue.apply(Settings.EMPTY)); @@ -261,14 +283,6 @@ public class Setting extends ToXContentToBytes { return this; } - /** - * The settings scope - settings can either be cluster 
settings or per index settings. - */ - public enum Scope { - CLUSTER, - INDEX; - } - /** * Build a new updater with a noop validator. */ @@ -366,50 +380,35 @@ public class Setting extends ToXContentToBytes { } - public Setting(String key, String defaultValue, Function parser, boolean dynamic, Scope scope) { - this(key, defaultValue, parser, dynamic, scope, false); + public static Setting floatSetting(String key, float defaultValue, boolean dynamic, SettingsProperty... properties) { + return new Setting<>(key, (s) -> Float.toString(defaultValue), Float::parseFloat, dynamic, properties); } - public Setting(String key, String defaultValue, Function parser, boolean dynamic, Scope scope, boolean filtered) { - this(key, (s) -> defaultValue, parser, dynamic, scope, filtered); - } - - public static Setting floatSetting(String key, float defaultValue, boolean dynamic, Scope scope) { - return new Setting<>(key, (s) -> Float.toString(defaultValue), Float::parseFloat, dynamic, scope); - } - - public static Setting floatSetting(String key, float defaultValue, float minValue, boolean dynamic, Scope scope) { + public static Setting floatSetting(String key, float defaultValue, float minValue, boolean dynamic, SettingsProperty... properties) { return new Setting<>(key, (s) -> Float.toString(defaultValue), (s) -> { float value = Float.parseFloat(s); if (value < minValue) { throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); } return value; - }, dynamic, scope); + }, dynamic, properties); } - public static Setting intSetting(String key, int defaultValue, int minValue, int maxValue, boolean dynamic, Scope scope) { - return new Setting<>(key, (s) -> Integer.toString(defaultValue), (s) -> parseInt(s, minValue, maxValue, key), dynamic, scope); + public static Setting intSetting(String key, int defaultValue, int minValue, int maxValue, boolean dynamic, + SettingsProperty... 
properties) { + return new Setting<>(key, (s) -> Integer.toString(defaultValue), (s) -> parseInt(s, minValue, maxValue, key), dynamic, properties); } - public static Setting intSetting(String key, int defaultValue, int minValue, boolean dynamic, Scope scope) { - return new Setting<>(key, (s) -> Integer.toString(defaultValue), (s) -> parseInt(s, minValue, key), dynamic, scope); + public static Setting intSetting(String key, int defaultValue, int minValue, boolean dynamic, SettingsProperty... properties) { + return new Setting<>(key, (s) -> Integer.toString(defaultValue), (s) -> parseInt(s, minValue, key), dynamic, properties); } - public static Setting longSetting(String key, long defaultValue, long minValue, boolean dynamic, Scope scope) { - return longSetting(key, defaultValue, minValue, dynamic, scope, false); + public static Setting longSetting(String key, long defaultValue, long minValue, boolean dynamic, SettingsProperty... properties) { + return new Setting<>(key, (s) -> Long.toString(defaultValue), (s) -> parseLong(s, minValue, key), dynamic, properties); } - public static Setting longSetting(String key, long defaultValue, long minValue, boolean dynamic, Scope scope, boolean filtered) { - return new Setting<>(key, (s) -> Long.toString(defaultValue), (s) -> parseLong(s, minValue, key), dynamic, scope, filtered); - } - - public static Setting simpleString(String key, boolean dynamic, Scope scope) { - return simpleString(key, dynamic, scope, false); - } - - public static Setting simpleString(String key, boolean dynamic, Scope scope, boolean filtered) { - return new Setting<>(key, s -> "", Function.identity(), dynamic, scope, filtered); + public static Setting simpleString(String key, boolean dynamic, SettingsProperty... 
properties) { + return new Setting<>(key, s -> "", Function.identity(), dynamic, properties); } public static int parseInt(String s, int minValue, String key) { @@ -435,55 +434,58 @@ public class Setting extends ToXContentToBytes { return value; } - public static Setting intSetting(String key, int defaultValue, boolean dynamic, Scope scope) { - return intSetting(key, defaultValue, Integer.MIN_VALUE, dynamic, scope); + public static Setting intSetting(String key, int defaultValue, boolean dynamic, SettingsProperty... properties) { + return intSetting(key, defaultValue, Integer.MIN_VALUE, dynamic, properties); } - public static Setting boolSetting(String key, boolean defaultValue, boolean dynamic, Scope scope) { - return boolSetting(key, defaultValue, dynamic, scope, false); + public static Setting boolSetting(String key, boolean defaultValue, boolean dynamic, SettingsProperty... properties) { + return new Setting<>(key, (s) -> Boolean.toString(defaultValue), Booleans::parseBooleanExact, dynamic, properties); } - public static Setting boolSetting(String key, boolean defaultValue, boolean dynamic, Scope scope, boolean filtered) { - return new Setting<>(key, (s) -> Boolean.toString(defaultValue), Booleans::parseBooleanExact, dynamic, scope, filtered); + public static Setting boolSetting(String key, Setting fallbackSetting, boolean dynamic, + SettingsProperty... properties) { + return new Setting<>(key, fallbackSetting, Booleans::parseBooleanExact, dynamic, properties); } - public static Setting boolSetting(String key, Setting fallbackSetting, boolean dynamic, Scope scope) { - return new Setting<>(key, fallbackSetting, Booleans::parseBooleanExact, dynamic, scope); + public static Setting byteSizeSetting(String key, String percentage, boolean dynamic, SettingsProperty... 
properties) { + return new Setting<>(key, (s) -> percentage, (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, key), dynamic, properties); } - public static Setting byteSizeSetting(String key, String percentage, boolean dynamic, Scope scope) { - return new Setting<>(key, (s) -> percentage, (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, key), dynamic, scope); + public static Setting byteSizeSetting(String key, ByteSizeValue value, boolean dynamic, SettingsProperty... properties) { + return byteSizeSetting(key, (s) -> value.toString(), dynamic, properties); } - public static Setting byteSizeSetting(String key, ByteSizeValue value, boolean dynamic, Scope scope) { - return byteSizeSetting(key, (s) -> value.toString(), dynamic, scope); + public static Setting byteSizeSetting(String key, Setting fallbackSettings, boolean dynamic, + SettingsProperty... properties) { + return byteSizeSetting(key, fallbackSettings::getRaw, dynamic, properties); } - public static Setting byteSizeSetting(String key, Setting fallbackSettings, boolean dynamic, Scope scope) { - return byteSizeSetting(key, fallbackSettings::getRaw, dynamic, scope); + public static Setting byteSizeSetting(String key, Function defaultValue, boolean dynamic, + SettingsProperty... properties) { + return new Setting<>(key, defaultValue, (s) -> ByteSizeValue.parseBytesSizeValue(s, key), dynamic, properties); } - public static Setting byteSizeSetting(String key, Function defaultValue, boolean dynamic, Scope scope) { - return new Setting<>(key, defaultValue, (s) -> ByteSizeValue.parseBytesSizeValue(s, key), dynamic, scope); + public static Setting positiveTimeSetting(String key, TimeValue defaultValue, boolean dynamic, SettingsProperty... 
properties) { + return timeSetting(key, defaultValue, TimeValue.timeValueMillis(0), dynamic, properties); } - public static Setting positiveTimeSetting(String key, TimeValue defaultValue, boolean dynamic, Scope scope) { - return timeSetting(key, defaultValue, TimeValue.timeValueMillis(0), dynamic, scope); + public static Setting> listSetting(String key, List defaultStringValue, Function singleValueParser, + boolean dynamic, SettingsProperty... properties) { + return listSetting(key, (s) -> defaultStringValue, singleValueParser, dynamic, properties); } - public static Setting> listSetting(String key, List defaultStringValue, Function singleValueParser, boolean dynamic, Scope scope) { - return listSetting(key, (s) -> defaultStringValue, singleValueParser, dynamic, scope); + public static Setting> listSetting(String key, Setting> fallbackSetting, Function singleValueParser, + boolean dynamic, SettingsProperty... properties) { + return listSetting(key, (s) -> parseableStringToList(fallbackSetting.getRaw(s)), singleValueParser, dynamic, properties); } - public static Setting> listSetting(String key, Setting> fallbackSetting, Function singleValueParser, boolean dynamic, Scope scope) { - return listSetting(key, (s) -> parseableStringToList(fallbackSetting.getRaw(s)), singleValueParser, dynamic, scope); - } - - public static Setting> listSetting(String key, Function> defaultStringValue, Function singleValueParser, boolean dynamic, Scope scope) { + public static Setting> listSetting(String key, Function> defaultStringValue, + Function singleValueParser, boolean dynamic, SettingsProperty... 
properties) { Function> parser = (s) -> parseableStringToList(s).stream().map(singleValueParser).collect(Collectors.toList()); - return new Setting>(key, (s) -> arrayToParsableString(defaultStringValue.apply(s).toArray(Strings.EMPTY_ARRAY)), parser, dynamic, scope) { + return new Setting>(key, (s) -> arrayToParsableString(defaultStringValue.apply(s).toArray(Strings.EMPTY_ARRAY)), parser, + dynamic, properties) { private final Pattern pattern = Pattern.compile(Pattern.quote(key)+"(\\.\\d+)?"); @Override public String getRaw(Settings settings) { @@ -537,11 +539,11 @@ public class Setting extends ToXContentToBytes { } } - public static Setting groupSetting(String key, boolean dynamic, Scope scope) { + public static Setting groupSetting(String key, boolean dynamic, SettingsProperty... properties) { if (key.endsWith(".") == false) { throw new IllegalArgumentException("key must end with a '.'"); } - return new Setting(key, "", (s) -> null, dynamic, scope) { + return new Setting(key, "", (s) -> null, dynamic, properties) { @Override public boolean isGroupSetting() { @@ -600,36 +602,40 @@ public class Setting extends ToXContentToBytes { }; } - public static Setting timeSetting(String key, Function defaultValue, TimeValue minValue, boolean dynamic, Scope scope) { + public static Setting timeSetting(String key, Function defaultValue, TimeValue minValue, boolean dynamic, + SettingsProperty... 
properties) { return new Setting<>(key, defaultValue, (s) -> { TimeValue timeValue = TimeValue.parseTimeValue(s, null, key); if (timeValue.millis() < minValue.millis()) { throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); } return timeValue; - }, dynamic, scope); + }, dynamic, properties); } - public static Setting timeSetting(String key, TimeValue defaultValue, TimeValue minValue, boolean dynamic, Scope scope) { - return timeSetting(key, (s) -> defaultValue.getStringRep(), minValue, dynamic, scope); + public static Setting timeSetting(String key, TimeValue defaultValue, TimeValue minValue, boolean dynamic, + SettingsProperty... properties) { + return timeSetting(key, (s) -> defaultValue.getStringRep(), minValue, dynamic, properties); } - public static Setting timeSetting(String key, TimeValue defaultValue, boolean dynamic, Scope scope) { - return new Setting<>(key, (s) -> defaultValue.toString(), (s) -> TimeValue.parseTimeValue(s, key), dynamic, scope); + public static Setting timeSetting(String key, TimeValue defaultValue, boolean dynamic, SettingsProperty... properties) { + return new Setting<>(key, (s) -> defaultValue.toString(), (s) -> TimeValue.parseTimeValue(s, key), dynamic, properties); } - public static Setting timeSetting(String key, Setting fallbackSetting, boolean dynamic, Scope scope) { - return new Setting<>(key, fallbackSetting::getRaw, (s) -> TimeValue.parseTimeValue(s, key), dynamic, scope); + public static Setting timeSetting(String key, Setting fallbackSetting, boolean dynamic, + SettingsProperty... properties) { + return new Setting<>(key, fallbackSetting::getRaw, (s) -> TimeValue.parseTimeValue(s, key), dynamic, properties); } - public static Setting doubleSetting(String key, double defaultValue, double minValue, boolean dynamic, Scope scope) { + public static Setting doubleSetting(String key, double defaultValue, double minValue, boolean dynamic, + SettingsProperty... 
properties) { return new Setting<>(key, (s) -> Double.toString(defaultValue), (s) -> { final double d = Double.parseDouble(s); if (d < minValue) { throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); } return d; - }, dynamic, scope); + }, dynamic, properties); } @Override @@ -650,8 +656,9 @@ public class Setting extends ToXContentToBytes { * can easily be added with this setting. Yet, dynamic key settings don't support updaters our of the box unless {@link #getConcreteSetting(String)} * is used to pull the updater. */ - public static Setting dynamicKeySetting(String key, String defaultValue, Function parser, boolean dynamic, Scope scope) { - return new Setting(key, defaultValue, parser, dynamic, scope) { + public static Setting dynamicKeySetting(String key, String defaultValue, Function parser, boolean dynamic, + SettingsProperty... properties) { + return new Setting(key, defaultValue, parser, dynamic, properties) { @Override boolean isGroupSetting() { @@ -671,7 +678,7 @@ public class Setting extends ToXContentToBytes { @Override public Setting getConcreteSetting(String key) { if (match(key)) { - return new Setting<>(key, defaultValue, parser, dynamic, scope); + return new Setting<>(key, defaultValue, parser, dynamic, properties); } else { throw new IllegalArgumentException("key must match setting but didn't ["+key +"]"); } diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java index 027e6e7cafeb..8786ac5f4474 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java @@ -76,19 +76,17 @@ public class SettingsModule extends AbstractModule { registerSettingsFilter(setting.getKey()); } } - switch (setting.getScope()) { - case CLUSTER: - if (clusterSettings.containsKey(setting.getKey())) { - 
throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); - } - clusterSettings.put(setting.getKey(), setting); - break; - case INDEX: - if (indexSettings.containsKey(setting.getKey())) { - throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); - } - indexSettings.put(setting.getKey(), setting); - break; + if (setting.hasClusterScope()) { + if (clusterSettings.containsKey(setting.getKey())) { + throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); + } + clusterSettings.put(setting.getKey(), setting); + } + if (setting.hasIndexScope()) { + if (indexSettings.containsKey(setting.getKey())) { + throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); + } + indexSettings.put(setting.getKey(), setting); } } @@ -110,11 +108,11 @@ public class SettingsModule extends AbstractModule { * Check if a setting has already been registered */ public boolean exists(Setting setting) { - switch (setting.getScope()) { - case CLUSTER: - return clusterSettings.containsKey(setting.getKey()); - case INDEX: - return indexSettings.containsKey(setting.getKey()); + if (setting.hasClusterScope()) { + return clusterSettings.containsKey(setting.getKey()); + } + if (setting.hasIndexScope()) { + return indexSettings.containsKey(setting.getKey()); } throw new IllegalArgumentException("setting scope is unknown. 
This should never happen!"); } diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index 10b1412425cb..6f055e3cf04e 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.util.concurrent; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import java.util.Arrays; @@ -41,7 +42,8 @@ public class EsExecutors { * Settings key to manually set the number of available processors. * This is used to adjust thread pools sizes etc. per node. */ - public static final Setting PROCESSORS_SETTING = Setting.intSetting("processors", Math.min(32, Runtime.getRuntime().availableProcessors()), 1, false, Setting.Scope.CLUSTER) ; + public static final Setting PROCESSORS_SETTING = + Setting.intSetting("processors", Math.min(32, Runtime.getRuntime().availableProcessors()), 1, false, SettingsProperty.ClusterScope); /** * Returns the number of processors available but at most 32. 
diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index 1928392fe411..798cd7462afa 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import java.io.Closeable; @@ -63,7 +64,8 @@ import java.util.concurrent.atomic.AtomicBoolean; public final class ThreadContext implements Closeable, Writeable{ public static final String PREFIX = "request.headers"; - public static final Setting DEFAULT_HEADERS_SETTING = Setting.groupSetting(PREFIX + ".", false, Setting.Scope.CLUSTER); + public static final Setting DEFAULT_HEADERS_SETTING = + Setting.groupSetting(PREFIX + ".", false, SettingsProperty.ClusterScope); private final Map defaultHeader; private static final ThreadContextStruct DEFAULT_CONTEXT = new ThreadContextStruct(Collections.emptyMap()); private final ContextThreadLocal threadLocal; diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java b/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java index b51339aac90e..57ae63c11049 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java +++ b/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java @@ -23,6 +23,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.common.settings.Setting; +import 
org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.discovery.local.LocalDiscovery; @@ -44,10 +45,11 @@ import java.util.function.Function; */ public class DiscoveryModule extends AbstractModule { - public static final Setting DISCOVERY_TYPE_SETTING = new Setting<>("discovery.type", - settings -> DiscoveryNode.localNode(settings) ? "local" : "zen", Function.identity(), false, Setting.Scope.CLUSTER); - public static final Setting ZEN_MASTER_SERVICE_TYPE_SETTING = new Setting<>("discovery.zen.masterservice.type", - "zen", Function.identity(), false, Setting.Scope.CLUSTER); + public static final Setting DISCOVERY_TYPE_SETTING = + new Setting<>("discovery.type", settings -> DiscoveryNode.localNode(settings) ? "local" : "zen", Function.identity(), false, + SettingsProperty.ClusterScope); + public static final Setting ZEN_MASTER_SERVICE_TYPE_SETTING = + new Setting<>("discovery.zen.masterservice.type", "zen", Function.identity(), false, SettingsProperty.ClusterScope); private final Settings settings; private final List> unicastHostProviders = new ArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java b/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java index bef1c8fe5ecb..181ee8253c0e 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java +++ b/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -40,8 +41,11 @@ import 
java.util.concurrent.TimeUnit; */ public class DiscoveryService extends AbstractLifecycleComponent { - public static final Setting INITIAL_STATE_TIMEOUT_SETTING = Setting.positiveTimeSetting("discovery.initial_state_timeout", TimeValue.timeValueSeconds(30), false, Setting.Scope.CLUSTER); - public static final Setting DISCOVERY_SEED_SETTING = Setting.longSetting("discovery.id.seed", 0L, Long.MIN_VALUE, false, Setting.Scope.CLUSTER); + public static final Setting INITIAL_STATE_TIMEOUT_SETTING = + Setting.positiveTimeSetting("discovery.initial_state_timeout", TimeValue.timeValueSeconds(30), false, + SettingsProperty.ClusterScope); + public static final Setting DISCOVERY_SEED_SETTING = + Setting.longSetting("discovery.id.seed", 0L, Long.MIN_VALUE, false, SettingsProperty.ClusterScope); private static class InitialStateListener implements InitialStateDiscoveryListener { diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java b/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java index dec856992b3c..64c13fabe7cb 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java +++ b/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.RestStatus; @@ -43,15 +44,21 @@ public class DiscoverySettings extends AbstractComponent { * sets the timeout for a complete publishing cycle, including both sending and committing. 
the master * will continue to process the next cluster state update after this time has elapsed **/ - public static final Setting PUBLISH_TIMEOUT_SETTING = Setting.positiveTimeSetting("discovery.zen.publish_timeout", TimeValue.timeValueSeconds(30), true, Setting.Scope.CLUSTER); + public static final Setting PUBLISH_TIMEOUT_SETTING = + Setting.positiveTimeSetting("discovery.zen.publish_timeout", TimeValue.timeValueSeconds(30), true, SettingsProperty.ClusterScope); /** * sets the timeout for receiving enough acks for a specific cluster state and committing it. failing * to receive responses within this window will cause the cluster state change to be rejected. */ - public static final Setting COMMIT_TIMEOUT_SETTING = new Setting<>("discovery.zen.commit_timeout", (s) -> PUBLISH_TIMEOUT_SETTING.getRaw(s), (s) -> TimeValue.parseTimeValue(s, TimeValue.timeValueSeconds(30), "discovery.zen.commit_timeout"), true, Setting.Scope.CLUSTER); - public static final Setting NO_MASTER_BLOCK_SETTING = new Setting<>("discovery.zen.no_master_block", "write", DiscoverySettings::parseNoMasterBlock, true, Setting.Scope.CLUSTER); - public static final Setting PUBLISH_DIFF_ENABLE_SETTING = Setting.boolSetting("discovery.zen.publish_diff.enable", true, true, Setting.Scope.CLUSTER); + public static final Setting COMMIT_TIMEOUT_SETTING = + new Setting<>("discovery.zen.commit_timeout", (s) -> PUBLISH_TIMEOUT_SETTING.getRaw(s), + (s) -> TimeValue.parseTimeValue(s, TimeValue.timeValueSeconds(30), "discovery.zen.commit_timeout"), true, + SettingsProperty.ClusterScope); + public static final Setting NO_MASTER_BLOCK_SETTING = + new Setting<>("discovery.zen.no_master_block", "write", DiscoverySettings::parseNoMasterBlock, true, SettingsProperty.ClusterScope); + public static final Setting PUBLISH_DIFF_ENABLE_SETTING = + Setting.boolSetting("discovery.zen.publish_diff.enable", true, true, SettingsProperty.ClusterScope); private volatile ClusterBlock noMasterBlock; private volatile TimeValue 
publishTimeout; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index ce0831471174..653c77945c98 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -46,6 +46,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.Discovery; @@ -89,17 +90,28 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; */ public class ZenDiscovery extends AbstractLifecycleComponent implements Discovery, PingContextProvider { - public final static Setting PING_TIMEOUT_SETTING = Setting.positiveTimeSetting("discovery.zen.ping_timeout", timeValueSeconds(3), false, Setting.Scope.CLUSTER); - public final static Setting JOIN_TIMEOUT_SETTING = Setting.timeSetting("discovery.zen.join_timeout", - settings -> TimeValue.timeValueMillis(PING_TIMEOUT_SETTING.get(settings).millis() * 20).toString(), TimeValue.timeValueMillis(0), false, Setting.Scope.CLUSTER); - public final static Setting JOIN_RETRY_ATTEMPTS_SETTING = Setting.intSetting("discovery.zen.join_retry_attempts", 3, 1, false, Setting.Scope.CLUSTER); - public final static Setting JOIN_RETRY_DELAY_SETTING = Setting.positiveTimeSetting("discovery.zen.join_retry_delay", TimeValue.timeValueMillis(100), false, Setting.Scope.CLUSTER); - public final static Setting MAX_PINGS_FROM_ANOTHER_MASTER_SETTING = Setting.intSetting("discovery.zen.max_pings_from_another_master", 3, 1, false, Setting.Scope.CLUSTER); - public final static Setting SEND_LEAVE_REQUEST_SETTING = 
Setting.boolSetting("discovery.zen.send_leave_request", true, false, Setting.Scope.CLUSTER); - public final static Setting MASTER_ELECTION_FILTER_CLIENT_SETTING = Setting.boolSetting("discovery.zen.master_election.filter_client", true, false, Setting.Scope.CLUSTER); - public final static Setting MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING = Setting.timeSetting("discovery.zen.master_election.wait_for_joins_timeout", - settings -> TimeValue.timeValueMillis(JOIN_TIMEOUT_SETTING.get(settings).millis() / 2).toString(), TimeValue.timeValueMillis(0), false, Setting.Scope.CLUSTER); - public final static Setting MASTER_ELECTION_FILTER_DATA_SETTING = Setting.boolSetting("discovery.zen.master_election.filter_data", false, false, Setting.Scope.CLUSTER); + public final static Setting PING_TIMEOUT_SETTING = + Setting.positiveTimeSetting("discovery.zen.ping_timeout", timeValueSeconds(3), false, SettingsProperty.ClusterScope); + public final static Setting JOIN_TIMEOUT_SETTING = + Setting.timeSetting("discovery.zen.join_timeout", + settings -> TimeValue.timeValueMillis(PING_TIMEOUT_SETTING.get(settings).millis() * 20).toString(), + TimeValue.timeValueMillis(0), false, SettingsProperty.ClusterScope); + public final static Setting JOIN_RETRY_ATTEMPTS_SETTING = + Setting.intSetting("discovery.zen.join_retry_attempts", 3, 1, false, SettingsProperty.ClusterScope); + public final static Setting JOIN_RETRY_DELAY_SETTING = + Setting.positiveTimeSetting("discovery.zen.join_retry_delay", TimeValue.timeValueMillis(100), false, SettingsProperty.ClusterScope); + public final static Setting MAX_PINGS_FROM_ANOTHER_MASTER_SETTING = + Setting.intSetting("discovery.zen.max_pings_from_another_master", 3, 1, false, SettingsProperty.ClusterScope); + public final static Setting SEND_LEAVE_REQUEST_SETTING = + Setting.boolSetting("discovery.zen.send_leave_request", true, false, SettingsProperty.ClusterScope); + public final static Setting MASTER_ELECTION_FILTER_CLIENT_SETTING = + 
Setting.boolSetting("discovery.zen.master_election.filter_client", true, false, SettingsProperty.ClusterScope); + public final static Setting MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING = + Setting.timeSetting("discovery.zen.master_election.wait_for_joins_timeout", + settings -> TimeValue.timeValueMillis(JOIN_TIMEOUT_SETTING.get(settings).millis() / 2).toString(), TimeValue.timeValueMillis(0), + false, SettingsProperty.ClusterScope); + public final static Setting MASTER_ELECTION_FILTER_DATA_SETTING = + Setting.boolSetting("discovery.zen.master_election.filter_data", false, false, SettingsProperty.ClusterScope); public static final String DISCOVERY_REJOIN_ACTION_NAME = "internal:discovery/zen/rejoin"; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java b/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java index 1482fb92a223..cd418e369c48 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; @@ -40,7 +41,8 @@ import java.util.List; */ public class ElectMasterService extends AbstractComponent { - public static final Setting DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING = Setting.intSetting("discovery.zen.minimum_master_nodes", -1, true, Setting.Scope.CLUSTER); + public static final Setting DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING = + Setting.intSetting("discovery.zen.minimum_master_nodes", -1, true, SettingsProperty.ClusterScope); // This is the minimum version a master needs 
to be on, otherwise it gets ignored // This is based on the minimum compatible version of the current version this node is on diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java b/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java index 62b0250315c7..c4247ea15df4 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java @@ -22,7 +22,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Scope; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.threadpool.ThreadPool; @@ -37,11 +37,16 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; */ public abstract class FaultDetection extends AbstractComponent { - public static final Setting CONNECT_ON_NETWORK_DISCONNECT_SETTING = Setting.boolSetting("discovery.zen.fd.connect_on_network_disconnect", false, false, Scope.CLUSTER); - public static final Setting PING_INTERVAL_SETTING = Setting.positiveTimeSetting("discovery.zen.fd.ping_interval", timeValueSeconds(1), false, Scope.CLUSTER); - public static final Setting PING_TIMEOUT_SETTING = Setting.timeSetting("discovery.zen.fd.ping_timeout", timeValueSeconds(30), false, Scope.CLUSTER); - public static final Setting PING_RETRIES_SETTING = Setting.intSetting("discovery.zen.fd.ping_retries", 3, false, Scope.CLUSTER); - public static final Setting REGISTER_CONNECTION_LISTENER_SETTING = Setting.boolSetting("discovery.zen.fd.register_connection_listener", true, false, Scope.CLUSTER); + public static final Setting CONNECT_ON_NETWORK_DISCONNECT_SETTING = + 
Setting.boolSetting("discovery.zen.fd.connect_on_network_disconnect", false, false, SettingsProperty.ClusterScope); + public static final Setting PING_INTERVAL_SETTING = + Setting.positiveTimeSetting("discovery.zen.fd.ping_interval", timeValueSeconds(1), false, SettingsProperty.ClusterScope); + public static final Setting PING_TIMEOUT_SETTING = + Setting.timeSetting("discovery.zen.fd.ping_timeout", timeValueSeconds(30), false, SettingsProperty.ClusterScope); + public static final Setting PING_RETRIES_SETTING = + Setting.intSetting("discovery.zen.fd.ping_retries", 3, false, SettingsProperty.ClusterScope); + public static final Setting REGISTER_CONNECTION_LISTENER_SETTING = + Setting.boolSetting("discovery.zen.fd.register_connection_listener", true, false, SettingsProperty.ClusterScope); protected final ThreadPool threadPool; protected final ClusterName clusterName; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java index 427abca8d85f..24191ccf4fc8 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; @@ -86,8 +87,11 @@ import static org.elasticsearch.discovery.zen.ping.ZenPing.PingResponse.readPing public class UnicastZenPing extends AbstractLifecycleComponent implements ZenPing { public static final String ACTION_NAME = "internal:discovery/zen/unicast"; - 
public static final Setting> DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING = Setting.listSetting("discovery.zen.ping.unicast.hosts", Collections.emptyList(), Function.identity(), false, Setting.Scope.CLUSTER); - public static final Setting DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING = Setting.intSetting("discovery.zen.ping.unicast.concurrent_connects", 10, 0, false, Setting.Scope.CLUSTER); + public static final Setting> DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING = + Setting.listSetting("discovery.zen.ping.unicast.hosts", Collections.emptyList(), Function.identity(), false, + SettingsProperty.ClusterScope); + public static final Setting DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING = + Setting.intSetting("discovery.zen.ping.unicast.concurrent_connects", 10, 0, false, SettingsProperty.ClusterScope); // these limits are per-address public static final int LIMIT_FOREIGN_PORTS_COUNT = 1; diff --git a/core/src/main/java/org/elasticsearch/env/Environment.java b/core/src/main/java/org/elasticsearch/env/Environment.java index 1f8cffc97f39..143ddf699012 100644 --- a/core/src/main/java/org/elasticsearch/env/Environment.java +++ b/core/src/main/java/org/elasticsearch/env/Environment.java @@ -23,6 +23,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import java.io.IOException; @@ -46,15 +47,18 @@ import static org.elasticsearch.common.Strings.cleanPath; // TODO: move PathUtils to be package-private here instead of // public+forbidden api! 
public class Environment { - public static final Setting PATH_HOME_SETTING = Setting.simpleString("path.home", false, Setting.Scope.CLUSTER); - public static final Setting PATH_CONF_SETTING = Setting.simpleString("path.conf", false, Setting.Scope.CLUSTER); - public static final Setting PATH_SCRIPTS_SETTING = Setting.simpleString("path.scripts", false, Setting.Scope.CLUSTER); - public static final Setting> PATH_DATA_SETTING = Setting.listSetting("path.data", Collections.emptyList(), Function.identity(), false, Setting.Scope.CLUSTER); - public static final Setting PATH_LOGS_SETTING = Setting.simpleString("path.logs", false, Setting.Scope.CLUSTER); - public static final Setting PATH_PLUGINS_SETTING = Setting.simpleString("path.plugins", false, Setting.Scope.CLUSTER); - public static final Setting> PATH_REPO_SETTING = Setting.listSetting("path.repo", Collections.emptyList(), Function.identity(), false, Setting.Scope.CLUSTER); - public static final Setting PATH_SHARED_DATA_SETTING = Setting.simpleString("path.shared_data", false, Setting.Scope.CLUSTER); - public static final Setting PIDFILE_SETTING = Setting.simpleString("pidfile", false, Setting.Scope.CLUSTER); + public static final Setting PATH_HOME_SETTING = Setting.simpleString("path.home", false, SettingsProperty.ClusterScope); + public static final Setting PATH_CONF_SETTING = Setting.simpleString("path.conf", false, SettingsProperty.ClusterScope); + public static final Setting PATH_SCRIPTS_SETTING = Setting.simpleString("path.scripts", false, SettingsProperty.ClusterScope); + public static final Setting> PATH_DATA_SETTING = + Setting.listSetting("path.data", Collections.emptyList(), Function.identity(), false, SettingsProperty.ClusterScope); + public static final Setting PATH_LOGS_SETTING = Setting.simpleString("path.logs", false, SettingsProperty.ClusterScope); + public static final Setting PATH_PLUGINS_SETTING = Setting.simpleString("path.plugins", false, SettingsProperty.ClusterScope); + public static final 
Setting> PATH_REPO_SETTING = + Setting.listSetting("path.repo", Collections.emptyList(), Function.identity(), false, SettingsProperty.ClusterScope); + public static final Setting PATH_SHARED_DATA_SETTING = + Setting.simpleString("path.shared_data", false, SettingsProperty.ClusterScope); + public static final Setting PIDFILE_SETTING = Setting.simpleString("pidfile", false, SettingsProperty.ClusterScope); private final Settings settings; diff --git a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 5eecafa252f1..f6d64b3406b4 100644 --- a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -36,7 +36,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Scope; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -136,19 +136,19 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { * Maximum number of data nodes that should run in an environment. */ public static final Setting MAX_LOCAL_STORAGE_NODES_SETTING = Setting.intSetting("node.max_local_storage_nodes", 50, 1, false, - Scope.CLUSTER); + SettingsProperty.ClusterScope); /** * If true automatically append node id to custom data paths. */ public static final Setting ADD_NODE_ID_TO_CUSTOM_PATH = Setting.boolSetting("node.add_id_to_custom_path", true, false, - Scope.CLUSTER); + SettingsProperty.ClusterScope); /** * If true the [verbose] SegmentInfos.infoStream logging is sent to System.out. 
*/ public static final Setting ENABLE_LUCENE_SEGMENT_INFOS_TRACE_SETTING = Setting - .boolSetting("node.enable_lucene_segment_infos_trace", false, false, Scope.CLUSTER); + .boolSetting("node.enable_lucene_segment_infos_trace", false, false, SettingsProperty.ClusterScope); public static final String NODES_FOLDER = "nodes"; public static final String INDICES_FOLDER = "indices"; diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java index 43b22d6c0bb4..8c17325c08c6 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java @@ -37,6 +37,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.Discovery; @@ -52,20 +53,20 @@ import java.util.concurrent.atomic.AtomicBoolean; */ public class GatewayService extends AbstractLifecycleComponent implements ClusterStateListener { - public static final Setting EXPECTED_NODES_SETTING = Setting.intSetting( - "gateway.expected_nodes", -1, -1, false, Setting.Scope.CLUSTER); - public static final Setting EXPECTED_DATA_NODES_SETTING = Setting.intSetting( - "gateway.expected_data_nodes", -1, -1, false, Setting.Scope.CLUSTER); - public static final Setting EXPECTED_MASTER_NODES_SETTING = Setting.intSetting( - "gateway.expected_master_nodes", -1, -1, false, Setting.Scope.CLUSTER); - public static final Setting RECOVER_AFTER_TIME_SETTING = Setting.positiveTimeSetting( - "gateway.recover_after_time", TimeValue.timeValueMillis(0), false, Setting.Scope.CLUSTER); - public static final 
Setting RECOVER_AFTER_NODES_SETTING = Setting.intSetting( - "gateway.recover_after_nodes", -1, -1, false, Setting.Scope.CLUSTER); - public static final Setting RECOVER_AFTER_DATA_NODES_SETTING = Setting.intSetting( - "gateway.recover_after_data_nodes", -1, -1, false, Setting.Scope.CLUSTER); - public static final Setting RECOVER_AFTER_MASTER_NODES_SETTING = Setting.intSetting( - "gateway.recover_after_master_nodes", 0, 0, false, Setting.Scope.CLUSTER); + public static final Setting EXPECTED_NODES_SETTING = + Setting.intSetting("gateway.expected_nodes", -1, -1, false, SettingsProperty.ClusterScope); + public static final Setting EXPECTED_DATA_NODES_SETTING = + Setting.intSetting("gateway.expected_data_nodes", -1, -1, false, SettingsProperty.ClusterScope); + public static final Setting EXPECTED_MASTER_NODES_SETTING = + Setting.intSetting("gateway.expected_master_nodes", -1, -1, false, SettingsProperty.ClusterScope); + public static final Setting RECOVER_AFTER_TIME_SETTING = + Setting.positiveTimeSetting("gateway.recover_after_time", TimeValue.timeValueMillis(0), false, SettingsProperty.ClusterScope); + public static final Setting RECOVER_AFTER_NODES_SETTING = + Setting.intSetting("gateway.recover_after_nodes", -1, -1, false, SettingsProperty.ClusterScope); + public static final Setting RECOVER_AFTER_DATA_NODES_SETTING = + Setting.intSetting("gateway.recover_after_data_nodes", -1, -1, false, SettingsProperty.ClusterScope); + public static final Setting RECOVER_AFTER_MASTER_NODES_SETTING = + Setting.intSetting("gateway.recover_after_master_nodes", 0, 0, false, SettingsProperty.ClusterScope); public static final ClusterBlock STATE_NOT_RECOVERED_BLOCK = new ClusterBlock(1, "state not recovered / initialized", true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL); diff --git a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java index 8809f68853bb..018262c0304b 100644 
--- a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java @@ -31,6 +31,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.TransportNodesListGatewayStartedShards.NodeGatewayStartedShards; import org.elasticsearch.index.shard.ShardStateMetaData; @@ -67,9 +68,13 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { } }; - public static final Setting NODE_INITIAL_SHARDS_SETTING = new Setting<>("gateway.initial_shards", (settings) -> settings.get("gateway.local.initial_shards", "quorum"), INITIAL_SHARDS_PARSER, true, Setting.Scope.CLUSTER); + public static final Setting NODE_INITIAL_SHARDS_SETTING = + new Setting<>("gateway.initial_shards", (settings) -> settings.get("gateway.local.initial_shards", "quorum"), INITIAL_SHARDS_PARSER, + true, SettingsProperty.ClusterScope); @Deprecated - public static final Setting INDEX_RECOVERY_INITIAL_SHARDS_SETTING = new Setting<>("index.recovery.initial_shards", (settings) -> NODE_INITIAL_SHARDS_SETTING.get(settings) , INITIAL_SHARDS_PARSER, true, Setting.Scope.INDEX); + public static final Setting INDEX_RECOVERY_INITIAL_SHARDS_SETTING = + new Setting<>("index.recovery.initial_shards", (settings) -> NODE_INITIAL_SHARDS_SETTING.get(settings) , INITIAL_SHARDS_PARSER, true, + SettingsProperty.IndexScope); public PrimaryShardAllocator(Settings settings) { super(settings); diff --git a/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java b/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java index 0e362615f0c7..2332d8704e64 100644 --- 
a/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java +++ b/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java @@ -20,7 +20,7 @@ package org.elasticsearch.http; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Scope; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.transport.PortsRange; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -32,30 +32,51 @@ import static org.elasticsearch.common.settings.Setting.listSetting; public final class HttpTransportSettings { - public static final Setting SETTING_CORS_ENABLED = Setting.boolSetting("http.cors.enabled", false, false, Scope.CLUSTER); - public static final Setting SETTING_CORS_ALLOW_ORIGIN = new Setting("http.cors.allow-origin", "", (value) -> value, false, Scope.CLUSTER); - public static final Setting SETTING_CORS_MAX_AGE = Setting.intSetting("http.cors.max-age", 1728000, false, Scope.CLUSTER); - public static final Setting SETTING_CORS_ALLOW_METHODS = new Setting("http.cors.allow-methods", "OPTIONS, HEAD, GET, POST, PUT, DELETE", (value) -> value, false, Scope.CLUSTER); - public static final Setting SETTING_CORS_ALLOW_HEADERS = new Setting("http.cors.allow-headers", "X-Requested-With, Content-Type, Content-Length", (value) -> value, false, Scope.CLUSTER); - public static final Setting SETTING_CORS_ALLOW_CREDENTIALS = Setting.boolSetting("http.cors.allow-credentials", false, false, Scope.CLUSTER); - public static final Setting SETTING_PIPELINING = Setting.boolSetting("http.pipelining", true, false, Scope.CLUSTER); - public static final Setting SETTING_PIPELINING_MAX_EVENTS = Setting.intSetting("http.pipelining.max_events", 10000, false, Scope.CLUSTER); - public static final Setting SETTING_HTTP_COMPRESSION = Setting.boolSetting("http.compression", false, false, Scope.CLUSTER); - public static final Setting 
SETTING_HTTP_COMPRESSION_LEVEL = Setting.intSetting("http.compression_level", 6, false, Scope.CLUSTER); - public static final Setting> SETTING_HTTP_HOST = listSetting("http.host", emptyList(), s -> s, false, Scope.CLUSTER); - public static final Setting> SETTING_HTTP_PUBLISH_HOST = listSetting("http.publish_host", SETTING_HTTP_HOST, s -> s, false, Scope.CLUSTER); - public static final Setting> SETTING_HTTP_BIND_HOST = listSetting("http.bind_host", SETTING_HTTP_HOST, s -> s, false, Scope.CLUSTER); + public static final Setting SETTING_CORS_ENABLED = + Setting.boolSetting("http.cors.enabled", false, false, SettingsProperty.ClusterScope); + public static final Setting SETTING_CORS_ALLOW_ORIGIN = + new Setting("http.cors.allow-origin", "", (value) -> value, false, SettingsProperty.ClusterScope); + public static final Setting SETTING_CORS_MAX_AGE = + Setting.intSetting("http.cors.max-age", 1728000, false, SettingsProperty.ClusterScope); + public static final Setting SETTING_CORS_ALLOW_METHODS = + new Setting("http.cors.allow-methods", "OPTIONS, HEAD, GET, POST, PUT, DELETE", (value) -> value, false, SettingsProperty.ClusterScope); + public static final Setting SETTING_CORS_ALLOW_HEADERS = + new Setting("http.cors.allow-headers", "X-Requested-With, Content-Type, Content-Length", (value) -> value, false, SettingsProperty.ClusterScope); + public static final Setting SETTING_CORS_ALLOW_CREDENTIALS = + Setting.boolSetting("http.cors.allow-credentials", false, false, SettingsProperty.ClusterScope); + public static final Setting SETTING_PIPELINING = + Setting.boolSetting("http.pipelining", true, false, SettingsProperty.ClusterScope); + public static final Setting SETTING_PIPELINING_MAX_EVENTS = + Setting.intSetting("http.pipelining.max_events", 10000, false, SettingsProperty.ClusterScope); + public static final Setting SETTING_HTTP_COMPRESSION = + Setting.boolSetting("http.compression", false, false, SettingsProperty.ClusterScope); + public static final Setting 
SETTING_HTTP_COMPRESSION_LEVEL = + Setting.intSetting("http.compression_level", 6, false, SettingsProperty.ClusterScope); + public static final Setting> SETTING_HTTP_HOST = + listSetting("http.host", emptyList(), s -> s, false, SettingsProperty.ClusterScope); + public static final Setting> SETTING_HTTP_PUBLISH_HOST = + listSetting("http.publish_host", SETTING_HTTP_HOST, s -> s, false, SettingsProperty.ClusterScope); + public static final Setting> SETTING_HTTP_BIND_HOST = + listSetting("http.bind_host", SETTING_HTTP_HOST, s -> s, false, SettingsProperty.ClusterScope); - public static final Setting SETTING_HTTP_PORT = new Setting("http.port", "9200-9300", PortsRange::new, false, Scope.CLUSTER); - public static final Setting SETTING_HTTP_PUBLISH_PORT = Setting.intSetting("http.publish_port", 0, 0, false, Scope.CLUSTER); - public static final Setting SETTING_HTTP_DETAILED_ERRORS_ENABLED = Setting.boolSetting("http.detailed_errors.enabled", true, false, Scope.CLUSTER); - public static final Setting SETTING_HTTP_MAX_CONTENT_LENGTH = Setting.byteSizeSetting("http.max_content_length", new ByteSizeValue(100, ByteSizeUnit.MB), false, Scope.CLUSTER) ; - public static final Setting SETTING_HTTP_MAX_CHUNK_SIZE = Setting.byteSizeSetting("http.max_chunk_size", new ByteSizeValue(8, ByteSizeUnit.KB), false, Scope.CLUSTER) ; - public static final Setting SETTING_HTTP_MAX_HEADER_SIZE = Setting.byteSizeSetting("http.max_header_size", new ByteSizeValue(8, ByteSizeUnit.KB), false, Scope.CLUSTER) ; - public static final Setting SETTING_HTTP_MAX_INITIAL_LINE_LENGTH = Setting.byteSizeSetting("http.max_initial_line_length", new ByteSizeValue(4, ByteSizeUnit.KB), false, Scope.CLUSTER) ; + public static final Setting SETTING_HTTP_PORT = + new Setting("http.port", "9200-9300", PortsRange::new, false, SettingsProperty.ClusterScope); + public static final Setting SETTING_HTTP_PUBLISH_PORT = + Setting.intSetting("http.publish_port", 0, 0, false, SettingsProperty.ClusterScope); + public static 
final Setting SETTING_HTTP_DETAILED_ERRORS_ENABLED = + Setting.boolSetting("http.detailed_errors.enabled", true, false, SettingsProperty.ClusterScope); + public static final Setting SETTING_HTTP_MAX_CONTENT_LENGTH = + Setting.byteSizeSetting("http.max_content_length", new ByteSizeValue(100, ByteSizeUnit.MB), false, SettingsProperty.ClusterScope); + public static final Setting SETTING_HTTP_MAX_CHUNK_SIZE = + Setting.byteSizeSetting("http.max_chunk_size", new ByteSizeValue(8, ByteSizeUnit.KB), false, SettingsProperty.ClusterScope); + public static final Setting SETTING_HTTP_MAX_HEADER_SIZE = + Setting.byteSizeSetting("http.max_header_size", new ByteSizeValue(8, ByteSizeUnit.KB), false, SettingsProperty.ClusterScope); + public static final Setting SETTING_HTTP_MAX_INITIAL_LINE_LENGTH = + Setting.byteSizeSetting("http.max_initial_line_length", new ByteSizeValue(4, ByteSizeUnit.KB), false, SettingsProperty.ClusterScope); // don't reset cookies by default, since I don't think we really need to // note, parsing cookies was fixed in netty 3.5.1 regarding stack allocation, but still, currently, we don't need cookies - public static final Setting SETTING_HTTP_RESET_COOKIES = Setting.boolSetting("http.reset_cookies", false, false, Scope.CLUSTER); + public static final Setting SETTING_HTTP_RESET_COOKIES = + Setting.boolSetting("http.reset_cookies", false, false, SettingsProperty.ClusterScope); private HttpTransportSettings() { } diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java index 79927c276320..f021ea812f9e 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java +++ b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.netty.OpenChannelsHandler; import org.elasticsearch.common.network.NetworkAddress; import 
org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.InetSocketTransportAddress; @@ -116,33 +117,32 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent SETTING_HTTP_NETTY_MAX_CUMULATION_BUFFER_CAPACITY = - Setting.byteSizeSetting("http.netty.max_cumulation_buffer_capacity", new ByteSizeValue(-1), false, Setting.Scope.CLUSTER); + Setting.byteSizeSetting("http.netty.max_cumulation_buffer_capacity", new ByteSizeValue(-1), false, + SettingsProperty.ClusterScope); public static Setting SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = - Setting.intSetting("http.netty.max_composite_buffer_components", -1, false, Setting.Scope.CLUSTER); + Setting.intSetting("http.netty.max_composite_buffer_components", -1, false, SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_WORKER_COUNT = new Setting<>("http.netty.worker_count", (s) -> Integer.toString(EsExecutors.boundedNumberOfProcessors(s) * 2), - (s) -> Setting.parseInt(s, 1, "http.netty.worker_count"), false, Setting.Scope.CLUSTER); + (s) -> Setting.parseInt(s, 1, "http.netty.worker_count"), false, SettingsProperty.ClusterScope); - public static final Setting SETTING_HTTP_TCP_NO_DELAY = boolSetting("http.tcp_no_delay", NetworkService.TcpSettings - .TCP_NO_DELAY, false, - Setting.Scope.CLUSTER); - public static final Setting SETTING_HTTP_TCP_KEEP_ALIVE = boolSetting("http.tcp.keep_alive", NetworkService.TcpSettings - .TCP_KEEP_ALIVE, false, - Setting.Scope.CLUSTER); - public static final Setting SETTING_HTTP_TCP_BLOCKING_SERVER = boolSetting("http.tcp.blocking_server", NetworkService - .TcpSettings.TCP_BLOCKING_SERVER, - false, Setting.Scope.CLUSTER); - public static final Setting SETTING_HTTP_TCP_REUSE_ADDRESS = 
boolSetting("http.tcp.reuse_address", NetworkService - .TcpSettings.TCP_REUSE_ADDRESS, - false, Setting.Scope.CLUSTER); + public static final Setting SETTING_HTTP_TCP_NO_DELAY = + boolSetting("http.tcp_no_delay", NetworkService.TcpSettings.TCP_NO_DELAY, false, SettingsProperty.ClusterScope); + public static final Setting SETTING_HTTP_TCP_KEEP_ALIVE = + boolSetting("http.tcp.keep_alive", NetworkService.TcpSettings.TCP_KEEP_ALIVE, false, SettingsProperty.ClusterScope); + public static final Setting SETTING_HTTP_TCP_BLOCKING_SERVER = + boolSetting("http.tcp.blocking_server", NetworkService.TcpSettings.TCP_BLOCKING_SERVER, false, SettingsProperty.ClusterScope); + public static final Setting SETTING_HTTP_TCP_REUSE_ADDRESS = + boolSetting("http.tcp.reuse_address", NetworkService.TcpSettings.TCP_REUSE_ADDRESS, false, SettingsProperty.ClusterScope); - public static final Setting SETTING_HTTP_TCP_SEND_BUFFER_SIZE = Setting.byteSizeSetting("http.tcp.send_buffer_size", - NetworkService.TcpSettings.TCP_SEND_BUFFER_SIZE, false, Setting.Scope.CLUSTER); - public static final Setting SETTING_HTTP_TCP_RECEIVE_BUFFER_SIZE = Setting.byteSizeSetting("http.tcp" + - ".receive_buffer_size", NetworkService.TcpSettings.TCP_RECEIVE_BUFFER_SIZE, false, Setting.Scope.CLUSTER); - public static final Setting SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE = Setting.byteSizeSetting( - "transport.netty.receive_predictor_size", + public static final Setting SETTING_HTTP_TCP_SEND_BUFFER_SIZE = + Setting.byteSizeSetting("http.tcp.send_buffer_size", NetworkService.TcpSettings.TCP_SEND_BUFFER_SIZE, false, + SettingsProperty.ClusterScope); + public static final Setting SETTING_HTTP_TCP_RECEIVE_BUFFER_SIZE = + Setting.byteSizeSetting("http.tcp.receive_buffer_size", NetworkService.TcpSettings.TCP_RECEIVE_BUFFER_SIZE, false, + SettingsProperty.ClusterScope); + public static final Setting SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE = + Setting.byteSizeSetting("transport.netty.receive_predictor_size", settings -> { 
long defaultReceiverPredictor = 512 * 1024; if (JvmInfo.jvmInfo().getMem().getDirectMemoryMax().bytes() > 0) { @@ -152,13 +152,13 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MIN = byteSizeSetting("http.netty" + - ".receive_predictor_min", - SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE, false, Setting.Scope.CLUSTER); - public static final Setting SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MAX = byteSizeSetting("http.netty" + - ".receive_predictor_max", - SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE, false, Setting.Scope.CLUSTER); + }, false, SettingsProperty.ClusterScope); + public static final Setting SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MIN = + byteSizeSetting("http.netty.receive_predictor_min", SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE, false, + SettingsProperty.ClusterScope); + public static final Setting SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MAX = + byteSizeSetting("http.netty.receive_predictor_max", SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE, false, + SettingsProperty.ClusterScope); protected final NetworkService networkService; diff --git a/core/src/main/java/org/elasticsearch/index/IndexModule.java b/core/src/main/java/org/elasticsearch/index/IndexModule.java index f9eb3ec2b549..3586e726a401 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/core/src/main/java/org/elasticsearch/index/IndexModule.java @@ -22,6 +22,7 @@ package org.elasticsearch.index; import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.analysis.AnalysisRegistry; @@ -65,13 +66,16 @@ import java.util.function.Function; */ public final class IndexModule { - public static final Setting INDEX_STORE_TYPE_SETTING = new Setting<>("index.store.type", 
"", Function.identity(), false, Setting.Scope.INDEX); + public static final Setting INDEX_STORE_TYPE_SETTING = + new Setting<>("index.store.type", "", Function.identity(), false, SettingsProperty.IndexScope); public static final String SIMILARITY_SETTINGS_PREFIX = "index.similarity"; public static final String INDEX_QUERY_CACHE = "index"; public static final String NONE_QUERY_CACHE = "none"; - public static final Setting INDEX_QUERY_CACHE_TYPE_SETTING = new Setting<>("index.queries.cache.type", INDEX_QUERY_CACHE, Function.identity(), false, Setting.Scope.INDEX); + public static final Setting INDEX_QUERY_CACHE_TYPE_SETTING = + new Setting<>("index.queries.cache.type", INDEX_QUERY_CACHE, Function.identity(), false, SettingsProperty.IndexScope); // for test purposes only - public static final Setting INDEX_QUERY_CACHE_EVERYTHING_SETTING = Setting.boolSetting("index.queries.cache.everything", false, false, Setting.Scope.INDEX); + public static final Setting INDEX_QUERY_CACHE_EVERYTHING_SETTING = + Setting.boolSetting("index.queries.cache.everything", false, false, SettingsProperty.IndexScope); private final IndexSettings indexSettings; private final IndexStoreConfig indexStoreConfig; private final AnalysisRegistry analysisRegistry; diff --git a/core/src/main/java/org/elasticsearch/index/IndexSettings.java b/core/src/main/java/org/elasticsearch/index/IndexSettings.java index b17b8ab7edf3..dbf298e16f13 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/core/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import 
org.elasticsearch.common.unit.ByteSizeValue; @@ -36,7 +37,6 @@ import org.elasticsearch.index.translog.Translog; import java.util.Locale; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; @@ -50,15 +50,26 @@ import java.util.function.Predicate; */ public final class IndexSettings { - public static final Setting DEFAULT_FIELD_SETTING = new Setting<>("index.query.default_field", AllFieldMapper.NAME, Function.identity(), false, Setting.Scope.INDEX); - public static final Setting QUERY_STRING_LENIENT_SETTING = Setting.boolSetting("index.query_string.lenient", false, false, Setting.Scope.INDEX); - public static final Setting QUERY_STRING_ANALYZE_WILDCARD = Setting.boolSetting("indices.query.query_string.analyze_wildcard", false, false, Setting.Scope.CLUSTER); - public static final Setting QUERY_STRING_ALLOW_LEADING_WILDCARD = Setting.boolSetting("indices.query.query_string.allowLeadingWildcard", true, false, Setting.Scope.CLUSTER); - public static final Setting ALLOW_UNMAPPED = Setting.boolSetting("index.query.parse.allow_unmapped_fields", true, false, Setting.Scope.INDEX); - public static final Setting INDEX_TRANSLOG_SYNC_INTERVAL_SETTING = Setting.timeSetting("index.translog.sync_interval", TimeValue.timeValueSeconds(5), TimeValue.timeValueMillis(100), false, Setting.Scope.INDEX); - public static final Setting INDEX_TRANSLOG_DURABILITY_SETTING = new Setting<>("index.translog.durability", Translog.Durability.REQUEST.name(), (value) -> Translog.Durability.valueOf(value.toUpperCase(Locale.ROOT)), true, Setting.Scope.INDEX); - public static final Setting INDEX_WARMER_ENABLED_SETTING = Setting.boolSetting("index.warmer.enabled", true, true, Setting.Scope.INDEX); - public static final Setting INDEX_TTL_DISABLE_PURGE_SETTING = Setting.boolSetting("index.ttl.disable_purge", false, true, Setting.Scope.INDEX); + public static final 
Setting DEFAULT_FIELD_SETTING = + new Setting<>("index.query.default_field", AllFieldMapper.NAME, Function.identity(), false, SettingsProperty.IndexScope); + public static final Setting QUERY_STRING_LENIENT_SETTING = + Setting.boolSetting("index.query_string.lenient", false, false, SettingsProperty.IndexScope); + public static final Setting QUERY_STRING_ANALYZE_WILDCARD = + Setting.boolSetting("indices.query.query_string.analyze_wildcard", false, false, SettingsProperty.ClusterScope); + public static final Setting QUERY_STRING_ALLOW_LEADING_WILDCARD = + Setting.boolSetting("indices.query.query_string.allowLeadingWildcard", true, false, SettingsProperty.ClusterScope); + public static final Setting ALLOW_UNMAPPED = + Setting.boolSetting("index.query.parse.allow_unmapped_fields", true, false, SettingsProperty.IndexScope); + public static final Setting INDEX_TRANSLOG_SYNC_INTERVAL_SETTING = + Setting.timeSetting("index.translog.sync_interval", TimeValue.timeValueSeconds(5), TimeValue.timeValueMillis(100), false, + SettingsProperty.IndexScope); + public static final Setting INDEX_TRANSLOG_DURABILITY_SETTING = + new Setting<>("index.translog.durability", Translog.Durability.REQUEST.name(), + (value) -> Translog.Durability.valueOf(value.toUpperCase(Locale.ROOT)), true, SettingsProperty.IndexScope); + public static final Setting INDEX_WARMER_ENABLED_SETTING = + Setting.boolSetting("index.warmer.enabled", true, true, SettingsProperty.IndexScope); + public static final Setting INDEX_TTL_DISABLE_PURGE_SETTING = + Setting.boolSetting("index.ttl.disable_purge", false, true, SettingsProperty.IndexScope); public static final Setting INDEX_CHECK_ON_STARTUP = new Setting<>("index.shard.check_on_startup", "false", (s) -> { switch(s) { case "false": @@ -69,7 +80,7 @@ public final class IndexSettings { default: throw new IllegalArgumentException("unknown value for [index.shard.check_on_startup] must be one of [true, false, fix, checksum] but was: " + s); } - }, false, 
Setting.Scope.INDEX); + }, false, SettingsProperty.IndexScope); /** * Index setting describing the maximum value of from + size on a query. @@ -79,10 +90,15 @@ public final class IndexSettings { * safely. 1,000,000 is probably way too high for any cluster to set * safely. */ - public static final Setting MAX_RESULT_WINDOW_SETTING = Setting.intSetting("index.max_result_window", 10000, 1, true, Setting.Scope.INDEX); + public static final Setting MAX_RESULT_WINDOW_SETTING = + Setting.intSetting("index.max_result_window", 10000, 1, true, SettingsProperty.IndexScope); public static final TimeValue DEFAULT_REFRESH_INTERVAL = new TimeValue(1, TimeUnit.SECONDS); - public static final Setting INDEX_REFRESH_INTERVAL_SETTING = Setting.timeSetting("index.refresh_interval", DEFAULT_REFRESH_INTERVAL, new TimeValue(-1, TimeUnit.MILLISECONDS), true, Setting.Scope.INDEX); - public static final Setting INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING = Setting.byteSizeSetting("index.translog.flush_threshold_size", new ByteSizeValue(512, ByteSizeUnit.MB), true, Setting.Scope.INDEX); + public static final Setting INDEX_REFRESH_INTERVAL_SETTING = + Setting.timeSetting("index.refresh_interval", DEFAULT_REFRESH_INTERVAL, new TimeValue(-1, TimeUnit.MILLISECONDS), true, + SettingsProperty.IndexScope); + public static final Setting INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING = + Setting.byteSizeSetting("index.translog.flush_threshold_size", new ByteSizeValue(512, ByteSizeUnit.MB), true, + SettingsProperty.IndexScope); /** @@ -90,7 +106,9 @@ public final class IndexSettings { * This setting is realtime updateable */ public static final TimeValue DEFAULT_GC_DELETES = TimeValue.timeValueSeconds(60); - public static final Setting INDEX_GC_DELETES_SETTING = Setting.timeSetting("index.gc_deletes", DEFAULT_GC_DELETES, new TimeValue(-1, TimeUnit.MILLISECONDS), true, Setting.Scope.INDEX); + public static final Setting INDEX_GC_DELETES_SETTING = + Setting.timeSetting("index.gc_deletes", DEFAULT_GC_DELETES, 
new TimeValue(-1, TimeUnit.MILLISECONDS), true, + SettingsProperty.IndexScope); private final Index index; private final Version version; diff --git a/core/src/main/java/org/elasticsearch/index/IndexWarmer.java b/core/src/main/java/org/elasticsearch/index/IndexWarmer.java index 9fabc8efc40f..1b790240587c 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexWarmer.java +++ b/core/src/main/java/org/elasticsearch/index/IndexWarmer.java @@ -27,6 +27,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.engine.Engine; @@ -56,7 +57,7 @@ public final class IndexWarmer extends AbstractComponent { public static final Setting INDEX_NORMS_LOADING_SETTING = new Setting<>("index.norms.loading", MappedFieldType.Loading.LAZY.toString(), (s) -> MappedFieldType.Loading.parse(s, MappedFieldType.Loading.LAZY), - false, Setting.Scope.INDEX); + false, SettingsProperty.IndexScope); private final List listeners; IndexWarmer(Settings settings, ThreadPool threadPool, Listener... 
listeners) { diff --git a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java index 5452daa7f077..c66e05a6c795 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java +++ b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.engine.Engine; @@ -54,12 +55,23 @@ public final class IndexingSlowLog implements IndexingOperationListener { private final ESLogger deleteLogger; private static final String INDEX_INDEXING_SLOWLOG_PREFIX = "index.indexing.slowlog"; - public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.warn", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); - public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.info", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); - public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.debug", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); - public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.trace", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); - public static 
final Setting INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING = Setting.boolSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".reformat", true, true, Setting.Scope.INDEX); - public static final Setting INDEX_INDEXING_SLOWLOG_LEVEL_SETTING = new Setting<>(INDEX_INDEXING_SLOWLOG_PREFIX +".level", SlowLogLevel.TRACE.name(), SlowLogLevel::parse, true, Setting.Scope.INDEX); + public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING = + Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.warn", TimeValue.timeValueNanos(-1), + TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING = + Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.info", TimeValue.timeValueNanos(-1), + TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING = + Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.debug", TimeValue.timeValueNanos(-1), + TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING = + Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.trace", TimeValue.timeValueNanos(-1), + TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + public static final Setting INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING = + Setting.boolSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".reformat", true, true, SettingsProperty.IndexScope); + public static final Setting INDEX_INDEXING_SLOWLOG_LEVEL_SETTING = + new Setting<>(INDEX_INDEXING_SLOWLOG_PREFIX +".level", SlowLogLevel.TRACE.name(), SlowLogLevel::parse, true, + SettingsProperty.IndexScope); /** * Reads how much of the source to log. 
The user can specify any value they * like and numbers are interpreted as the maximum number of characters to log @@ -72,7 +84,7 @@ public final class IndexingSlowLog implements IndexingOperationListener { } catch (NumberFormatException e) { return Booleans.parseBoolean(value, true) ? Integer.MAX_VALUE : 0; } - }, true, Setting.Scope.INDEX); + }, true, SettingsProperty.IndexScope); IndexingSlowLog(IndexSettings indexSettings) { this(indexSettings, Loggers.getLogger(INDEX_INDEXING_SLOWLOG_PREFIX + ".index"), diff --git a/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java b/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java index fc9f30cf3fd5..5d2dc7e5bf2d 100644 --- a/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java +++ b/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.TieredMergePolicy; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -126,15 +127,27 @@ public final class MergePolicyConfig { public static final ByteSizeValue DEFAULT_MAX_MERGED_SEGMENT = new ByteSizeValue(5, ByteSizeUnit.GB); public static final double DEFAULT_SEGMENTS_PER_TIER = 10.0d; public static final double DEFAULT_RECLAIM_DELETES_WEIGHT = 2.0d; - public static final Setting INDEX_COMPOUND_FORMAT_SETTING = new Setting<>("index.compound_format", Double.toString(TieredMergePolicy.DEFAULT_NO_CFS_RATIO), MergePolicyConfig::parseNoCFSRatio, true, Setting.Scope.INDEX); + public static final Setting INDEX_COMPOUND_FORMAT_SETTING = + new Setting<>("index.compound_format", Double.toString(TieredMergePolicy.DEFAULT_NO_CFS_RATIO), MergePolicyConfig::parseNoCFSRatio, + true, SettingsProperty.IndexScope); - public 
static final Setting INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING = Setting.doubleSetting("index.merge.policy.expunge_deletes_allowed", DEFAULT_EXPUNGE_DELETES_ALLOWED, 0.0d, true, Setting.Scope.INDEX); - public static final Setting INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING = Setting.byteSizeSetting("index.merge.policy.floor_segment", DEFAULT_FLOOR_SEGMENT, true, Setting.Scope.INDEX); - public static final Setting INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING = Setting.intSetting("index.merge.policy.max_merge_at_once", DEFAULT_MAX_MERGE_AT_ONCE, 2, true, Setting.Scope.INDEX); - public static final Setting INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING = Setting.intSetting("index.merge.policy.max_merge_at_once_explicit", DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT, 2, true, Setting.Scope.INDEX); - public static final Setting INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING = Setting.byteSizeSetting("index.merge.policy.max_merged_segment", DEFAULT_MAX_MERGED_SEGMENT, true, Setting.Scope.INDEX); - public static final Setting INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING = Setting.doubleSetting("index.merge.policy.segments_per_tier", DEFAULT_SEGMENTS_PER_TIER, 2.0d, true, Setting.Scope.INDEX); - public static final Setting INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING = Setting.doubleSetting("index.merge.policy.reclaim_deletes_weight", DEFAULT_RECLAIM_DELETES_WEIGHT, 0.0d, true, Setting.Scope.INDEX); + public static final Setting INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING = + Setting.doubleSetting("index.merge.policy.expunge_deletes_allowed", DEFAULT_EXPUNGE_DELETES_ALLOWED, 0.0d, true, + SettingsProperty.IndexScope); + public static final Setting INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING = + Setting.byteSizeSetting("index.merge.policy.floor_segment", DEFAULT_FLOOR_SEGMENT, true, SettingsProperty.IndexScope); + public static final Setting INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING = + Setting.intSetting("index.merge.policy.max_merge_at_once", 
DEFAULT_MAX_MERGE_AT_ONCE, 2, true, SettingsProperty.IndexScope); + public static final Setting INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING = + Setting.intSetting("index.merge.policy.max_merge_at_once_explicit", DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT, 2, true, + SettingsProperty.IndexScope); + public static final Setting INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING = + Setting.byteSizeSetting("index.merge.policy.max_merged_segment", DEFAULT_MAX_MERGED_SEGMENT, true, SettingsProperty.IndexScope); + public static final Setting INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING = + Setting.doubleSetting("index.merge.policy.segments_per_tier", DEFAULT_SEGMENTS_PER_TIER, 2.0d, true, SettingsProperty.IndexScope); + public static final Setting INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING = + Setting.doubleSetting("index.merge.policy.reclaim_deletes_weight", DEFAULT_RECLAIM_DELETES_WEIGHT, 0.0d, true, + SettingsProperty.IndexScope); public static final String INDEX_MERGE_ENABLED = "index.merge.enabled"; // don't convert to Setting<> and register... 
we only set this in tests and register via a plugin diff --git a/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java b/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java index 0d212a4eb30e..e53315c02496 100644 --- a/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java +++ b/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java @@ -21,6 +21,7 @@ package org.elasticsearch.index; import org.apache.lucene.index.ConcurrentMergeScheduler; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.util.concurrent.EsExecutors; /** @@ -51,9 +52,17 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; */ public final class MergeSchedulerConfig { - public static final Setting MAX_THREAD_COUNT_SETTING = new Setting<>("index.merge.scheduler.max_thread_count", (s) -> Integer.toString(Math.max(1, Math.min(4, EsExecutors.boundedNumberOfProcessors(s) / 2))), (s) -> Setting.parseInt(s, 1, "index.merge.scheduler.max_thread_count"), true, Setting.Scope.INDEX); - public static final Setting MAX_MERGE_COUNT_SETTING = new Setting<>("index.merge.scheduler.max_merge_count", (s) -> Integer.toString(MAX_THREAD_COUNT_SETTING.get(s) + 5), (s) -> Setting.parseInt(s, 1, "index.merge.scheduler.max_merge_count"), true, Setting.Scope.INDEX); - public static final Setting AUTO_THROTTLE_SETTING = Setting.boolSetting("index.merge.scheduler.auto_throttle", true, true, Setting.Scope.INDEX); + public static final Setting MAX_THREAD_COUNT_SETTING = + new Setting<>("index.merge.scheduler.max_thread_count", + (s) -> Integer.toString(Math.max(1, Math.min(4, EsExecutors.boundedNumberOfProcessors(s) / 2))), + (s) -> Setting.parseInt(s, 1, "index.merge.scheduler.max_thread_count"), true, + SettingsProperty.IndexScope); + public static final Setting MAX_MERGE_COUNT_SETTING = + new Setting<>("index.merge.scheduler.max_merge_count", + (s) -> 
Integer.toString(MAX_THREAD_COUNT_SETTING.get(s) + 5), + (s) -> Setting.parseInt(s, 1, "index.merge.scheduler.max_merge_count"), true, SettingsProperty.IndexScope); + public static final Setting AUTO_THROTTLE_SETTING = + Setting.boolSetting("index.merge.scheduler.auto_throttle", true, true, SettingsProperty.IndexScope); private volatile boolean autoThrottle; private volatile int maxThreadCount; diff --git a/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java b/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java index df3139fe57c7..ae26eab2de1b 100644 --- a/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java +++ b/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.search.internal.SearchContext; @@ -50,16 +51,35 @@ public final class SearchSlowLog { private final ESLogger fetchLogger; private static final String INDEX_SEARCH_SLOWLOG_PREFIX = "index.search.slowlog"; - public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.warn", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); - public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.info", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); - public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.debug", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, 
Setting.Scope.INDEX); - public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.trace", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); - public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.warn", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); - public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.info", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); - public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.debug", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); - public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.trace", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); - public static final Setting INDEX_SEARCH_SLOWLOG_REFORMAT = Setting.boolSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".reformat", true, true, Setting.Scope.INDEX); - public static final Setting INDEX_SEARCH_SLOWLOG_LEVEL = new Setting<>(INDEX_SEARCH_SLOWLOG_PREFIX + ".level", SlowLogLevel.TRACE.name(), SlowLogLevel::parse, true, Setting.Scope.INDEX); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING = + Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.warn", TimeValue.timeValueNanos(-1), + TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING = + Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + 
".threshold.query.info", TimeValue.timeValueNanos(-1), + TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING = + Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.debug", TimeValue.timeValueNanos(-1), + TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING = + Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.trace", TimeValue.timeValueNanos(-1), + TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING = + Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.warn", TimeValue.timeValueNanos(-1), + TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING = + Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.info", TimeValue.timeValueNanos(-1), + TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING = + Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.debug", TimeValue.timeValueNanos(-1), + TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING = + Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.trace", TimeValue.timeValueNanos(-1), + TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + public static final Setting INDEX_SEARCH_SLOWLOG_REFORMAT = + Setting.boolSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".reformat", true, true, SettingsProperty.IndexScope); + public static final Setting INDEX_SEARCH_SLOWLOG_LEVEL = + new Setting<>(INDEX_SEARCH_SLOWLOG_PREFIX + ".level", SlowLogLevel.TRACE.name(), 
SlowLogLevel::parse, true, + SettingsProperty.IndexScope); public SearchSlowLog(IndexSettings indexSettings) { diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java index 25ff8f968348..1dd562c4bb14 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java @@ -93,7 +93,7 @@ public class NamedAnalyzer extends DelegatingAnalyzerWrapper { public String toString() { return "analyzer name[" + name + "], analyzer [" + analyzer + "]"; } - + /** It is an error if this is ever used, it means we screwed up! */ static final ReuseStrategy ERROR_STRATEGY = new Analyzer.ReuseStrategy() { @Override diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NumericDoubleAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/NumericDoubleAnalyzer.java index e90409421d26..77716e7a43da 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NumericDoubleAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NumericDoubleAnalyzer.java @@ -56,4 +56,4 @@ public class NumericDoubleAnalyzer extends NumericAnalyzer protected NumericFloatTokenizer createNumericTokenizer(char[] buffer) throws IOException { return new NumericFloatTokenizer(precisionStep, buffer); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NumericLongAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/NumericLongAnalyzer.java index ab1123963921..9b8659203418 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NumericLongAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NumericLongAnalyzer.java @@ -56,4 +56,4 @@ public class NumericLongAnalyzer extends NumericAnalyzer { protected NumericLongTokenizer createNumericTokenizer(char[] buffer) throws IOException { return new 
NumericLongTokenizer(precisionStep, buffer); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java index 7d5540b6224e..72435d90fd95 100644 --- a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +++ b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.cache.RemovalListener; import org.elasticsearch.common.cache.RemovalNotification; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; @@ -70,7 +71,8 @@ import java.util.concurrent.Executor; */ public final class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener>, Closeable { - public static final Setting INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING = Setting.boolSetting("index.load_fixed_bitset_filters_eagerly", true, false, Setting.Scope.INDEX); + public static final Setting INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING = + Setting.boolSetting("index.load_fixed_bitset_filters_eagerly", true, false, SettingsProperty.IndexScope); private final boolean loadRandomAccessFiltersEagerly; private final Cache> loadedFilters; diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java index 9740ccd03588..47110c62bfbd 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java +++ b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.QueryCache; import 
org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.similarities.Similarity; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -83,7 +84,7 @@ public final class EngineConfig { } return s; } - }, false, Setting.Scope.INDEX); + }, false, SettingsProperty.IndexScope); /** if set to true the engine will start even if the translog id in the commit point can not be found */ public static final String INDEX_FORCE_NEW_TRANSLOG = "index.engine.force_new_translog"; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java index 78bdcb0f7f30..6d12de8c3952 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java @@ -24,6 +24,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.plain.AbstractGeoPointDVIndexFieldData; @@ -66,7 +67,7 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo default: throw new IllegalArgumentException("failed to parse [" + s + "] must be one of [node,node]"); } - }, false, Setting.Scope.INDEX); + }, false, SettingsProperty.IndexScope); private static final IndexFieldData.Builder MISSING_DOC_VALUES_BUILDER = (indexProperties, fieldType, cache, breakerService, mapperService1) -> { throw new 
IllegalStateException("Can't load fielddata on [" + fieldType.name() diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index fd35398a9dc0..9fba8c0529ed 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -49,8 +50,10 @@ import java.util.Map; import java.util.stream.StreamSupport; public abstract class FieldMapper extends Mapper implements Cloneable { - public static final Setting IGNORE_MALFORMED_SETTING = Setting.boolSetting("index.mapping.ignore_malformed", false, false, Setting.Scope.INDEX); - public static final Setting COERCE_SETTING = Setting.boolSetting("index.mapping.coerce", false, false, Setting.Scope.INDEX); + public static final Setting IGNORE_MALFORMED_SETTING = + Setting.boolSetting("index.mapping.ignore_malformed", false, false, SettingsProperty.IndexScope); + public static final Setting COERCE_SETTING = + Setting.boolSetting("index.mapping.coerce", false, false, SettingsProperty.IndexScope); public abstract static class Builder extends Mapper.Builder { protected final MappedFieldType fieldType; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index b25f5f6a02df..c86219cc05ac 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ 
b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -27,6 +27,7 @@ import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisService; @@ -81,9 +82,11 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } public static final String DEFAULT_MAPPING = "_default_"; - public static final Setting INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING = Setting.longSetting("index.mapping.nested_fields.limit", 50L, 0, true, Setting.Scope.INDEX); + public static final Setting INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING = + Setting.longSetting("index.mapping.nested_fields.limit", 50L, 0, true, SettingsProperty.IndexScope); public static final boolean INDEX_MAPPER_DYNAMIC_DEFAULT = true; - public static final Setting INDEX_MAPPER_DYNAMIC_SETTING = Setting.boolSetting("index.mapper.dynamic", INDEX_MAPPER_DYNAMIC_DEFAULT, false, Setting.Scope.INDEX); + public static final Setting INDEX_MAPPER_DYNAMIC_SETTING = + Setting.boolSetting("index.mapper.dynamic", INDEX_MAPPER_DYNAMIC_DEFAULT, false, SettingsProperty.IndexScope); private static ObjectHashSet META_FIELDS = ObjectHashSet.from( "_uid", "_id", "_type", "_all", "_parent", "_routing", "_index", "_size", "_timestamp", "_ttl" diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java index 90fb20ef827e..5f928043688d 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java @@ -33,6 +33,7 @@ import 
org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -52,7 +53,9 @@ import java.util.List; * */ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll { - private static final Setting COERCE_SETTING = Setting.boolSetting("index.mapping.coerce", true, false, Setting.Scope.INDEX); // this is private since it has a different default + // this is private since it has a different default + private static final Setting COERCE_SETTING = + Setting.boolSetting("index.mapping.coerce", true, false, SettingsProperty.IndexScope); public static class Defaults { diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java index 67ba0aaf1d2b..62d5da922591 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.metrics.MeanMetric; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; @@ -61,7 +62,8 @@ import java.util.concurrent.TimeUnit; */ public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent implements Closeable { - public 
final static Setting INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING = Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, false, Setting.Scope.INDEX); + public final static Setting INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING = + Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, false, SettingsProperty.IndexScope); private final ConcurrentMap percolateQueries = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency(); private final QueryShardContext queryShardContext; diff --git a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java index d5d6d5234bec..914979eac05f 100644 --- a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java +++ b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java @@ -34,6 +34,7 @@ import org.apache.lucene.util.Constants; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.IndexModule; @@ -60,7 +61,7 @@ public class FsDirectoryService extends DirectoryService implements StoreRateLim default: throw new IllegalArgumentException("unrecognized [index.store.fs.fs_lock] \"" + s + "\": must be native or simple"); } - }, false, Setting.Scope.INDEX); + }, false, SettingsProperty.IndexScope); private final CounterMetric rateLimitingTimeInNanos = new CounterMetric(); private final ShardPath path; diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStore.java b/core/src/main/java/org/elasticsearch/index/store/IndexStore.java index e98ad7cc6ebb..783ed980646b 100644 --- a/core/src/main/java/org/elasticsearch/index/store/IndexStore.java +++ 
b/core/src/main/java/org/elasticsearch/index/store/IndexStore.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.store; import org.apache.lucene.store.StoreRateLimiting; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; @@ -29,8 +30,10 @@ import org.elasticsearch.index.shard.ShardPath; * */ public class IndexStore extends AbstractIndexComponent { - public static final Setting INDEX_STORE_THROTTLE_TYPE_SETTING = new Setting<>("index.store.throttle.type", "none", IndexRateLimitingType::fromString, true, Setting.Scope.INDEX) ; - public static final Setting INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING = Setting.byteSizeSetting("index.store.throttle.max_bytes_per_sec", new ByteSizeValue(0), true, Setting.Scope.INDEX); + public static final Setting INDEX_STORE_THROTTLE_TYPE_SETTING = + new Setting<>("index.store.throttle.type", "none", IndexRateLimitingType::fromString, true, SettingsProperty.IndexScope); + public static final Setting INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING = + Setting.byteSizeSetting("index.store.throttle.max_bytes_per_sec", new ByteSizeValue(0), true, SettingsProperty.IndexScope); protected final IndexStoreConfig indexStoreConfig; private final StoreRateLimiting rateLimiting = new StoreRateLimiting(); diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java index ab7075afa5be..328d7604bcfd 100644 --- a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java +++ b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java @@ -22,6 +22,7 @@ import org.apache.lucene.store.StoreRateLimiting; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import 
org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; @@ -36,11 +37,14 @@ public class IndexStoreConfig { /** * Configures the node / cluster level throttle type. See {@link StoreRateLimiting.Type}. */ - public static final Setting INDICES_STORE_THROTTLE_TYPE_SETTING = new Setting<>("indices.store.throttle.type", StoreRateLimiting.Type.NONE.name(),StoreRateLimiting.Type::fromString, true, Setting.Scope.CLUSTER); + public static final Setting INDICES_STORE_THROTTLE_TYPE_SETTING = + new Setting<>("indices.store.throttle.type", StoreRateLimiting.Type.NONE.name(),StoreRateLimiting.Type::fromString, true, + SettingsProperty.ClusterScope); /** * Configures the node / cluster level throttle intensity. The default is 10240 MB */ - public static final Setting INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING = Setting.byteSizeSetting("indices.store.throttle.max_bytes_per_sec", new ByteSizeValue(0), true, Setting.Scope.CLUSTER); + public static final Setting INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING = + Setting.byteSizeSetting("indices.store.throttle.max_bytes_per_sec", new ByteSizeValue(0), true, SettingsProperty.ClusterScope); private volatile StoreRateLimiting.Type rateLimitingType; private volatile ByteSizeValue rateLimitingThrottle; private final StoreRateLimiting rateLimiting = new StoreRateLimiting(); diff --git a/core/src/main/java/org/elasticsearch/index/store/Store.java b/core/src/main/java/org/elasticsearch/index/store/Store.java index c7377a4ab6be..b1d806d520e6 100644 --- a/core/src/main/java/org/elasticsearch/index/store/Store.java +++ b/core/src/main/java/org/elasticsearch/index/store/Store.java @@ -62,6 +62,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; import org.elasticsearch.common.lucene.store.InputStreamIndexInput; import 
org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.Callback; @@ -129,7 +130,8 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref static final int VERSION_START = 0; static final int VERSION = VERSION_WRITE_THROWABLE; static final String CORRUPTED = "corrupted_"; - public static final Setting INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING = Setting.timeSetting("index.store.stats_refresh_interval", TimeValue.timeValueSeconds(10), false, Setting.Scope.INDEX); + public static final Setting INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING = + Setting.timeSetting("index.store.stats_refresh_interval", TimeValue.timeValueSeconds(10), false, SettingsProperty.IndexScope); private final AtomicBoolean isClosed = new AtomicBoolean(false); private final StoreDirectory directory; diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java b/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java index 926ff4822484..15eb19fc416a 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java @@ -30,10 +30,9 @@ import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.ShardCoreKeyMap; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Scope; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.cache.query.QueryCacheStats; @@ -50,9 
+49,9 @@ import java.util.concurrent.ConcurrentHashMap; public class IndicesQueryCache extends AbstractComponent implements QueryCache, Closeable { public static final Setting INDICES_CACHE_QUERY_SIZE_SETTING = Setting.byteSizeSetting( - "indices.queries.cache.size", "10%", false, Scope.CLUSTER); + "indices.queries.cache.size", "10%", false, SettingsProperty.ClusterScope); public static final Setting INDICES_CACHE_QUERY_COUNT_SETTING = Setting.intSetting( - "indices.queries.cache.count", 10000, 1, false, Scope.CLUSTER); + "indices.queries.cache.count", 10000, 1, false, SettingsProperty.ClusterScope); private final LRUQueryCache cache; private final ShardCoreKeyMap shardKeyMap = new ShardCoreKeyMap(); diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java b/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java index 575153c8ada6..8ebe52a2c195 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.cache.RemovalNotification; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -68,12 +69,12 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo * A setting to enable or disable request caching on an index level. Its dynamic by default * since we are checking on the cluster state IndexMetaData always. 
*/ - public static final Setting INDEX_CACHE_REQUEST_ENABLED_SETTING = Setting.boolSetting("index.requests.cache.enable", - false, true, Setting.Scope.INDEX); - public static final Setting INDICES_CACHE_QUERY_SIZE = Setting.byteSizeSetting("indices.requests.cache.size", "1%", - false, Setting.Scope.CLUSTER); - public static final Setting INDICES_CACHE_QUERY_EXPIRE = Setting.positiveTimeSetting("indices.requests.cache.expire", - new TimeValue(0), false, Setting.Scope.CLUSTER); + public static final Setting INDEX_CACHE_REQUEST_ENABLED_SETTING = + Setting.boolSetting("index.requests.cache.enable", false, true, SettingsProperty.IndexScope); + public static final Setting INDICES_CACHE_QUERY_SIZE = + Setting.byteSizeSetting("indices.requests.cache.size", "1%", false, SettingsProperty.ClusterScope); + public static final Setting INDICES_CACHE_QUERY_EXPIRE = + Setting.positiveTimeSetting("indices.requests.cache.expire", new TimeValue(0), false, SettingsProperty.ClusterScope); private final ConcurrentMap registeredClosedListeners = ConcurrentCollections.newConcurrentMap(); private final Set keysToClean = ConcurrentCollections.newConcurrentSet(); diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index c7d1be4bf710..d64bb3f0c4ce 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -47,6 +47,7 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -115,7 +116,8 @@ import static 
org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList; public class IndicesService extends AbstractLifecycleComponent implements Iterable, IndexService.ShardStoreDeleter { public static final String INDICES_SHARDS_CLOSED_TIMEOUT = "indices.shards_closed_timeout"; - public static final Setting INDICES_CACHE_CLEAN_INTERVAL_SETTING = Setting.positiveTimeSetting("indices.cache.cleanup_interval", TimeValue.timeValueMinutes(1), false, Setting.Scope.CLUSTER); + public static final Setting INDICES_CACHE_CLEAN_INTERVAL_SETTING = + Setting.positiveTimeSetting("indices.cache.cleanup_interval", TimeValue.timeValueMinutes(1), false, SettingsProperty.ClusterScope); private final PluginsService pluginsService; private final NodeEnvironment nodeEnv; private final TimeValue shardsClosedTimeout; diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java index f99b39ef6201..3b7b97828490 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java @@ -24,6 +24,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -71,9 +72,12 @@ import java.util.function.Function; */ public class HunspellService extends AbstractComponent { - public final static Setting HUNSPELL_LAZY_LOAD = Setting.boolSetting("indices.analysis.hunspell.dictionary.lazy", Boolean.FALSE, false, Setting.Scope.CLUSTER); - public final static Setting HUNSPELL_IGNORE_CASE = Setting.boolSetting("indices.analysis.hunspell.dictionary.ignore_case", Boolean.FALSE, false, 
Setting.Scope.CLUSTER); - public final static Setting HUNSPELL_DICTIONARY_OPTIONS = Setting.groupSetting("indices.analysis.hunspell.dictionary.", false, Setting.Scope.CLUSTER); + public final static Setting HUNSPELL_LAZY_LOAD = + Setting.boolSetting("indices.analysis.hunspell.dictionary.lazy", Boolean.FALSE, false, SettingsProperty.ClusterScope); + public final static Setting HUNSPELL_IGNORE_CASE = + Setting.boolSetting("indices.analysis.hunspell.dictionary.ignore_case", Boolean.FALSE, false, SettingsProperty.ClusterScope); + public final static Setting HUNSPELL_DICTIONARY_OPTIONS = + Setting.groupSetting("indices.analysis.hunspell.dictionary.", false, SettingsProperty.ClusterScope); private final ConcurrentHashMap dictionaries = new ConcurrentHashMap<>(); private final Map knownDictionaries; private final boolean defaultIgnoreCase; diff --git a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java index 0e1532bc6b3d..fdee0b033434 100644 --- a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +++ b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; @@ -46,15 +47,22 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { private final ConcurrentMap breakers = new ConcurrentHashMap(); - public static final Setting TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.total.limit", "70%", true, Setting.Scope.CLUSTER); + public static 
final Setting TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING = + Setting.byteSizeSetting("indices.breaker.total.limit", "70%", true, SettingsProperty.ClusterScope); - public static final Setting FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.fielddata.limit", "60%", true, Setting.Scope.CLUSTER); - public static final Setting FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING = Setting.doubleSetting("indices.breaker.fielddata.overhead", 1.03d, 0.0d, true, Setting.Scope.CLUSTER); - public static final Setting FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING = new Setting<>("indices.breaker.fielddata.type", "memory", CircuitBreaker.Type::parseValue, false, Setting.Scope.CLUSTER); + public static final Setting FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING = + Setting.byteSizeSetting("indices.breaker.fielddata.limit", "60%", true, SettingsProperty.ClusterScope); + public static final Setting FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING = + Setting.doubleSetting("indices.breaker.fielddata.overhead", 1.03d, 0.0d, true, SettingsProperty.ClusterScope); + public static final Setting FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING = + new Setting<>("indices.breaker.fielddata.type", "memory", CircuitBreaker.Type::parseValue, false, SettingsProperty.ClusterScope); - public static final Setting REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.byteSizeSetting("indices.breaker.request.limit", "40%", true, Setting.Scope.CLUSTER); - public static final Setting REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING = Setting.doubleSetting("indices.breaker.request.overhead", 1.0d, 0.0d, true, Setting.Scope.CLUSTER); - public static final Setting REQUEST_CIRCUIT_BREAKER_TYPE_SETTING = new Setting<>("indices.breaker.request.type", "memory", CircuitBreaker.Type::parseValue, false, Setting.Scope.CLUSTER); + public static final Setting REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING = + Setting.byteSizeSetting("indices.breaker.request.limit", "40%", true, SettingsProperty.ClusterScope); + public static final Setting 
REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING = + Setting.doubleSetting("indices.breaker.request.overhead", 1.0d, 0.0d, true, SettingsProperty.ClusterScope); + public static final Setting REQUEST_CIRCUIT_BREAKER_TYPE_SETTING = + new Setting<>("indices.breaker.request.type", "memory", CircuitBreaker.Type::parseValue, false, SettingsProperty.ClusterScope); diff --git a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java index 0a3f063dfcc6..dd60c6282239 100644 --- a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java +++ b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.Index; @@ -52,7 +53,8 @@ import java.util.function.ToLongBiFunction; */ public class IndicesFieldDataCache extends AbstractComponent implements RemovalListener, Releasable{ - public static final Setting INDICES_FIELDDATA_CACHE_SIZE_KEY = Setting.byteSizeSetting("indices.fielddata.cache.size", new ByteSizeValue(-1), false, Setting.Scope.CLUSTER); + public static final Setting INDICES_FIELDDATA_CACHE_SIZE_KEY = + Setting.byteSizeSetting("indices.fielddata.cache.size", new ByteSizeValue(-1), false, SettingsProperty.ClusterScope); private final IndexFieldDataCache.Listener indicesFieldDataCacheListener; private final Cache cache; diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java 
b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java index 8d610dce05bb..742b1b789457 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -32,31 +33,45 @@ import org.elasticsearch.common.unit.TimeValue; public class RecoverySettings extends AbstractComponent { - public static final Setting INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING = Setting.byteSizeSetting("indices.recovery.max_bytes_per_sec", new ByteSizeValue(40, ByteSizeUnit.MB), true, Setting.Scope.CLUSTER); + public static final Setting INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING = + Setting.byteSizeSetting("indices.recovery.max_bytes_per_sec", new ByteSizeValue(40, ByteSizeUnit.MB), true, + SettingsProperty.ClusterScope); /** * how long to wait before retrying after issues cause by cluster state syncing between nodes * i.e., local node is not yet known on remote node, remote shard not yet started etc. 
*/ - public static final Setting INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING = Setting.positiveTimeSetting("indices.recovery.retry_delay_state_sync", TimeValue.timeValueMillis(500), true, Setting.Scope.CLUSTER); + public static final Setting INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING = + Setting.positiveTimeSetting("indices.recovery.retry_delay_state_sync", TimeValue.timeValueMillis(500), true, + SettingsProperty.ClusterScope); /** how long to wait before retrying after network related issues */ - public static final Setting INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING = Setting.positiveTimeSetting("indices.recovery.retry_delay_network", TimeValue.timeValueSeconds(5), true, Setting.Scope.CLUSTER); + public static final Setting INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING = + Setting.positiveTimeSetting("indices.recovery.retry_delay_network", TimeValue.timeValueSeconds(5), true, + SettingsProperty.ClusterScope); /** timeout value to use for requests made as part of the recovery process */ - public static final Setting INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING = Setting.positiveTimeSetting("indices.recovery.internal_action_timeout", TimeValue.timeValueMinutes(15), true, Setting.Scope.CLUSTER); + public static final Setting INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING = + Setting.positiveTimeSetting("indices.recovery.internal_action_timeout", TimeValue.timeValueMinutes(15), true, + SettingsProperty.ClusterScope); /** * timeout value to use for requests made as part of the recovery process that are expected to take long time. * defaults to twice `indices.recovery.internal_action_timeout`. 
*/ - public static final Setting INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING = Setting.timeSetting("indices.recovery.internal_action_long_timeout", (s) -> TimeValue.timeValueMillis(INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.get(s).millis() * 2).toString(), TimeValue.timeValueSeconds(0), true, Setting.Scope.CLUSTER); + public static final Setting INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING = + Setting.timeSetting("indices.recovery.internal_action_long_timeout", + (s) -> TimeValue.timeValueMillis(INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.get(s).millis() * 2).toString(), + TimeValue.timeValueSeconds(0), true, SettingsProperty.ClusterScope); /** * recoveries that don't show any activity for more then this interval will be failed. * defaults to `indices.recovery.internal_action_long_timeout` */ - public static final Setting INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING = Setting.timeSetting("indices.recovery.recovery_activity_timeout", (s) -> INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.getRaw(s) , TimeValue.timeValueSeconds(0), true, Setting.Scope.CLUSTER); + public static final Setting INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING = + Setting.timeSetting("indices.recovery.recovery_activity_timeout", + (s) -> INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.getRaw(s) , TimeValue.timeValueSeconds(0), true, + SettingsProperty.ClusterScope); public static final ByteSizeValue DEFAULT_CHUNK_SIZE = new ByteSizeValue(512, ByteSizeUnit.KB); diff --git a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index d0aec817ee9a..23e007c5366e 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; @@ -69,7 +70,9 @@ import java.util.concurrent.atomic.AtomicInteger; public class IndicesStore extends AbstractComponent implements ClusterStateListener, Closeable { // TODO this class can be foled into either IndicesService and partially into IndicesClusterStateService there is no need for a separate public service - public static final Setting INDICES_STORE_DELETE_SHARD_TIMEOUT = Setting.positiveTimeSetting("indices.store.delete.shard.timeout", new TimeValue(30, TimeUnit.SECONDS), false, Setting.Scope.CLUSTER); + public static final Setting INDICES_STORE_DELETE_SHARD_TIMEOUT = + Setting.positiveTimeSetting("indices.store.delete.shard.timeout", new TimeValue(30, TimeUnit.SECONDS), false, + SettingsProperty.ClusterScope); public static final String ACTION_SHARD_EXISTS = "internal:index/shard/exists"; private static final EnumSet ACTIVE_STATES = EnumSet.of(IndexShardState.STARTED, IndexShardState.RELOCATED); private final IndicesService indicesService; diff --git a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java index e4537b876fa2..6eb34adc9f82 100644 --- a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java +++ b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -67,7 +68,8 @@ import java.util.concurrent.locks.ReentrantLock; */ public class IndicesTTLService extends AbstractLifecycleComponent { - public static final Setting INDICES_TTL_INTERVAL_SETTING = Setting.positiveTimeSetting("indices.ttl.interval", TimeValue.timeValueSeconds(60), true, Setting.Scope.CLUSTER); + public static final Setting INDICES_TTL_INTERVAL_SETTING = + Setting.positiveTimeSetting("indices.ttl.interval", TimeValue.timeValueSeconds(60), true, SettingsProperty.ClusterScope); private final ClusterService clusterService; private final IndicesService indicesService; diff --git a/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java b/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java index 99a78f13a075..be985cb70209 100644 --- a/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java +++ b/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java @@ -21,6 +21,7 @@ package org.elasticsearch.monitor.fs; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.SingleObjectCache; @@ -37,7 +38,8 @@ public class FsService extends AbstractComponent { private final SingleObjectCache fsStatsCache; public final static Setting REFRESH_INTERVAL_SETTING = - Setting.timeSetting("monitor.fs.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, Setting.Scope.CLUSTER); + Setting.timeSetting("monitor.fs.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, + SettingsProperty.ClusterScope); public FsService(Settings settings, NodeEnvironment nodeEnvironment) throws 
IOException { super(settings); diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java index 97c813a0fe32..03c6c00d5395 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java @@ -21,7 +21,7 @@ package org.elasticsearch.monitor.jvm; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Scope; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.FutureUtils; @@ -47,12 +47,14 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent ENABLED_SETTING = Setting.boolSetting("monitor.jvm.gc.enabled", true, false, Scope.CLUSTER); + public final static Setting ENABLED_SETTING = + Setting.boolSetting("monitor.jvm.gc.enabled", true, false, SettingsProperty.ClusterScope); public final static Setting REFRESH_INTERVAL_SETTING = - Setting.timeSetting("monitor.jvm.gc.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, Scope.CLUSTER); + Setting.timeSetting("monitor.jvm.gc.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, + SettingsProperty.ClusterScope); private static String GC_COLLECTOR_PREFIX = "monitor.jvm.gc.collector."; - public final static Setting GC_SETTING = Setting.groupSetting(GC_COLLECTOR_PREFIX, false, Scope.CLUSTER); + public final static Setting GC_SETTING = Setting.groupSetting(GC_COLLECTOR_PREFIX, false, SettingsProperty.ClusterScope); static class GcThreshold { public final String name; diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java 
b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java index fbec6cda1689..e816e51911e9 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java @@ -21,6 +21,7 @@ package org.elasticsearch.monitor.jvm; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -36,7 +37,8 @@ public class JvmService extends AbstractComponent { private JvmStats jvmStats; public final static Setting REFRESH_INTERVAL_SETTING = - Setting.timeSetting("monitor.jvm.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, Setting.Scope.CLUSTER); + Setting.timeSetting("monitor.jvm.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, + SettingsProperty.ClusterScope); public JvmService(Settings settings) { super(settings); diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsService.java b/core/src/main/java/org/elasticsearch/monitor/os/OsService.java index 5f836c6f9284..1cd0910ab3e9 100644 --- a/core/src/main/java/org/elasticsearch/monitor/os/OsService.java +++ b/core/src/main/java/org/elasticsearch/monitor/os/OsService.java @@ -21,6 +21,7 @@ package org.elasticsearch.monitor.os; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.SingleObjectCache; @@ -38,7 +39,8 @@ public class OsService extends AbstractComponent { private SingleObjectCache osStatsCache; public final static Setting REFRESH_INTERVAL_SETTING = - 
Setting.timeSetting("monitor.os.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, Setting.Scope.CLUSTER); + Setting.timeSetting("monitor.os.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, + SettingsProperty.ClusterScope); public OsService(Settings settings) { super(settings); diff --git a/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java b/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java index 9e3283af4fc9..316c8a8131f4 100644 --- a/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java +++ b/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java @@ -21,6 +21,7 @@ package org.elasticsearch.monitor.process; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.SingleObjectCache; @@ -35,7 +36,8 @@ public final class ProcessService extends AbstractComponent { private final SingleObjectCache processStatsCache; public final static Setting REFRESH_INTERVAL_SETTING = - Setting.timeSetting("monitor.process.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, Setting.Scope.CLUSTER); + Setting.timeSetting("monitor.process.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, + SettingsProperty.ClusterScope); public ProcessService(Settings settings) { super(settings); diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index ee523e975a1a..4b0c806749f2 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -49,6 +49,7 @@ import 
org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; @@ -124,17 +125,23 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; */ public class Node implements Closeable { - public static final Setting WRITE_PORTS_FIELD_SETTING = Setting.boolSetting("node.portsfile", false, false, Setting.Scope.CLUSTER); - public static final Setting NODE_CLIENT_SETTING = Setting.boolSetting("node.client", false, false, Setting.Scope.CLUSTER); - public static final Setting NODE_DATA_SETTING = Setting.boolSetting("node.data", true, false, Setting.Scope.CLUSTER); - public static final Setting NODE_MASTER_SETTING = Setting.boolSetting("node.master", true, false, Setting.Scope.CLUSTER); - public static final Setting NODE_LOCAL_SETTING = Setting.boolSetting("node.local", false, false, Setting.Scope.CLUSTER); - public static final Setting NODE_MODE_SETTING = new Setting<>("node.mode", "network", Function.identity(), false, Setting.Scope.CLUSTER); - public static final Setting NODE_INGEST_SETTING = Setting.boolSetting("node.ingest", true, false, Setting.Scope.CLUSTER); - public static final Setting NODE_NAME_SETTING = Setting.simpleString("node.name", false, Setting.Scope.CLUSTER); + public static final Setting WRITE_PORTS_FIELD_SETTING = + Setting.boolSetting("node.portsfile", false, false, SettingsProperty.ClusterScope); + public static final Setting NODE_CLIENT_SETTING = + Setting.boolSetting("node.client", false, false, SettingsProperty.ClusterScope); + public static final Setting NODE_DATA_SETTING = Setting.boolSetting("node.data", true, false, SettingsProperty.ClusterScope); + 
public static final Setting NODE_MASTER_SETTING = + Setting.boolSetting("node.master", true, false, SettingsProperty.ClusterScope); + public static final Setting NODE_LOCAL_SETTING = + Setting.boolSetting("node.local", false, false, SettingsProperty.ClusterScope); + public static final Setting NODE_MODE_SETTING = + new Setting<>("node.mode", "network", Function.identity(), false, SettingsProperty.ClusterScope); + public static final Setting NODE_INGEST_SETTING = + Setting.boolSetting("node.ingest", true, false, SettingsProperty.ClusterScope); + public static final Setting NODE_NAME_SETTING = Setting.simpleString("node.name", false, SettingsProperty.ClusterScope); // this sucks that folks can mistype client etc and get away with it. // TODO: we should move this to node.attribute.${name} = ${value} instead. - public static final Setting NODE_ATTRIBUTES = Setting.groupSetting("node.", false, Setting.Scope.CLUSTER); + public static final Setting NODE_ATTRIBUTES = Setting.groupSetting("node.", false, SettingsProperty.ClusterScope); private static final String CLIENT_TYPE = "node"; diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java index faf449586c11..45fd66d6daa2 100644 --- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.cli.Terminal; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.env.Environment; @@ -57,7 +58,8 @@ public class InternalSettingsPreparer { public static final String 
SECRET_PROMPT_VALUE = "${prompt.secret}"; public static final String TEXT_PROMPT_VALUE = "${prompt.text}"; - public static final Setting IGNORE_SYSTEM_PROPERTIES_SETTING = Setting.boolSetting("config.ignore_system_properties", false, false, Setting.Scope.CLUSTER); + public static final Setting IGNORE_SYSTEM_PROPERTIES_SETTING = + Setting.boolSetting("config.ignore_system_properties", false, false, SettingsProperty.ClusterScope); /** * Prepares the settings by gathering all elasticsearch system properties and setting defaults. diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java index 3e36c5d8f096..1fb5875109e7 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; @@ -71,7 +72,8 @@ public class PluginsService extends AbstractComponent { */ private final List> plugins; private final PluginsAndModules info; - public static final Setting> MANDATORY_SETTING = Setting.listSetting("plugin.mandatory", Collections.emptyList(), Function.identity(), false, Setting.Scope.CLUSTER); + public static final Setting> MANDATORY_SETTING = + Setting.listSetting("plugin.mandatory", Collections.emptyList(), Function.identity(), false, SettingsProperty.ClusterScope); private final Map> onModuleReferences; diff --git a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java index 0aa62225479c..8b3641057189 100644 --- 
a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.fs.FsBlobStore; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.env.Environment; import org.elasticsearch.index.snapshots.IndexShardRepository; @@ -51,12 +52,17 @@ public class FsRepository extends BlobStoreRepository { public final static String TYPE = "fs"; - public static final Setting LOCATION_SETTING = new Setting<>("location", "", Function.identity(), false, Setting.Scope.CLUSTER); - public static final Setting REPOSITORIES_LOCATION_SETTING = new Setting<>("repositories.fs.location", LOCATION_SETTING, Function.identity(), false, Setting.Scope.CLUSTER); - public static final Setting CHUNK_SIZE_SETTING = Setting.byteSizeSetting("chunk_size", "-1", false, Setting.Scope.CLUSTER); - public static final Setting REPOSITORIES_CHUNK_SIZE_SETTING = Setting.byteSizeSetting("repositories.fs.chunk_size", "-1", false, Setting.Scope.CLUSTER); - public static final Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, false, Setting.Scope.CLUSTER); - public static final Setting REPOSITORIES_COMPRESS_SETTING = Setting.boolSetting("repositories.fs.compress", false, false, Setting.Scope.CLUSTER); + public static final Setting LOCATION_SETTING = + new Setting<>("location", "", Function.identity(), false, SettingsProperty.ClusterScope); + public static final Setting REPOSITORIES_LOCATION_SETTING = + new Setting<>("repositories.fs.location", LOCATION_SETTING, Function.identity(), false, SettingsProperty.ClusterScope); + public static final Setting CHUNK_SIZE_SETTING = + Setting.byteSizeSetting("chunk_size", "-1", false, 
SettingsProperty.ClusterScope); + public static final Setting REPOSITORIES_CHUNK_SIZE_SETTING = + Setting.byteSizeSetting("repositories.fs.chunk_size", "-1", false, SettingsProperty.ClusterScope); + public static final Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, false, SettingsProperty.ClusterScope); + public static final Setting REPOSITORIES_COMPRESS_SETTING = + Setting.boolSetting("repositories.fs.compress", false, false, SettingsProperty.ClusterScope); private final FsBlobStore blobStore; diff --git a/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java b/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java index 2d15db245aa3..c5255fd8b5e9 100644 --- a/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.url.URLBlobStore; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.util.URIPattern; import org.elasticsearch.env.Environment; import org.elasticsearch.index.snapshots.IndexShardRepository; @@ -55,19 +56,24 @@ public class URLRepository extends BlobStoreRepository { public final static String TYPE = "url"; - public static final Setting> SUPPORTED_PROTOCOLS_SETTING = Setting.listSetting("repositories.url.supported_protocols", - Arrays.asList("http", "https", "ftp", "file", "jar"), Function.identity(), false, Setting.Scope.CLUSTER); + public static final Setting> SUPPORTED_PROTOCOLS_SETTING = + Setting.listSetting("repositories.url.supported_protocols", Arrays.asList("http", "https", "ftp", "file", "jar"), + Function.identity(), false, SettingsProperty.ClusterScope); - public static final Setting> ALLOWED_URLS_SETTING = 
Setting.listSetting("repositories.url.allowed_urls", - Collections.emptyList(), URIPattern::new, false, Setting.Scope.CLUSTER); + public static final Setting> ALLOWED_URLS_SETTING = + Setting.listSetting("repositories.url.allowed_urls", Collections.emptyList(), URIPattern::new, false, + SettingsProperty.ClusterScope); - public static final Setting URL_SETTING = new Setting<>("url", "http:", URLRepository::parseURL, false, Setting.Scope.CLUSTER); - public static final Setting REPOSITORIES_URL_SETTING = new Setting<>("repositories.url.url", (s) -> s.get("repositories.uri.url", "http:"), - URLRepository::parseURL, false, Setting.Scope.CLUSTER); + public static final Setting URL_SETTING = + new Setting<>("url", "http:", URLRepository::parseURL, false, SettingsProperty.ClusterScope); + public static final Setting REPOSITORIES_URL_SETTING = + new Setting<>("repositories.url.url", (s) -> s.get("repositories.uri.url", "http:"), URLRepository::parseURL, false, + SettingsProperty.ClusterScope); - public static final Setting LIST_DIRECTORIES_SETTING = Setting.boolSetting("list_directories", true, false, Setting.Scope.CLUSTER); - public static final Setting REPOSITORIES_LIST_DIRECTORIES_SETTING = Setting.boolSetting("repositories.uri.list_directories", true, - false, Setting.Scope.CLUSTER); + public static final Setting LIST_DIRECTORIES_SETTING = + Setting.boolSetting("list_directories", true, false, SettingsProperty.ClusterScope); + public static final Setting REPOSITORIES_LIST_DIRECTORIES_SETTING = + Setting.boolSetting("repositories.uri.list_directories", true, false, SettingsProperty.ClusterScope); private final List supportedProtocols; diff --git a/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index 60b3ccce930d..1ea87c6c61e8 100644 --- a/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -23,6 +23,7 @@ 
import org.elasticsearch.client.Client; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; /** @@ -34,7 +35,8 @@ import org.elasticsearch.common.settings.Settings; * {@link org.elasticsearch.rest.RestController#registerRelevantHeaders(String...)} */ public abstract class BaseRestHandler extends AbstractComponent implements RestHandler { - public static final Setting MULTI_ALLOW_EXPLICIT_INDEX = Setting.boolSetting("rest.action.multi.allow_explicit_index", true, false, Setting.Scope.CLUSTER); + public static final Setting MULTI_ALLOW_EXPLICIT_INDEX = + Setting.boolSetting("rest.action.multi.allow_explicit_index", true, false, SettingsProperty.ClusterScope); private final Client client; protected final ParseFieldMatcher parseFieldMatcher; diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java index d21283d9cfaf..058deadcf4c7 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java @@ -46,6 +46,7 @@ import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -84,10 +85,13 @@ public class ScriptService extends AbstractComponent implements Closeable { static final String DISABLE_DYNAMIC_SCRIPTING_SETTING = "script.disable_dynamic"; - public static final Setting SCRIPT_CACHE_SIZE_SETTING = 
Setting.intSetting("script.cache.max_size", 100, 0, false, Setting.Scope.CLUSTER); - public static final Setting SCRIPT_CACHE_EXPIRE_SETTING = Setting.positiveTimeSetting("script.cache.expire", TimeValue.timeValueMillis(0), false, Setting.Scope.CLUSTER); + public static final Setting SCRIPT_CACHE_SIZE_SETTING = + Setting.intSetting("script.cache.max_size", 100, 0, false, SettingsProperty.ClusterScope); + public static final Setting SCRIPT_CACHE_EXPIRE_SETTING = + Setting.positiveTimeSetting("script.cache.expire", TimeValue.timeValueMillis(0), false, SettingsProperty.ClusterScope); public static final String SCRIPT_INDEX = ".scripts"; - public static final Setting SCRIPT_AUTO_RELOAD_ENABLED_SETTING = Setting.boolSetting("script.auto_reload_enabled", true, false, Setting.Scope.CLUSTER); + public static final Setting SCRIPT_AUTO_RELOAD_ENABLED_SETTING = + Setting.boolSetting("script.auto_reload_enabled", true, false, SettingsProperty.ClusterScope); private final String defaultLang; diff --git a/core/src/main/java/org/elasticsearch/script/ScriptSettings.java b/core/src/main/java/org/elasticsearch/script/ScriptSettings.java index 8ececfe25bb1..a2ab5f9c269b 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptSettings.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptSettings.java @@ -21,6 +21,7 @@ package org.elasticsearch.script; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import java.util.ArrayList; @@ -45,7 +46,7 @@ public class ScriptSettings { scriptType.getDefaultScriptMode().getMode(), ScriptMode::parse, false, - Setting.Scope.CLUSTER)); + SettingsProperty.ClusterScope)); } SCRIPT_TYPE_SETTING_MAP = Collections.unmodifiableMap(scriptTypeSettingMap); } @@ -66,7 +67,7 @@ public class ScriptSettings { throw new IllegalArgumentException("unregistered default language [" + 
setting + "]"); } return setting; - }, false, Setting.Scope.CLUSTER); + }, false, SettingsProperty.ClusterScope); } private static Map> contextSettings(ScriptContextRegistry scriptContextRegistry) { @@ -77,7 +78,7 @@ public class ScriptSettings { ScriptMode.OFF.getMode(), ScriptMode::parse, false, - Setting.Scope.CLUSTER + SettingsProperty.ClusterScope )); } return scriptContextSettingMap; @@ -138,7 +139,7 @@ public class ScriptSettings { defaultSetting, ScriptMode::parse, false, - Setting.Scope.CLUSTER); + SettingsProperty.ClusterScope); scriptModeSettings.add(setting); } } diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 3f62066cd4c2..a4106feb2317 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; @@ -106,11 +107,14 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes; public class SearchService extends AbstractLifecycleComponent implements IndexEventListener { // we can have 5 minutes here, since we make sure to clean with search requests and when shard/index closes - public static final Setting DEFAULT_KEEPALIVE_SETTING = Setting.positiveTimeSetting("search.default_keep_alive", timeValueMinutes(5), false, Setting.Scope.CLUSTER); - public static final Setting KEEPALIVE_INTERVAL_SETTING = Setting.positiveTimeSetting("search.keep_alive_interval", timeValueMinutes(1), false, Setting.Scope.CLUSTER); + public static final Setting 
DEFAULT_KEEPALIVE_SETTING = + Setting.positiveTimeSetting("search.default_keep_alive", timeValueMinutes(5), false, SettingsProperty.ClusterScope); + public static final Setting KEEPALIVE_INTERVAL_SETTING = + Setting.positiveTimeSetting("search.keep_alive_interval", timeValueMinutes(1), false, SettingsProperty.ClusterScope); public static final TimeValue NO_TIMEOUT = timeValueMillis(-1); - public static final Setting DEFAULT_SEARCH_TIMEOUT_SETTING = Setting.timeSetting("search.default_search_timeout", NO_TIMEOUT, true, Setting.Scope.CLUSTER); + public static final Setting DEFAULT_SEARCH_TIMEOUT_SETTING = + Setting.timeSetting("search.default_search_timeout", NO_TIMEOUT, true, SettingsProperty.ClusterScope); private final ThreadPool threadPool; diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index c7f4392e56a3..14c52ab627a6 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.SizeValue; @@ -188,7 +189,8 @@ public class ThreadPool extends AbstractComponent implements Closeable { } } - public static final Setting THREADPOOL_GROUP_SETTING = Setting.groupSetting("threadpool.", true, Setting.Scope.CLUSTER); + public static final Setting THREADPOOL_GROUP_SETTING = + Setting.groupSetting("threadpool.", true, SettingsProperty.ClusterScope); private volatile Map executors; diff --git a/core/src/main/java/org/elasticsearch/transport/Transport.java 
b/core/src/main/java/org/elasticsearch/transport/Transport.java index c930773f39c2..c72bcb2bb549 100644 --- a/core/src/main/java/org/elasticsearch/transport/Transport.java +++ b/core/src/main/java/org/elasticsearch/transport/Transport.java @@ -22,6 +22,7 @@ package org.elasticsearch.transport; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; @@ -35,7 +36,7 @@ import java.util.Map; public interface Transport extends LifecycleComponent { - Setting TRANSPORT_TCP_COMPRESS = Setting.boolSetting("transport.tcp.compress", false, false, Setting.Scope.CLUSTER); + Setting TRANSPORT_TCP_COMPRESS = Setting.boolSetting("transport.tcp.compress", false, false, SettingsProperty.ClusterScope); void transportServiceAdapter(TransportServiceAdapter service); diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index d04966bc2ca3..2a03c5162558 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -33,7 +33,7 @@ import org.elasticsearch.common.metrics.MeanMetric; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Scope; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; @@ -98,10 +98,11 @@ public class TransportService extends 
AbstractLifecycleComponent> TRACE_LOG_INCLUDE_SETTING = listSetting("transport.tracer.include", emptyList(), - Function.identity(), true, Scope.CLUSTER); - public static final Setting> TRACE_LOG_EXCLUDE_SETTING = listSetting("transport.tracer.exclude", - Arrays.asList("internal:discovery/zen/fd*", TransportLivenessAction.NAME), Function.identity(), true, Scope.CLUSTER); + public static final Setting> TRACE_LOG_INCLUDE_SETTING = + listSetting("transport.tracer.include", emptyList(), Function.identity(), true, SettingsProperty.ClusterScope); + public static final Setting> TRACE_LOG_EXCLUDE_SETTING = + listSetting("transport.tracer.exclude", Arrays.asList("internal:discovery/zen/fd*", TransportLivenessAction.NAME), + Function.identity(), true, SettingsProperty.ClusterScope); private final ESLogger tracerLog; diff --git a/core/src/main/java/org/elasticsearch/transport/TransportSettings.java b/core/src/main/java/org/elasticsearch/transport/TransportSettings.java index e02dcc412edd..e5fb9f7e14dd 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportSettings.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportSettings.java @@ -19,7 +19,7 @@ package org.elasticsearch.transport; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Scope; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import java.util.List; @@ -35,13 +35,19 @@ import static org.elasticsearch.common.settings.Setting.listSetting; */ final public class TransportSettings { - public static final Setting> HOST = listSetting("transport.host", emptyList(), s -> s, false, Scope.CLUSTER); - public static final Setting> PUBLISH_HOST = listSetting("transport.publish_host", HOST, s -> s, false, Scope.CLUSTER); - public static final Setting> BIND_HOST = listSetting("transport.bind_host", HOST, s -> s, false, Scope.CLUSTER); - public static final Setting PORT = new 
Setting<>("transport.tcp.port", "9300-9400", s -> s, false, Scope.CLUSTER); - public static final Setting PUBLISH_PORT = intSetting("transport.publish_port", -1, -1, false, Scope.CLUSTER); + public static final Setting> HOST = + listSetting("transport.host", emptyList(), s -> s, false, SettingsProperty.ClusterScope); + public static final Setting> PUBLISH_HOST = + listSetting("transport.publish_host", HOST, s -> s, false, SettingsProperty.ClusterScope); + public static final Setting> BIND_HOST = + listSetting("transport.bind_host", HOST, s -> s, false, SettingsProperty.ClusterScope); + public static final Setting PORT = + new Setting<>("transport.tcp.port", "9300-9400", s -> s, false, SettingsProperty.ClusterScope); + public static final Setting PUBLISH_PORT = + intSetting("transport.publish_port", -1, -1, false, SettingsProperty.ClusterScope); public static final String DEFAULT_PROFILE = "default"; - public static final Setting TRANSPORT_PROFILES_SETTING = groupSetting("transport.profiles.", true, Scope.CLUSTER); + public static final Setting TRANSPORT_PROFILES_SETTING = + groupSetting("transport.profiles.", true, SettingsProperty.ClusterScope); private TransportSettings() { diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java index 685fdeda6837..dd250fabd1d3 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java @@ -42,7 +42,7 @@ import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.network.NetworkService.TcpSettings; import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Scope; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.InetSocketTransportAddress; @@ -147,40 +147,46 @@ public class NettyTransport extends AbstractLifecycleComponent implem public static final String TRANSPORT_CLIENT_WORKER_THREAD_NAME_PREFIX = "transport_client_worker"; public static final String TRANSPORT_CLIENT_BOSS_THREAD_NAME_PREFIX = "transport_client_boss"; - public static final Setting WORKER_COUNT = new Setting<>("transport.netty.worker_count", + public static final Setting WORKER_COUNT = + new Setting<>("transport.netty.worker_count", (s) -> Integer.toString(EsExecutors.boundedNumberOfProcessors(s) * 2), - (s) -> Setting.parseInt(s, 1, "transport.netty.worker_count"), false, Setting.Scope.CLUSTER); - public static final Setting CONNECTIONS_PER_NODE_RECOVERY = intSetting("transport.connections_per_node.recovery", 2, 1, false, - Scope.CLUSTER); - public static final Setting CONNECTIONS_PER_NODE_BULK = intSetting("transport.connections_per_node.bulk", 3, 1, false, - Scope.CLUSTER); - public static final Setting CONNECTIONS_PER_NODE_REG = intSetting("transport.connections_per_node.reg", 6, 1, false, - Scope.CLUSTER); - public static final Setting CONNECTIONS_PER_NODE_STATE = intSetting("transport.connections_per_node.state", 1, 1, false, - Scope.CLUSTER); - public static final Setting CONNECTIONS_PER_NODE_PING = intSetting("transport.connections_per_node.ping", 1, 1, false, - Scope.CLUSTER); + (s) -> Setting.parseInt(s, 1, "transport.netty.worker_count"), false, SettingsProperty.ClusterScope); + public static final Setting CONNECTIONS_PER_NODE_RECOVERY = + intSetting("transport.connections_per_node.recovery", 2, 1, false, SettingsProperty.ClusterScope); + public static final Setting CONNECTIONS_PER_NODE_BULK = + intSetting("transport.connections_per_node.bulk", 3, 1, false, SettingsProperty.ClusterScope); + public static final Setting CONNECTIONS_PER_NODE_REG = + intSetting("transport.connections_per_node.reg", 
6, 1, false, SettingsProperty.ClusterScope); + public static final Setting CONNECTIONS_PER_NODE_STATE = + intSetting("transport.connections_per_node.state", 1, 1, false, SettingsProperty.ClusterScope); + public static final Setting CONNECTIONS_PER_NODE_PING = + intSetting("transport.connections_per_node.ping", 1, 1, false, SettingsProperty.ClusterScope); // the scheduled internal ping interval setting, defaults to disabled (-1) - public static final Setting PING_SCHEDULE = timeSetting("transport.ping_schedule", TimeValue.timeValueSeconds(-1), false, - Setting.Scope.CLUSTER); - public static final Setting TCP_BLOCKING_CLIENT = boolSetting("transport.tcp.blocking_client", TcpSettings.TCP_BLOCKING_CLIENT, - false, Setting.Scope.CLUSTER); - public static final Setting TCP_CONNECT_TIMEOUT = timeSetting("transport.tcp.connect_timeout", - TcpSettings.TCP_CONNECT_TIMEOUT, false, Setting.Scope.CLUSTER); - public static final Setting TCP_NO_DELAY = boolSetting("transport.tcp_no_delay", TcpSettings.TCP_NO_DELAY, false, - Setting.Scope.CLUSTER); - public static final Setting TCP_KEEP_ALIVE = boolSetting("transport.tcp.keep_alive", TcpSettings.TCP_KEEP_ALIVE, false, - Setting.Scope.CLUSTER); - public static final Setting TCP_BLOCKING_SERVER = boolSetting("transport.tcp.blocking_server", TcpSettings.TCP_BLOCKING_SERVER, - false, Setting.Scope.CLUSTER); - public static final Setting TCP_REUSE_ADDRESS = boolSetting("transport.tcp.reuse_address", TcpSettings.TCP_REUSE_ADDRESS, - false, Setting.Scope.CLUSTER); + public static final Setting PING_SCHEDULE = + timeSetting("transport.ping_schedule", TimeValue.timeValueSeconds(-1), false, SettingsProperty.ClusterScope); + public static final Setting TCP_BLOCKING_CLIENT = + boolSetting("transport.tcp.blocking_client", TcpSettings.TCP_BLOCKING_CLIENT, false, SettingsProperty.ClusterScope); + public static final Setting TCP_CONNECT_TIMEOUT = + timeSetting("transport.tcp.connect_timeout", TcpSettings.TCP_CONNECT_TIMEOUT, false, 
SettingsProperty.ClusterScope); + public static final Setting TCP_NO_DELAY = + boolSetting("transport.tcp_no_delay", TcpSettings.TCP_NO_DELAY, false, SettingsProperty.ClusterScope); + public static final Setting TCP_KEEP_ALIVE = + boolSetting("transport.tcp.keep_alive", TcpSettings.TCP_KEEP_ALIVE, false, SettingsProperty.ClusterScope); + public static final Setting TCP_BLOCKING_SERVER = + boolSetting("transport.tcp.blocking_server", TcpSettings.TCP_BLOCKING_SERVER, false, SettingsProperty.ClusterScope); + public static final Setting TCP_REUSE_ADDRESS = + boolSetting("transport.tcp.reuse_address", TcpSettings.TCP_REUSE_ADDRESS, false, SettingsProperty.ClusterScope); - public static final Setting TCP_SEND_BUFFER_SIZE = Setting.byteSizeSetting("transport.tcp.send_buffer_size", TcpSettings.TCP_SEND_BUFFER_SIZE, false, Setting.Scope.CLUSTER); - public static final Setting TCP_RECEIVE_BUFFER_SIZE = Setting.byteSizeSetting("transport.tcp.receive_buffer_size", TcpSettings.TCP_RECEIVE_BUFFER_SIZE, false, Setting.Scope.CLUSTER); + public static final Setting TCP_SEND_BUFFER_SIZE = + Setting.byteSizeSetting("transport.tcp.send_buffer_size", TcpSettings.TCP_SEND_BUFFER_SIZE, false, SettingsProperty.ClusterScope); + public static final Setting TCP_RECEIVE_BUFFER_SIZE = + Setting.byteSizeSetting("transport.tcp.receive_buffer_size", TcpSettings.TCP_RECEIVE_BUFFER_SIZE, false, + SettingsProperty.ClusterScope); - public static final Setting NETTY_MAX_CUMULATION_BUFFER_CAPACITY = Setting.byteSizeSetting("transport.netty.max_cumulation_buffer_capacity", new ByteSizeValue(-1), false, Setting.Scope.CLUSTER); - public static final Setting NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = Setting.intSetting("transport.netty.max_composite_buffer_components", -1, -1, false, Setting.Scope.CLUSTER); + public static final Setting NETTY_MAX_CUMULATION_BUFFER_CAPACITY = + Setting.byteSizeSetting("transport.netty.max_cumulation_buffer_capacity", new ByteSizeValue(-1), false, 
SettingsProperty.ClusterScope); + public static final Setting NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = + Setting.intSetting("transport.netty.max_composite_buffer_components", -1, -1, false, SettingsProperty.ClusterScope); // See AdaptiveReceiveBufferSizePredictor#DEFAULT_XXX for default values in netty..., we can use higher ones for us, even fixed one public static final Setting NETTY_RECEIVE_PREDICTOR_SIZE = Setting.byteSizeSetting( @@ -193,12 +199,13 @@ public class NettyTransport extends AbstractLifecycleComponent implem defaultReceiverPredictor = Math.min(defaultReceiverPredictor, Math.max(l, 64 * 1024)); } return new ByteSizeValue(defaultReceiverPredictor).toString(); - }, false, Setting.Scope.CLUSTER); - public static final Setting NETTY_RECEIVE_PREDICTOR_MIN = byteSizeSetting("transport.netty.receive_predictor_min", - NETTY_RECEIVE_PREDICTOR_SIZE, false, Scope.CLUSTER); - public static final Setting NETTY_RECEIVE_PREDICTOR_MAX = byteSizeSetting("transport.netty.receive_predictor_max", - NETTY_RECEIVE_PREDICTOR_SIZE, false, Scope.CLUSTER); - public static final Setting NETTY_BOSS_COUNT = intSetting("transport.netty.boss_count", 1, 1, false, Scope.CLUSTER); + }, false, SettingsProperty.ClusterScope); + public static final Setting NETTY_RECEIVE_PREDICTOR_MIN = + byteSizeSetting("transport.netty.receive_predictor_min", NETTY_RECEIVE_PREDICTOR_SIZE, false, SettingsProperty.ClusterScope); + public static final Setting NETTY_RECEIVE_PREDICTOR_MAX = + byteSizeSetting("transport.netty.receive_predictor_max", NETTY_RECEIVE_PREDICTOR_SIZE, false, SettingsProperty.ClusterScope); + public static final Setting NETTY_BOSS_COUNT = + intSetting("transport.netty.boss_count", 1, 1, false, SettingsProperty.ClusterScope); protected final NetworkService networkService; protected final Version version; diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index af2e3fe96f0a..0b4f99457eb6 100644 --- 
a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -44,6 +44,7 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.set.Sets; @@ -121,7 +122,7 @@ public class TribeService extends AbstractLifecycleComponent { } // internal settings only - public static final Setting TRIBE_NAME_SETTING = Setting.simpleString("tribe.name", false, Setting.Scope.CLUSTER); + public static final Setting TRIBE_NAME_SETTING = Setting.simpleString("tribe.name", false, SettingsProperty.ClusterScope); private final ClusterService clusterService; private final String[] blockIndicesWrite; private final String[] blockIndicesRead; @@ -140,18 +141,21 @@ public class TribeService extends AbstractLifecycleComponent { throw new IllegalArgumentException( "Invalid value for [tribe.on_conflict] must be either [any, drop or start with prefer_] but was: [" + s + "]"); } - }, false, Setting.Scope.CLUSTER); + }, false, SettingsProperty.ClusterScope); - public static final Setting BLOCKS_METADATA_SETTING = Setting.boolSetting("tribe.blocks.metadata", false, false, - Setting.Scope.CLUSTER); - public static final Setting BLOCKS_WRITE_SETTING = Setting.boolSetting("tribe.blocks.write", false, false, - Setting.Scope.CLUSTER); - public static final Setting> BLOCKS_WRITE_INDICES_SETTING = Setting.listSetting("tribe.blocks.write.indices", - Collections.emptyList(), Function.identity(), false, Setting.Scope.CLUSTER); - public static final Setting> BLOCKS_READ_INDICES_SETTING = Setting.listSetting("tribe.blocks.read.indices", - Collections.emptyList(), 
Function.identity(), false, Setting.Scope.CLUSTER); - public static final Setting> BLOCKS_METADATA_INDICES_SETTING = Setting.listSetting("tribe.blocks.metadata.indices", - Collections.emptyList(), Function.identity(), false, Setting.Scope.CLUSTER); + public static final Setting BLOCKS_METADATA_SETTING = + Setting.boolSetting("tribe.blocks.metadata", false, false, SettingsProperty.ClusterScope); + public static final Setting BLOCKS_WRITE_SETTING = + Setting.boolSetting("tribe.blocks.write", false, false, SettingsProperty.ClusterScope); + public static final Setting> BLOCKS_WRITE_INDICES_SETTING = + Setting.listSetting("tribe.blocks.write.indices", Collections.emptyList(), Function.identity(), false, + SettingsProperty.ClusterScope); + public static final Setting> BLOCKS_READ_INDICES_SETTING = + Setting.listSetting("tribe.blocks.read.indices", Collections.emptyList(), Function.identity(), false, + SettingsProperty.ClusterScope); + public static final Setting> BLOCKS_METADATA_INDICES_SETTING = + Setting.listSetting("tribe.blocks.metadata.indices", Collections.emptyList(), Function.identity(), false, + SettingsProperty.ClusterScope); public static final Set TRIBE_SETTING_KEYS = Sets.newHashSet(TRIBE_NAME_SETTING.getKey(), ON_CONFLICT_SETTING.getKey(), BLOCKS_METADATA_INDICES_SETTING.getKey(), BLOCKS_METADATA_SETTING.getKey(), BLOCKS_READ_INDICES_SETTING.getKey(), BLOCKS_WRITE_INDICES_SETTING.getKey(), BLOCKS_WRITE_SETTING.getKey()); diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java index 2c2bab24605f..9efccd0afd2a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.inject.ModuleTestCase; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import 
org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; @@ -83,7 +84,7 @@ public class ClusterModuleTests extends ModuleTestCase { public void testRegisterClusterDynamicSetting() { SettingsModule module = new SettingsModule(Settings.EMPTY); - module.registerSetting(Setting.boolSetting("foo.bar", false, true, Setting.Scope.CLUSTER)); + module.registerSetting(Setting.boolSetting("foo.bar", false, true, SettingsProperty.ClusterScope)); assertInstanceBinding(module, ClusterSettings.class, service -> service.hasDynamicSetting("foo.bar")); } @@ -98,7 +99,7 @@ public class ClusterModuleTests extends ModuleTestCase { public void testRegisterIndexDynamicSetting() { SettingsModule module = new SettingsModule(Settings.EMPTY); - module.registerSetting(Setting.boolSetting("foo.bar", false, true, Setting.Scope.INDEX)); + module.registerSetting(Setting.boolSetting("foo.bar", false, true, SettingsProperty.IndexScope)); assertInstanceBinding(module, IndexScopedSettings.class, service -> service.hasDynamicSetting("foo.bar")); } diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java index dbf502d58055..9590d214d5be 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java @@ -22,19 +22,15 @@ package org.elasticsearch.cluster.settings; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.inject.Inject; -import 
org.elasticsearch.common.inject.Module; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import java.util.Collection; -import java.util.Collections; import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -51,9 +47,9 @@ public class SettingsFilteringIT extends ESIntegTestCase { public static class SettingsFilteringPlugin extends Plugin { public static final Setting SOME_NODE_SETTING = - Setting.boolSetting("some.node.setting", false, false, Setting.Scope.CLUSTER, true); + Setting.boolSetting("some.node.setting", false, false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); public static final Setting SOME_OTHER_NODE_SETTING = - Setting.boolSetting("some.other.node.setting", false, false, Setting.Scope.CLUSTER); + Setting.boolSetting("some.other.node.setting", false, false, SettingsProperty.ClusterScope); /** * The name of the plugin. 
@@ -79,7 +75,7 @@ public class SettingsFilteringIT extends ESIntegTestCase { public void onModule(SettingsModule module) { module.registerSetting(SOME_NODE_SETTING); module.registerSetting(SOME_OTHER_NODE_SETTING); - module.registerSetting(Setting.groupSetting("index.filter_test.", false, Setting.Scope.INDEX)); + module.registerSetting(Setting.groupSetting("index.filter_test.", false, SettingsProperty.IndexScope)); module.registerSettingsFilter("index.filter_test.foo"); module.registerSettingsFilter("index.filter_test.bar*"); } diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 58f5cde65cea..7a2e424393bd 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; import org.elasticsearch.common.logging.ESLoggerFactory; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.index.IndexModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; @@ -38,8 +39,8 @@ import java.util.concurrent.atomic.AtomicReference; public class ScopedSettingsTests extends ESTestCase { public void testAddConsumer() { - Setting testSetting = Setting.intSetting("foo.bar", 1, true, Setting.Scope.CLUSTER); - Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, true, Setting.Scope.CLUSTER); + Setting testSetting = Setting.intSetting("foo.bar", 1, true, SettingsProperty.ClusterScope); + Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, true, SettingsProperty.ClusterScope); AbstractScopedSettings service = new 
ClusterSettings(Settings.EMPTY, Collections.singleton(testSetting)); AtomicInteger consumer = new AtomicInteger(); @@ -66,8 +67,8 @@ public class ScopedSettingsTests extends ESTestCase { } public void testApply() { - Setting testSetting = Setting.intSetting("foo.bar", 1, true, Setting.Scope.CLUSTER); - Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, true, Setting.Scope.CLUSTER); + Setting testSetting = Setting.intSetting("foo.bar", 1, true, SettingsProperty.ClusterScope); + Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, true, SettingsProperty.ClusterScope); AbstractScopedSettings service = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(testSetting, testSetting2))); AtomicInteger consumer = new AtomicInteger(); @@ -136,7 +137,10 @@ public class ScopedSettingsTests extends ESTestCase { } public void testIsDynamic(){ - ClusterSettings settings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(Setting.intSetting("foo.bar", 1, true, Setting.Scope.CLUSTER), Setting.intSetting("foo.bar.baz", 1, false, Setting.Scope.CLUSTER)))); + ClusterSettings settings = + new ClusterSettings(Settings.EMPTY, + new HashSet<>(Arrays.asList(Setting.intSetting("foo.bar", 1, true, SettingsProperty.ClusterScope), + Setting.intSetting("foo.bar.baz", 1, false, SettingsProperty.ClusterScope)))); assertFalse(settings.hasDynamicSetting("foo.bar.baz")); assertTrue(settings.hasDynamicSetting("foo.bar")); assertNotNull(settings.get("foo.bar.baz")); @@ -147,8 +151,8 @@ public class ScopedSettingsTests extends ESTestCase { } public void testDiff() throws IOException { - Setting foobarbaz = Setting.intSetting("foo.bar.baz", 1, false, Setting.Scope.CLUSTER); - Setting foobar = Setting.intSetting("foo.bar", 1, true, Setting.Scope.CLUSTER); + Setting foobarbaz = Setting.intSetting("foo.bar.baz", 1, false, SettingsProperty.ClusterScope); + Setting foobar = Setting.intSetting("foo.bar", 1, true, SettingsProperty.ClusterScope); ClusterSettings settings 
= new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(foobar, foobarbaz))); Settings diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.EMPTY); assertEquals(diff.getAsMap().size(), 1); @@ -237,22 +241,22 @@ public class ScopedSettingsTests extends ESTestCase { try { new IndexScopedSettings( - Settings.EMPTY, Collections.singleton(Setting.groupSetting("boo .", false, Setting.Scope.INDEX))); + Settings.EMPTY, Collections.singleton(Setting.groupSetting("boo .", false, SettingsProperty.IndexScope))); fail(); } catch (IllegalArgumentException e) { assertEquals("illegal settings key: [boo .]", e.getMessage()); } new IndexScopedSettings( - Settings.EMPTY, Collections.singleton(Setting.groupSetting("boo.", false, Setting.Scope.INDEX))); + Settings.EMPTY, Collections.singleton(Setting.groupSetting("boo.", false, SettingsProperty.IndexScope))); try { new IndexScopedSettings( - Settings.EMPTY, Collections.singleton(Setting.boolSetting("boo.", true, false, Setting.Scope.INDEX))); + Settings.EMPTY, Collections.singleton(Setting.boolSetting("boo.", true, false, SettingsProperty.IndexScope))); fail(); } catch (IllegalArgumentException e) { assertEquals("illegal settings key: [boo.]", e.getMessage()); } new IndexScopedSettings( - Settings.EMPTY, Collections.singleton(Setting.boolSetting("boo", true, false, Setting.Scope.INDEX))); + Settings.EMPTY, Collections.singleton(Setting.boolSetting("boo", true, false, SettingsProperty.IndexScope))); } public void testLoggingUpdates() { diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index f5b84fb366f1..ed9213d392af 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.settings; import org.elasticsearch.common.collect.Tuple; +import 
org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; @@ -27,26 +28,28 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.concurrent.atomic.AtomicReference; +import static org.hamcrest.Matchers.is; + public class SettingTests extends ESTestCase { public void testGet() { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.CLUSTER); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, SettingsProperty.ClusterScope); assertFalse(booleanSetting.get(Settings.EMPTY)); assertFalse(booleanSetting.get(Settings.builder().put("foo.bar", false).build())); assertTrue(booleanSetting.get(Settings.builder().put("foo.bar", true).build())); } public void testByteSize() { - Setting byteSizeValueSetting = Setting.byteSizeSetting("a.byte.size", new ByteSizeValue(1024), true, Setting.Scope.CLUSTER); + Setting byteSizeValueSetting = + Setting.byteSizeSetting("a.byte.size", new ByteSizeValue(1024), true, SettingsProperty.ClusterScope); assertFalse(byteSizeValueSetting.isGroupSetting()); ByteSizeValue byteSizeValue = byteSizeValueSetting.get(Settings.EMPTY); assertEquals(byteSizeValue.bytes(), 1024); - byteSizeValueSetting = Setting.byteSizeSetting("a.byte.size", s -> "2048b", true, Setting.Scope.CLUSTER); + byteSizeValueSetting = Setting.byteSizeSetting("a.byte.size", s -> "2048b", true, SettingsProperty.ClusterScope); byteSizeValue = byteSizeValueSetting.get(Settings.EMPTY); assertEquals(byteSizeValue.bytes(), 2048); @@ -65,7 +68,7 @@ public class SettingTests extends ESTestCase { } public void testSimpleUpdate() { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.CLUSTER); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, 
SettingsProperty.ClusterScope); AtomicReference atomicBoolean = new AtomicReference<>(null); ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(atomicBoolean::set, logger); Settings build = Settings.builder().put("foo.bar", false).build(); @@ -86,7 +89,7 @@ public class SettingTests extends ESTestCase { } public void testUpdateNotDynamic() { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, false, Setting.Scope.CLUSTER); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, false, SettingsProperty.ClusterScope); assertFalse(booleanSetting.isGroupSetting()); AtomicReference atomicBoolean = new AtomicReference<>(null); try { @@ -98,7 +101,7 @@ public class SettingTests extends ESTestCase { } public void testUpdaterIsIsolated() { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.CLUSTER); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, SettingsProperty.ClusterScope); AtomicReference ab1 = new AtomicReference<>(null); AtomicReference ab2 = new AtomicReference<>(null); ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(ab1::set, logger); @@ -109,24 +112,27 @@ public class SettingTests extends ESTestCase { public void testDefault() { TimeValue defautlValue = TimeValue.timeValueMillis(randomIntBetween(0, 1000000)); - Setting setting = Setting.positiveTimeSetting("my.time.value", defautlValue, randomBoolean(), Setting.Scope.CLUSTER); + Setting setting = + Setting.positiveTimeSetting("my.time.value", defautlValue, randomBoolean(), SettingsProperty.ClusterScope); assertFalse(setting.isGroupSetting()); String aDefault = setting.getDefaultRaw(Settings.EMPTY); assertEquals(defautlValue.millis() + "ms", aDefault); assertEquals(defautlValue.millis(), setting.get(Settings.EMPTY).millis()); assertEquals(defautlValue, setting.getDefault(Settings.EMPTY)); - Setting secondaryDefault = new Setting<>("foo.bar", (s) -> s.get("old.foo.bar", "some_default"), (s) 
-> s, randomBoolean(), Setting.Scope.CLUSTER); + Setting secondaryDefault = + new Setting<>("foo.bar", (s) -> s.get("old.foo.bar", "some_default"), (s) -> s, randomBoolean(), SettingsProperty.ClusterScope); assertEquals("some_default", secondaryDefault.get(Settings.EMPTY)); assertEquals("42", secondaryDefault.get(Settings.builder().put("old.foo.bar", 42).build())); - Setting secondaryDefaultViaSettings = new Setting<>("foo.bar", secondaryDefault, (s) -> s, randomBoolean(), Setting.Scope.CLUSTER); + Setting secondaryDefaultViaSettings = + new Setting<>("foo.bar", secondaryDefault, (s) -> s, randomBoolean(), SettingsProperty.ClusterScope); assertEquals("some_default", secondaryDefaultViaSettings.get(Settings.EMPTY)); assertEquals("42", secondaryDefaultViaSettings.get(Settings.builder().put("old.foo.bar", 42).build())); } public void testComplexType() { AtomicReference ref = new AtomicReference<>(null); - Setting setting = new Setting<>("foo.bar", (s) -> "", (s) -> new ComplexType(s), true, Setting.Scope.CLUSTER); + Setting setting = new Setting<>("foo.bar", (s) -> "", (s) -> new ComplexType(s), true, SettingsProperty.ClusterScope); assertFalse(setting.isGroupSetting()); ref.set(setting.get(Settings.EMPTY)); ComplexType type = ref.get(); @@ -147,15 +153,19 @@ public class SettingTests extends ESTestCase { } public void testType() { - Setting integerSetting = Setting.intSetting("foo.int.bar", 1, true, Setting.Scope.CLUSTER); - assertEquals(integerSetting.getScope(), Setting.Scope.CLUSTER); - integerSetting = Setting.intSetting("foo.int.bar", 1, true, Setting.Scope.INDEX); - assertEquals(integerSetting.getScope(), Setting.Scope.INDEX); + Setting integerSetting = Setting.intSetting("foo.int.bar", 1, true, SettingsProperty.ClusterScope); + assertThat(integerSetting.hasClusterScope(), is(true)); + assertThat(integerSetting.hasIndexScope(), is(false)); + assertThat(integerSetting.hasNodeScope(), is(false)); + integerSetting = Setting.intSetting("foo.int.bar", 1, true, 
SettingsProperty.IndexScope); + assertThat(integerSetting.hasIndexScope(), is(true)); + assertThat(integerSetting.hasClusterScope(), is(false)); + assertThat(integerSetting.hasNodeScope(), is(false)); } public void testGroups() { AtomicReference ref = new AtomicReference<>(null); - Setting setting = Setting.groupSetting("foo.bar.", true, Setting.Scope.CLUSTER); + Setting setting = Setting.groupSetting("foo.bar.", true, SettingsProperty.ClusterScope); assertTrue(setting.isGroupSetting()); ClusterSettings.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger); @@ -233,8 +243,8 @@ public class SettingTests extends ESTestCase { public void testComposite() { Composite c = new Composite(); - Setting a = Setting.intSetting("foo.int.bar.a", 1, true, Setting.Scope.CLUSTER); - Setting b = Setting.intSetting("foo.int.bar.b", 1, true, Setting.Scope.CLUSTER); + Setting a = Setting.intSetting("foo.int.bar.a", 1, true, SettingsProperty.ClusterScope); + Setting b = Setting.intSetting("foo.int.bar.b", 1, true, SettingsProperty.ClusterScope); ClusterSettings.SettingUpdater> settingUpdater = Setting.compoundUpdater(c::set, a, b, logger); assertFalse(settingUpdater.apply(Settings.EMPTY, Settings.EMPTY)); assertNull(c.a); @@ -262,7 +272,7 @@ public class SettingTests extends ESTestCase { } public void testListSettings() { - Setting> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(), true, Setting.Scope.CLUSTER); + Setting> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(), true, SettingsProperty.ClusterScope); List value = listSetting.get(Settings.EMPTY); assertEquals(1, value.size()); assertEquals("foo,bar", value.get(0)); @@ -301,7 +311,7 @@ public class SettingTests extends ESTestCase { assertEquals(1, ref.get().size()); assertEquals("foo,bar", ref.get().get(0)); - Setting> otherSettings = Setting.listSetting("foo.bar", Collections.emptyList(), Integer::parseInt, true, 
Setting.Scope.CLUSTER); + Setting> otherSettings = Setting.listSetting("foo.bar", Collections.emptyList(), Integer::parseInt, true, SettingsProperty.ClusterScope); List defaultValue = otherSettings.get(Settings.EMPTY); assertEquals(0, defaultValue.size()); List intValues = otherSettings.get(Settings.builder().put("foo.bar", "0,1,2,3").build()); @@ -310,7 +320,7 @@ public class SettingTests extends ESTestCase { assertEquals(i, intValues.get(i).intValue()); } - Setting> settingWithFallback = Setting.listSetting("foo.baz", listSetting, s -> s, true, Setting.Scope.CLUSTER); + Setting> settingWithFallback = Setting.listSetting("foo.baz", listSetting, s -> s, true, SettingsProperty.ClusterScope); value = settingWithFallback.get(Settings.EMPTY); assertEquals(1, value.size()); assertEquals("foo,bar", value.get(0)); @@ -332,7 +342,7 @@ public class SettingTests extends ESTestCase { } public void testListSettingAcceptsNumberSyntax() { - Setting> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(), true, Setting.Scope.CLUSTER); + Setting> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(), true, SettingsProperty.ClusterScope); List input = Arrays.asList("test", "test1, test2", "test", ",,,,"); Settings.Builder builder = Settings.builder().putArray("foo.bar", input.toArray(new String[0])); // try to parse this really annoying format @@ -350,7 +360,7 @@ public class SettingTests extends ESTestCase { } public void testDynamicKeySetting() { - Setting setting = Setting.dynamicKeySetting("foo.", "false", Boolean::parseBoolean, false, Setting.Scope.CLUSTER); + Setting setting = Setting.dynamicKeySetting("foo.", "false", Boolean::parseBoolean, false, SettingsProperty.ClusterScope); assertTrue(setting.hasComplexMatcher()); assertTrue(setting.match("foo.bar")); assertFalse(setting.match("foo")); @@ -367,7 +377,7 @@ public class SettingTests extends ESTestCase { } public void testMinMaxInt() { - Setting 
integerSetting = Setting.intSetting("foo.bar", 1, 0, 10, false, Setting.Scope.CLUSTER); + Setting integerSetting = Setting.intSetting("foo.bar", 1, 0, 10, false, SettingsProperty.ClusterScope); try { integerSetting.get(Settings.builder().put("foo.bar", 11).build()); fail(); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java index ce32be6c935e..3cd2bb2d0214 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.settings; import org.elasticsearch.common.inject.ModuleTestCase; +import org.elasticsearch.common.settings.Setting.SettingsProperty; public class SettingsModuleTests extends ModuleTestCase { @@ -45,13 +46,13 @@ public class SettingsModuleTests extends ModuleTestCase { { Settings settings = Settings.builder().put("some.custom.setting", "2.0").build(); SettingsModule module = new SettingsModule(settings); - module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, false, Setting.Scope.CLUSTER)); + module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, false, SettingsProperty.ClusterScope)); assertInstanceBinding(module, Settings.class, (s) -> s == settings); } { Settings settings = Settings.builder().put("some.custom.setting", "false").build(); SettingsModule module = new SettingsModule(settings); - module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, false, Setting.Scope.CLUSTER)); + module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, false, SettingsProperty.ClusterScope)); try { assertInstanceBinding(module, Settings.class, (s) -> s == settings); fail(); @@ -131,9 +132,9 @@ public class SettingsModuleTests extends ModuleTestCase { public void testRegisterSettingsFilter() { Settings settings = 
Settings.builder().put("foo.bar", "false").put("bar.foo", false).put("bar.baz", false).build(); SettingsModule module = new SettingsModule(settings); - module.registerSetting(Setting.boolSetting("foo.bar", true, false, Setting.Scope.CLUSTER)); - module.registerSetting(Setting.boolSetting("bar.foo", true, false, Setting.Scope.CLUSTER, true)); - module.registerSetting(Setting.boolSetting("bar.baz", true, false, Setting.Scope.CLUSTER)); + module.registerSetting(Setting.boolSetting("foo.bar", true, false, SettingsProperty.ClusterScope)); + module.registerSetting(Setting.boolSetting("bar.foo", true, false, SettingsProperty.ClusterScope, SettingsProperty.Filtered)); + module.registerSetting(Setting.boolSetting("bar.baz", true, false, SettingsProperty.ClusterScope)); module.registerSettingsFilter("foo.*"); try { diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 9e0c3776bf11..5a17caff67d8 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.env.Environment; @@ -194,9 +195,9 @@ public class IndexModuleTests extends ESTestCase { public void testListener() throws IOException { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.INDEX); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, SettingsProperty.IndexScope); IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings, booleanSetting), 
null, new AnalysisRegistry(null, environment)); - Setting booleanSetting2 = Setting.boolSetting("foo.bar.baz", false, true, Setting.Scope.INDEX); + Setting booleanSetting2 = Setting.boolSetting("foo.bar.baz", false, true, SettingsProperty.IndexScope); AtomicBoolean atomicBoolean = new AtomicBoolean(false); module.addSettingsUpdateConsumer(booleanSetting, atomicBoolean::set); diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index 677c8358fb01..ad8edee61bfb 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -44,7 +45,7 @@ public class IndexSettingsTests extends ESTestCase { Version version = VersionUtils.getPreviousVersion(); Settings theSettings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build(); final AtomicInteger integer = new AtomicInteger(0); - Setting integerSetting = Setting.intSetting("index.test.setting.int", -1, true, Setting.Scope.INDEX); + Setting integerSetting = Setting.intSetting("index.test.setting.int", -1, true, SettingsProperty.IndexScope); IndexMetaData metaData = newIndexMeta("index", theSettings); IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), Settings.EMPTY, integerSetting); settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, integer::set); @@ -65,8 +66,8 @@ 
public class IndexSettingsTests extends ESTestCase { .put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build(); final AtomicInteger integer = new AtomicInteger(0); final StringBuilder builder = new StringBuilder(); - Setting integerSetting = Setting.intSetting("index.test.setting.int", -1, true, Setting.Scope.INDEX); - Setting notUpdated = new Setting<>("index.not.updated", "", Function.identity(), true, Setting.Scope.INDEX); + Setting integerSetting = Setting.intSetting("index.test.setting.int", -1, true, SettingsProperty.IndexScope); + Setting notUpdated = new Setting<>("index.not.updated", "", Function.identity(), true, SettingsProperty.IndexScope); IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), Settings.EMPTY, integerSetting, notUpdated); settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, integer::set); @@ -128,7 +129,7 @@ public class IndexSettingsTests extends ESTestCase { Settings nodeSettings = Settings.settingsBuilder().put("index.foo.bar", 43).build(); final AtomicInteger indexValue = new AtomicInteger(0); - Setting integerSetting = Setting.intSetting("index.foo.bar", -1, true, Setting.Scope.INDEX); + Setting integerSetting = Setting.intSetting("index.foo.bar", -1, true, SettingsProperty.IndexScope); IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), nodeSettings, integerSetting); settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, indexValue::set); assertEquals(numReplicas, settings.getNumberOfReplicas()); diff --git a/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java b/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java index 7dbff244fcc8..69f5316d4d0b 100644 --- a/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java +++ b/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java @@ -21,6 +21,7 @@ package org.elasticsearch.index; import org.elasticsearch.common.inject.AbstractModule; import 
org.elasticsearch.common.inject.Module; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.plugins.Plugin; @@ -43,7 +44,7 @@ public class SettingsListenerIT extends ESIntegTestCase { public static class SettingsListenerPlugin extends Plugin { private final SettingsTestingService service = new SettingsTestingService(); - private static final Setting SETTING = Setting.intSetting("index.test.new.setting", 0, true, Setting.Scope.INDEX); + private static final Setting SETTING = Setting.intSetting("index.test.new.setting", 0, true, SettingsProperty.IndexScope); /** * The name of the plugin. */ @@ -93,7 +94,7 @@ public class SettingsListenerIT extends ESIntegTestCase { public static class SettingsTestingService { public volatile int value; - public static Setting VALUE = Setting.intSetting("index.test.new.setting", -1, -1, true, Setting.Scope.INDEX); + public static Setting VALUE = Setting.intSetting("index.test.new.setting", -1, -1, true, SettingsProperty.IndexScope); public void setValue(int value) { this.value = value; diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java index aad4e34c3da7..8dee6712833e 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java @@ -47,6 +47,7 @@ import org.elasticsearch.action.suggest.SuggestRequestBuilder; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.index.IndexNotFoundException; @@ -642,9 +643,12 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { return "a plugin that adds a dynamic tst setting"; } - private static final Setting INDEX_A = new Setting<>("index.a", "", Function.identity(), true, Setting.Scope.INDEX); - private static final Setting INDEX_C = new Setting<>("index.c", "", Function.identity(), true, Setting.Scope.INDEX); - private static final Setting INDEX_E = new Setting<>("index.e", "", Function.identity(), false, Setting.Scope.INDEX); + private static final Setting INDEX_A = + new Setting<>("index.a", "", Function.identity(), true, SettingsProperty.IndexScope); + private static final Setting INDEX_C = + new Setting<>("index.c", "", Function.identity(), true, SettingsProperty.IndexScope); + private static final Setting INDEX_E = + new Setting<>("index.e", "", Function.identity(), false, SettingsProperty.IndexScope); public void onModule(SettingsModule module) { diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index 35ed7a2c657d..d1ad28101ff6 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.client.Requests; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.unit.TimeValue; @@ -197,8 +198,10 @@ public class 
RandomExceptionCircuitBreakerIT extends ESIntegTestCase { // TODO: Generalize this class and add it as a utility public static class RandomExceptionDirectoryReaderWrapper extends MockEngineSupport.DirectoryReaderWrapper { - public static final Setting EXCEPTION_TOP_LEVEL_RATIO_SETTING = Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, Setting.Scope.INDEX); - public static final Setting EXCEPTION_LOW_LEVEL_RATIO_SETTING = Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, Setting.Scope.INDEX); + public static final Setting EXCEPTION_TOP_LEVEL_RATIO_SETTING = + Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, SettingsProperty.IndexScope); + public static final Setting EXCEPTION_LOW_LEVEL_RATIO_SETTING = + Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, SettingsProperty.IndexScope); public static class TestPlugin extends Plugin { @Override public String name() { diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java index d342402e4bf6..ee260a51b042 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java @@ -29,6 +29,7 @@ import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; import org.elasticsearch.common.settings.SettingsModule; @@ -152,8 +153,10 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase { public static class RandomExceptionDirectoryReaderWrapper 
extends MockEngineSupport.DirectoryReaderWrapper { public static class TestPlugin extends Plugin { - public static final Setting EXCEPTION_TOP_LEVEL_RATIO_SETTING = Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, Setting.Scope.INDEX); - public static final Setting EXCEPTION_LOW_LEVEL_RATIO_SETTING = Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, Setting.Scope.INDEX); + public static final Setting EXCEPTION_TOP_LEVEL_RATIO_SETTING = + Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, SettingsProperty.IndexScope); + public static final Setting EXCEPTION_LOW_LEVEL_RATIO_SETTING = + Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, SettingsProperty.IndexScope); @Override public String name() { return "random-exception-reader-wrapper"; diff --git a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java index 1e3832907533..02fd27a952d2 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java +++ b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java @@ -28,13 +28,11 @@ import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.env.Environment; import 
org.elasticsearch.index.snapshots.IndexShardRepository; @@ -50,8 +48,6 @@ import java.io.UnsupportedEncodingException; import java.nio.file.Path; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; -import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -65,9 +61,9 @@ public class MockRepository extends FsRepository { public static class Plugin extends org.elasticsearch.plugins.Plugin { public static final Setting USERNAME_SETTING = - Setting.simpleString("secret.mock.username", false, Setting.Scope.CLUSTER); + Setting.simpleString("secret.mock.username", false, SettingsProperty.ClusterScope); public static final Setting PASSWORD_SETTING = - Setting.simpleString("secret.mock.password", false, Setting.Scope.CLUSTER, true); + Setting.simpleString("secret.mock.password", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); @Override public String name() { diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java index 027caaccebc0..0a6a752908f1 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java @@ -22,7 +22,7 @@ package org.elasticsearch.cloud.azure.management; import com.microsoft.windowsazure.core.utils.KeyStoreType; import com.microsoft.windowsazure.management.compute.models.HostedServiceGetDetailedResponse; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Scope; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.unit.TimeValue; import 
org.elasticsearch.discovery.azure.AzureUnicastHostsProvider; @@ -30,27 +30,29 @@ public interface AzureComputeService { final class Management { public static final Setting SUBSCRIPTION_ID_SETTING = - Setting.simpleString("cloud.azure.management.subscription.id", false, Scope.CLUSTER, true); + Setting.simpleString("cloud.azure.management.subscription.id", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); public static final Setting SERVICE_NAME_SETTING = - Setting.simpleString("cloud.azure.management.cloud.service.name", false, Scope.CLUSTER); + Setting.simpleString("cloud.azure.management.cloud.service.name", false, SettingsProperty.ClusterScope); // Keystore settings public static final Setting KEYSTORE_PATH_SETTING = - Setting.simpleString("cloud.azure.management.keystore.path", false, Scope.CLUSTER, true); + Setting.simpleString("cloud.azure.management.keystore.path", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); public static final Setting KEYSTORE_PASSWORD_SETTING = - Setting.simpleString("cloud.azure.management.keystore.password", false, Scope.CLUSTER, true); + Setting.simpleString("cloud.azure.management.keystore.password", false, SettingsProperty.ClusterScope, + SettingsProperty.Filtered); public static final Setting KEYSTORE_TYPE_SETTING = new Setting<>("cloud.azure.management.keystore.type", KeyStoreType.pkcs12.name(), KeyStoreType::fromString, false, - Scope.CLUSTER, false); + SettingsProperty.ClusterScope, SettingsProperty.Filtered); } final class Discovery { public static final Setting REFRESH_SETTING = - Setting.positiveTimeSetting("discovery.azure.refresh_interval", TimeValue.timeValueSeconds(0), false, Scope.CLUSTER); + Setting.positiveTimeSetting("discovery.azure.refresh_interval", TimeValue.timeValueSeconds(0), false, + SettingsProperty.ClusterScope); public static final Setting HOST_TYPE_SETTING = new Setting<>("discovery.azure.host.type", AzureUnicastHostsProvider.HostType.PRIVATE_IP.name(), - 
AzureUnicastHostsProvider.HostType::fromString, false, Scope.CLUSTER); + AzureUnicastHostsProvider.HostType::fromString, false, SettingsProperty.ClusterScope); public static final String ENDPOINT_NAME = "discovery.azure.endpoint.name"; public static final String DEPLOYMENT_NAME = "discovery.azure.deployment.name"; diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java index ce34dd61f40b..b88704e18db6 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java @@ -22,6 +22,7 @@ package org.elasticsearch.cloud.aws; import com.amazonaws.Protocol; import com.amazonaws.services.ec2.AmazonEC2; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -32,7 +33,8 @@ import java.util.Locale; import java.util.function.Function; public interface AwsEc2Service { - Setting AUTO_ATTRIBUTE_SETTING = Setting.boolSetting("cloud.node.auto_attributes", false, false, Setting.Scope.CLUSTER); + Setting AUTO_ATTRIBUTE_SETTING = + Setting.boolSetting("cloud.node.auto_attributes", false, false, SettingsProperty.ClusterScope); // Global AWS settings (shared between discovery-ec2 and repository-s3) // Each setting starting with `cloud.aws` also exists in repository-s3 project. Don't forget to update @@ -40,40 +42,44 @@ public interface AwsEc2Service { /** * cloud.aws.access_key: AWS Access key. 
Shared with repository-s3 plugin */ - Setting KEY_SETTING = Setting.simpleString("cloud.aws.access_key", false, Setting.Scope.CLUSTER, true); + Setting KEY_SETTING = + Setting.simpleString("cloud.aws.access_key", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.secret_key: AWS Secret key. Shared with repository-s3 plugin */ - Setting SECRET_SETTING = Setting.simpleString("cloud.aws.secret_key", false, Setting.Scope.CLUSTER, true); + Setting SECRET_SETTING = + Setting.simpleString("cloud.aws.secret_key", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.protocol: Protocol for AWS API: http or https. Defaults to https. Shared with repository-s3 plugin */ Setting PROTOCOL_SETTING = new Setting<>("cloud.aws.protocol", "https", s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), - false, Setting.Scope.CLUSTER); + false, SettingsProperty.ClusterScope); /** * cloud.aws.proxy.host: In case of proxy, define its hostname/IP. Shared with repository-s3 plugin */ - Setting PROXY_HOST_SETTING = Setting.simpleString("cloud.aws.proxy.host", false, Setting.Scope.CLUSTER); + Setting PROXY_HOST_SETTING = Setting.simpleString("cloud.aws.proxy.host", false, SettingsProperty.ClusterScope); /** * cloud.aws.proxy.port: In case of proxy, define its port. Defaults to 80. Shared with repository-s3 plugin */ - Setting PROXY_PORT_SETTING = Setting.intSetting("cloud.aws.proxy.port", 80, 0, 1<<16, false, Setting.Scope.CLUSTER); + Setting PROXY_PORT_SETTING = Setting.intSetting("cloud.aws.proxy.port", 80, 0, 1<<16, false, SettingsProperty.ClusterScope); /** * cloud.aws.proxy.username: In case of proxy with auth, define the username. 
Shared with repository-s3 plugin */ - Setting PROXY_USERNAME_SETTING = Setting.simpleString("cloud.aws.proxy.username", false, Setting.Scope.CLUSTER); + Setting PROXY_USERNAME_SETTING = Setting.simpleString("cloud.aws.proxy.username", false, SettingsProperty.ClusterScope); /** * cloud.aws.proxy.password: In case of proxy with auth, define the password. Shared with repository-s3 plugin */ - Setting PROXY_PASSWORD_SETTING = Setting.simpleString("cloud.aws.proxy.password", false, Setting.Scope.CLUSTER, true); + Setting PROXY_PASSWORD_SETTING = + Setting.simpleString("cloud.aws.proxy.password", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.signer: If you are using an old AWS API version, you can define a Signer. Shared with repository-s3 plugin */ - Setting SIGNER_SETTING = Setting.simpleString("cloud.aws.signer", false, Setting.Scope.CLUSTER); + Setting SIGNER_SETTING = Setting.simpleString("cloud.aws.signer", false, SettingsProperty.ClusterScope); /** * cloud.aws.region: Region. Shared with repository-s3 plugin */ - Setting REGION_SETTING = new Setting<>("cloud.aws.region", "", s -> s.toLowerCase(Locale.ROOT), false, Setting.Scope.CLUSTER); + Setting REGION_SETTING = + new Setting<>("cloud.aws.region", "", s -> s.toLowerCase(Locale.ROOT), false, SettingsProperty.ClusterScope); /** * Defines specific ec2 settings starting with cloud.aws.ec2. @@ -84,62 +90,62 @@ public interface AwsEc2Service { * @see AwsEc2Service#KEY_SETTING */ Setting KEY_SETTING = new Setting<>("cloud.aws.ec2.access_key", AwsEc2Service.KEY_SETTING, Function.identity(), false, - Setting.Scope.CLUSTER, true); + SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.ec2.secret_key: AWS Secret key specific for EC2 API calls. Defaults to cloud.aws.secret_key. 
* @see AwsEc2Service#SECRET_SETTING */ Setting SECRET_SETTING = new Setting<>("cloud.aws.ec2.secret_key", AwsEc2Service.SECRET_SETTING, Function.identity(), false, - Setting.Scope.CLUSTER, true); + SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.ec2.protocol: Protocol for AWS API specific for EC2 API calls: http or https. Defaults to cloud.aws.protocol. * @see AwsEc2Service#PROTOCOL_SETTING */ Setting PROTOCOL_SETTING = new Setting<>("cloud.aws.ec2.protocol", AwsEc2Service.PROTOCOL_SETTING, - s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), false, Setting.Scope.CLUSTER); + s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), false, SettingsProperty.ClusterScope); /** * cloud.aws.ec2.proxy.host: In case of proxy, define its hostname/IP specific for EC2 API calls. Defaults to cloud.aws.proxy.host. * @see AwsEc2Service#PROXY_HOST_SETTING */ Setting PROXY_HOST_SETTING = new Setting<>("cloud.aws.ec2.proxy.host", AwsEc2Service.PROXY_HOST_SETTING, - Function.identity(), false, Setting.Scope.CLUSTER); + Function.identity(), false, SettingsProperty.ClusterScope); /** * cloud.aws.ec2.proxy.port: In case of proxy, define its port specific for EC2 API calls. Defaults to cloud.aws.proxy.port. * @see AwsEc2Service#PROXY_PORT_SETTING */ Setting PROXY_PORT_SETTING = new Setting<>("cloud.aws.ec2.proxy.port", AwsEc2Service.PROXY_PORT_SETTING, - s -> Setting.parseInt(s, 0, 1<<16, "cloud.aws.ec2.proxy.port"), false, Setting.Scope.CLUSTER); + s -> Setting.parseInt(s, 0, 1<<16, "cloud.aws.ec2.proxy.port"), false, SettingsProperty.ClusterScope); /** * cloud.aws.ec2.proxy.username: In case of proxy with auth, define the username specific for EC2 API calls. * Defaults to cloud.aws.proxy.username. 
* @see AwsEc2Service#PROXY_USERNAME_SETTING */ Setting PROXY_USERNAME_SETTING = new Setting<>("cloud.aws.ec2.proxy.username", AwsEc2Service.PROXY_USERNAME_SETTING, - Function.identity(), false, Setting.Scope.CLUSTER); + Function.identity(), false, SettingsProperty.ClusterScope); /** * cloud.aws.ec2.proxy.password: In case of proxy with auth, define the password specific for EC2 API calls. * Defaults to cloud.aws.proxy.password. * @see AwsEc2Service#PROXY_PASSWORD_SETTING */ Setting PROXY_PASSWORD_SETTING = new Setting<>("cloud.aws.ec2.proxy.password", AwsEc2Service.PROXY_PASSWORD_SETTING, - Function.identity(), false, Setting.Scope.CLUSTER, true); + Function.identity(), false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.ec2.signer: If you are using an old AWS API version, you can define a Signer. Specific for EC2 API calls. * Defaults to cloud.aws.signer. * @see AwsEc2Service#SIGNER_SETTING */ Setting SIGNER_SETTING = new Setting<>("cloud.aws.ec2.signer", AwsEc2Service.SIGNER_SETTING, Function.identity(), - false, Setting.Scope.CLUSTER); + false, SettingsProperty.ClusterScope); /** * cloud.aws.ec2.region: Region specific for EC2 API calls. Defaults to cloud.aws.region. * @see AwsEc2Service#REGION_SETTING */ Setting REGION_SETTING = new Setting<>("cloud.aws.ec2.region", AwsEc2Service.REGION_SETTING, - s -> s.toLowerCase(Locale.ROOT), false, Setting.Scope.CLUSTER); + s -> s.toLowerCase(Locale.ROOT), false, SettingsProperty.ClusterScope); /** * cloud.aws.ec2.endpoint: Endpoint. If not set, endpoint will be guessed based on region setting. 
*/ - Setting ENDPOINT_SETTING = Setting.simpleString("cloud.aws.ec2.endpoint", false, Setting.Scope.CLUSTER); + Setting ENDPOINT_SETTING = Setting.simpleString("cloud.aws.ec2.endpoint", false, SettingsProperty.ClusterScope); } /** @@ -159,31 +165,31 @@ public interface AwsEc2Service { */ Setting HOST_TYPE_SETTING = new Setting<>("discovery.ec2.host_type", HostType.PRIVATE_IP.name(), s -> HostType.valueOf(s.toUpperCase(Locale.ROOT)), false, - Setting.Scope.CLUSTER); + SettingsProperty.ClusterScope); /** * discovery.ec2.any_group: If set to false, will require all security groups to be present for the instance to be used for the * discovery. Defaults to true. */ Setting ANY_GROUP_SETTING = - Setting.boolSetting("discovery.ec2.any_group", true, false, Setting.Scope.CLUSTER); + Setting.boolSetting("discovery.ec2.any_group", true, false, SettingsProperty.ClusterScope); /** * discovery.ec2.groups: Either a comma separated list or array based list of (security) groups. Only instances with the provided * security groups will be used in the cluster discovery. (NOTE: You could provide either group NAME or group ID.) */ Setting> GROUPS_SETTING = - Setting.listSetting("discovery.ec2.groups", new ArrayList<>(), s -> s.toString(), false, Setting.Scope.CLUSTER); + Setting.listSetting("discovery.ec2.groups", new ArrayList<>(), s -> s.toString(), false, SettingsProperty.ClusterScope); /** * discovery.ec2.availability_zones: Either a comma separated list or array based list of availability zones. Only instances within * the provided availability zones will be used in the cluster discovery. */ Setting> AVAILABILITY_ZONES_SETTING = Setting.listSetting("discovery.ec2.availability_zones", Collections.emptyList(), s -> s.toString(), false, - Setting.Scope.CLUSTER); + SettingsProperty.ClusterScope); /** * discovery.ec2.node_cache_time: How long the list of hosts is cached to prevent further requests to the AWS API. Defaults to 10s. 
*/ Setting NODE_CACHE_TIME_SETTING = - Setting.timeSetting("discovery.ec2.node_cache_time", TimeValue.timeValueSeconds(10), false, Setting.Scope.CLUSTER); + Setting.timeSetting("discovery.ec2.node_cache_time", TimeValue.timeValueSeconds(10), false, SettingsProperty.ClusterScope); /** * discovery.ec2.tag.*: The ec2 discovery can filter machines to include in the cluster based on tags (and not just groups). @@ -191,7 +197,7 @@ public interface AwsEc2Service { * instances with a tag key set to stage, and a value of dev. Several tags set will require all of those tags to be set for the * instance to be included. */ - Setting TAG_SETTING = Setting.groupSetting("discovery.ec2.tag.", false,Setting.Scope.CLUSTER); + Setting TAG_SETTING = Setting.groupSetting("discovery.ec2.tag.", false, SettingsProperty.ClusterScope); } AmazonEC2 client(); diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java index 58e93c9dd646..53e0c10d058f 100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -71,9 +72,12 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; public class AttachmentMapper extends FieldMapper { private static ESLogger logger = 
ESLoggerFactory.getLogger("mapper.attachment"); - public static final Setting INDEX_ATTACHMENT_IGNORE_ERRORS_SETTING = Setting.boolSetting("index.mapping.attachment.ignore_errors", true, false, Setting.Scope.INDEX); - public static final Setting INDEX_ATTACHMENT_DETECT_LANGUAGE_SETTING = Setting.boolSetting("index.mapping.attachment.detect_language", false, false, Setting.Scope.INDEX); - public static final Setting INDEX_ATTACHMENT_INDEXED_CHARS_SETTING = Setting.intSetting("index.mapping.attachment.indexed_chars", 100000, false, Setting.Scope.INDEX); + public static final Setting INDEX_ATTACHMENT_IGNORE_ERRORS_SETTING = + Setting.boolSetting("index.mapping.attachment.ignore_errors", true, false, SettingsProperty.IndexScope); + public static final Setting INDEX_ATTACHMENT_DETECT_LANGUAGE_SETTING = + Setting.boolSetting("index.mapping.attachment.detect_language", false, false, SettingsProperty.IndexScope); + public static final Setting INDEX_ATTACHMENT_INDEXED_CHARS_SETTING = + Setting.intSetting("index.mapping.attachment.indexed_chars", 100000, false, SettingsProperty.IndexScope); public static final String CONTENT_TYPE = "attachment"; diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java index 2c5521887d8b..f16e9b6729cf 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java @@ -24,6 +24,7 @@ import com.microsoft.azure.storage.StorageException; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -31,7 +32,6 @@ import 
java.io.InputStream; import java.io.OutputStream; import java.net.URISyntaxException; import java.util.Map; -import java.util.function.Function; /** * Azure Storage Service interface @@ -42,19 +42,19 @@ public interface AzureStorageService { final class Storage { public static final String PREFIX = "cloud.azure.storage."; public static final Setting TIMEOUT_SETTING = - Setting.timeSetting("cloud.azure.storage.timeout", TimeValue.timeValueMinutes(5), false, Setting.Scope.CLUSTER); + Setting.timeSetting("cloud.azure.storage.timeout", TimeValue.timeValueMinutes(5), false, SettingsProperty.ClusterScope); public static final Setting ACCOUNT_SETTING = - Setting.simpleString("repositories.azure.account", false, Setting.Scope.CLUSTER, true); + Setting.simpleString("repositories.azure.account", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); public static final Setting CONTAINER_SETTING = - Setting.simpleString("repositories.azure.container", false, Setting.Scope.CLUSTER); + Setting.simpleString("repositories.azure.container", false, SettingsProperty.ClusterScope); public static final Setting BASE_PATH_SETTING = - Setting.simpleString("repositories.azure.base_path", false, Setting.Scope.CLUSTER); + Setting.simpleString("repositories.azure.base_path", false, SettingsProperty.ClusterScope); public static final Setting LOCATION_MODE_SETTING = - Setting.simpleString("repositories.azure.location_mode", false, Setting.Scope.CLUSTER); + Setting.simpleString("repositories.azure.location_mode", false, SettingsProperty.ClusterScope); public static final Setting CHUNK_SIZE_SETTING = - Setting.byteSizeSetting("repositories.azure.chunk_size", new ByteSizeValue(-1), false, Setting.Scope.CLUSTER); + Setting.byteSizeSetting("repositories.azure.chunk_size", new ByteSizeValue(-1), false, SettingsProperty.ClusterScope); public static final Setting COMPRESS_SETTING = - Setting.boolSetting("repositories.azure.compress", false, false, Setting.Scope.CLUSTER); + 
Setting.boolSetting("repositories.azure.compress", false, false, SettingsProperty.ClusterScope); } boolean doesContainerExist(String account, LocationMode mode, String container); diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index f2773bccbbdb..013007a84a7f 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -67,12 +68,17 @@ public class AzureRepository extends BlobStoreRepository { public final static String TYPE = "azure"; public static final class Repository { - public static final Setting ACCOUNT_SETTING = Setting.simpleString("account", false, Setting.Scope.CLUSTER); - public static final Setting CONTAINER_SETTING = new Setting<>("container", "elasticsearch-snapshots", Function.identity(), false, Setting.Scope.CLUSTER); - public static final Setting BASE_PATH_SETTING = Setting.simpleString("base_path", false, Setting.Scope.CLUSTER); - public static final Setting LOCATION_MODE_SETTING = Setting.simpleString("location_mode", false, Setting.Scope.CLUSTER); - public static final Setting CHUNK_SIZE_SETTING = Setting.byteSizeSetting("chunk_size", MAX_CHUNK_SIZE, false, Setting.Scope.CLUSTER); - public static final Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, false, 
Setting.Scope.CLUSTER); + public static final Setting ACCOUNT_SETTING = + Setting.simpleString("account", false, SettingsProperty.ClusterScope); + public static final Setting CONTAINER_SETTING = + new Setting<>("container", "elasticsearch-snapshots", Function.identity(), false, SettingsProperty.ClusterScope); + public static final Setting BASE_PATH_SETTING = Setting.simpleString("base_path", false, SettingsProperty.ClusterScope); + public static final Setting LOCATION_MODE_SETTING = + Setting.simpleString("location_mode", false, SettingsProperty.ClusterScope); + public static final Setting CHUNK_SIZE_SETTING = + Setting.byteSizeSetting("chunk_size", MAX_CHUNK_SIZE, false, SettingsProperty.ClusterScope); + public static final Setting COMPRESS_SETTING = + Setting.boolSetting("compress", false, false, SettingsProperty.ClusterScope); } private final AzureBlobStore blobStore; diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java index 3af9446fbe9f..22bc136523ba 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java @@ -23,6 +23,7 @@ import com.amazonaws.Protocol; import com.amazonaws.services.s3.AmazonS3; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import java.util.Locale; import java.util.function.Function; @@ -38,40 +39,44 @@ public interface AwsS3Service extends LifecycleComponent { /** * cloud.aws.access_key: AWS Access key. 
Shared with discovery-ec2 plugin */ - Setting KEY_SETTING = Setting.simpleString("cloud.aws.access_key", false, Setting.Scope.CLUSTER, true); + Setting KEY_SETTING = + Setting.simpleString("cloud.aws.access_key", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.secret_key: AWS Secret key. Shared with discovery-ec2 plugin */ - Setting SECRET_SETTING = Setting.simpleString("cloud.aws.secret_key", false, Setting.Scope.CLUSTER, true); + Setting SECRET_SETTING = + Setting.simpleString("cloud.aws.secret_key", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.protocol: Protocol for AWS API: http or https. Defaults to https. Shared with discovery-ec2 plugin */ Setting PROTOCOL_SETTING = new Setting<>("cloud.aws.protocol", "https", s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), - false, Setting.Scope.CLUSTER); + false, SettingsProperty.ClusterScope); /** * cloud.aws.proxy.host: In case of proxy, define its hostname/IP. Shared with discovery-ec2 plugin */ - Setting PROXY_HOST_SETTING = Setting.simpleString("cloud.aws.proxy.host", false, Setting.Scope.CLUSTER); + Setting PROXY_HOST_SETTING = Setting.simpleString("cloud.aws.proxy.host", false, SettingsProperty.ClusterScope); /** * cloud.aws.proxy.port: In case of proxy, define its port. Defaults to 80. Shared with discovery-ec2 plugin */ - Setting PROXY_PORT_SETTING = Setting.intSetting("cloud.aws.proxy.port", 80, 0, 1<<16, false, Setting.Scope.CLUSTER); + Setting PROXY_PORT_SETTING = Setting.intSetting("cloud.aws.proxy.port", 80, 0, 1<<16, false, SettingsProperty.ClusterScope); /** * cloud.aws.proxy.username: In case of proxy with auth, define the username. 
Shared with discovery-ec2 plugin */ - Setting PROXY_USERNAME_SETTING = Setting.simpleString("cloud.aws.proxy.username", false, Setting.Scope.CLUSTER); + Setting PROXY_USERNAME_SETTING = Setting.simpleString("cloud.aws.proxy.username", false, SettingsProperty.ClusterScope); /** * cloud.aws.proxy.password: In case of proxy with auth, define the password. Shared with discovery-ec2 plugin */ - Setting PROXY_PASSWORD_SETTING = Setting.simpleString("cloud.aws.proxy.password", false, Setting.Scope.CLUSTER, true); + Setting PROXY_PASSWORD_SETTING = + Setting.simpleString("cloud.aws.proxy.password", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.signer: If you are using an old AWS API version, you can define a Signer. Shared with discovery-ec2 plugin */ - Setting SIGNER_SETTING = Setting.simpleString("cloud.aws.signer", false, Setting.Scope.CLUSTER); + Setting SIGNER_SETTING = Setting.simpleString("cloud.aws.signer", false, SettingsProperty.ClusterScope); /** * cloud.aws.region: Region. Shared with discovery-ec2 plugin */ - Setting REGION_SETTING = new Setting<>("cloud.aws.region", "", s -> s.toLowerCase(Locale.ROOT), false, Setting.Scope.CLUSTER); + Setting REGION_SETTING = + new Setting<>("cloud.aws.region", "", s -> s.toLowerCase(Locale.ROOT), false, SettingsProperty.ClusterScope); /** * Defines specific s3 settings starting with cloud.aws.s3. @@ -82,33 +87,36 @@ public interface AwsS3Service extends LifecycleComponent { * @see AwsS3Service#KEY_SETTING */ Setting KEY_SETTING = - new Setting<>("cloud.aws.s3.access_key", AwsS3Service.KEY_SETTING, Function.identity(), false, Setting.Scope.CLUSTER, true); + new Setting<>("cloud.aws.s3.access_key", AwsS3Service.KEY_SETTING, Function.identity(), false, + SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.s3.secret_key: AWS Secret key specific for S3 API calls. Defaults to cloud.aws.secret_key. 
* @see AwsS3Service#SECRET_SETTING */ Setting SECRET_SETTING = - new Setting<>("cloud.aws.s3.secret_key", AwsS3Service.SECRET_SETTING, Function.identity(), false, Setting.Scope.CLUSTER, true); + new Setting<>("cloud.aws.s3.secret_key", AwsS3Service.SECRET_SETTING, Function.identity(), false, + SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.s3.protocol: Protocol for AWS API specific for S3 API calls: http or https. Defaults to cloud.aws.protocol. * @see AwsS3Service#PROTOCOL_SETTING */ Setting PROTOCOL_SETTING = new Setting<>("cloud.aws.s3.protocol", AwsS3Service.PROTOCOL_SETTING, s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), false, - Setting.Scope.CLUSTER); + SettingsProperty.ClusterScope); /** * cloud.aws.s3.proxy.host: In case of proxy, define its hostname/IP specific for S3 API calls. Defaults to cloud.aws.proxy.host. * @see AwsS3Service#PROXY_HOST_SETTING */ Setting PROXY_HOST_SETTING = - new Setting<>("cloud.aws.s3.proxy.host", AwsS3Service.PROXY_HOST_SETTING, Function.identity(), false, Setting.Scope.CLUSTER); + new Setting<>("cloud.aws.s3.proxy.host", AwsS3Service.PROXY_HOST_SETTING, Function.identity(), false, + SettingsProperty.ClusterScope); /** * cloud.aws.s3.proxy.port: In case of proxy, define its port specific for S3 API calls. Defaults to cloud.aws.proxy.port. * @see AwsS3Service#PROXY_PORT_SETTING */ Setting PROXY_PORT_SETTING = new Setting<>("cloud.aws.s3.proxy.port", AwsS3Service.PROXY_PORT_SETTING, - s -> Setting.parseInt(s, 0, 1<<16, "cloud.aws.s3.proxy.port"), false, Setting.Scope.CLUSTER); + s -> Setting.parseInt(s, 0, 1<<16, "cloud.aws.s3.proxy.port"), false, SettingsProperty.ClusterScope); /** * cloud.aws.s3.proxy.username: In case of proxy with auth, define the username specific for S3 API calls. * Defaults to cloud.aws.proxy.username. 
@@ -116,7 +124,7 @@ public interface AwsS3Service extends LifecycleComponent { */ Setting PROXY_USERNAME_SETTING = new Setting<>("cloud.aws.s3.proxy.username", AwsS3Service.PROXY_USERNAME_SETTING, Function.identity(), false, - Setting.Scope.CLUSTER); + SettingsProperty.ClusterScope); /** * cloud.aws.s3.proxy.password: In case of proxy with auth, define the password specific for S3 API calls. * Defaults to cloud.aws.proxy.password. @@ -124,26 +132,26 @@ public interface AwsS3Service extends LifecycleComponent { */ Setting PROXY_PASSWORD_SETTING = new Setting<>("cloud.aws.s3.proxy.password", AwsS3Service.PROXY_PASSWORD_SETTING, Function.identity(), false, - Setting.Scope.CLUSTER, true); + SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.s3.signer: If you are using an old AWS API version, you can define a Signer. Specific for S3 API calls. * Defaults to cloud.aws.signer. * @see AwsS3Service#SIGNER_SETTING */ Setting SIGNER_SETTING = - new Setting<>("cloud.aws.s3.signer", AwsS3Service.SIGNER_SETTING, Function.identity(), false, Setting.Scope.CLUSTER); + new Setting<>("cloud.aws.s3.signer", AwsS3Service.SIGNER_SETTING, Function.identity(), false, SettingsProperty.ClusterScope); /** * cloud.aws.s3.region: Region specific for S3 API calls. Defaults to cloud.aws.region. * @see AwsS3Service#REGION_SETTING */ Setting REGION_SETTING = new Setting<>("cloud.aws.s3.region", AwsS3Service.REGION_SETTING, s -> s.toLowerCase(Locale.ROOT), false, - Setting.Scope.CLUSTER); + SettingsProperty.ClusterScope); /** * cloud.aws.s3.endpoint: Endpoint. If not set, endpoint will be guessed based on region setting. 
*/ Setting ENDPOINT_SETTING = - Setting.simpleString("cloud.aws.s3.endpoint", false, Setting.Scope.CLUSTER); + Setting.simpleString("cloud.aws.s3.endpoint", false, SettingsProperty.ClusterScope); } AmazonS3 client(String endpoint, Protocol protocol, String region, String account, String key, Integer maxRetries); diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index 6b3b5bf943a4..3fdc8a487aab 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.snapshots.IndexShardRepository; @@ -65,70 +66,78 @@ public class S3Repository extends BlobStoreRepository { * repositories.s3.access_key: AWS Access key specific for all S3 Repositories API calls. Defaults to cloud.aws.s3.access_key. * @see CLOUD_S3#KEY_SETTING */ - Setting KEY_SETTING = new Setting<>("repositories.s3.access_key", CLOUD_S3.KEY_SETTING, Function.identity(), false, Setting.Scope.CLUSTER); + Setting KEY_SETTING = + new Setting<>("repositories.s3.access_key", CLOUD_S3.KEY_SETTING, Function.identity(), false, SettingsProperty.ClusterScope); /** * repositories.s3.secret_key: AWS Secret key specific for all S3 Repositories API calls. Defaults to cloud.aws.s3.secret_key. 
* @see CLOUD_S3#SECRET_SETTING */ - Setting SECRET_SETTING = new Setting<>("repositories.s3.secret_key", CLOUD_S3.SECRET_SETTING, Function.identity(), false, Setting.Scope.CLUSTER); + Setting SECRET_SETTING = + new Setting<>("repositories.s3.secret_key", CLOUD_S3.SECRET_SETTING, Function.identity(), false, SettingsProperty.ClusterScope); /** * repositories.s3.region: Region specific for all S3 Repositories API calls. Defaults to cloud.aws.s3.region. * @see CLOUD_S3#REGION_SETTING */ - Setting REGION_SETTING = new Setting<>("repositories.s3.region", CLOUD_S3.REGION_SETTING, s -> s.toLowerCase(Locale.ROOT), false, Setting.Scope.CLUSTER); + Setting REGION_SETTING = + new Setting<>("repositories.s3.region", CLOUD_S3.REGION_SETTING, s -> s.toLowerCase(Locale.ROOT), false, SettingsProperty.ClusterScope); /** * repositories.s3.endpoint: Endpoint specific for all S3 Repositories API calls. Defaults to cloud.aws.s3.endpoint. * @see CLOUD_S3#ENDPOINT_SETTING */ - Setting ENDPOINT_SETTING = new Setting<>("repositories.s3.endpoint", CLOUD_S3.ENDPOINT_SETTING, s -> s.toLowerCase(Locale.ROOT), false, Setting.Scope.CLUSTER); + Setting ENDPOINT_SETTING = + new Setting<>("repositories.s3.endpoint", CLOUD_S3.ENDPOINT_SETTING, s -> s.toLowerCase(Locale.ROOT), false, SettingsProperty.ClusterScope); /** * repositories.s3.protocol: Protocol specific for all S3 Repositories API calls. Defaults to cloud.aws.s3.protocol. * @see CLOUD_S3#PROTOCOL_SETTING */ - Setting PROTOCOL_SETTING = new Setting<>("repositories.s3.protocol", CLOUD_S3.PROTOCOL_SETTING, s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), false, Setting.Scope.CLUSTER); + Setting PROTOCOL_SETTING = + new Setting<>("repositories.s3.protocol", CLOUD_S3.PROTOCOL_SETTING, s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), false, SettingsProperty.ClusterScope); /** * repositories.s3.bucket: The name of the bucket to be used for snapshots. 
*/ - Setting BUCKET_SETTING = Setting.simpleString("repositories.s3.bucket", false, Setting.Scope.CLUSTER); + Setting BUCKET_SETTING = Setting.simpleString("repositories.s3.bucket", false, SettingsProperty.ClusterScope); /** * repositories.s3.server_side_encryption: When set to true files are encrypted on server side using AES256 algorithm. * Defaults to false. */ - Setting SERVER_SIDE_ENCRYPTION_SETTING = Setting.boolSetting("repositories.s3.server_side_encryption", false, false, Setting.Scope.CLUSTER); + Setting SERVER_SIDE_ENCRYPTION_SETTING = + Setting.boolSetting("repositories.s3.server_side_encryption", false, false, SettingsProperty.ClusterScope); /** * repositories.s3.buffer_size: Minimum threshold below which the chunk is uploaded using a single request. Beyond this threshold, * the S3 repository will use the AWS Multipart Upload API to split the chunk into several parts, each of buffer_size length, and * to upload each part in its own request. Note that setting a buffer size lower than 5mb is not allowed since it will prevents the * use of the Multipart API and may result in upload errors. Defaults to 5mb. */ - Setting BUFFER_SIZE_SETTING = Setting.byteSizeSetting("repositories.s3.buffer_size", S3BlobStore.MIN_BUFFER_SIZE, false, Setting.Scope.CLUSTER); + Setting BUFFER_SIZE_SETTING = + Setting.byteSizeSetting("repositories.s3.buffer_size", S3BlobStore.MIN_BUFFER_SIZE, false, SettingsProperty.ClusterScope); /** * repositories.s3.max_retries: Number of retries in case of S3 errors. Defaults to 3. */ - Setting MAX_RETRIES_SETTING = Setting.intSetting("repositories.s3.max_retries", 3, false, Setting.Scope.CLUSTER); + Setting MAX_RETRIES_SETTING = Setting.intSetting("repositories.s3.max_retries", 3, false, SettingsProperty.ClusterScope); /** * repositories.s3.chunk_size: Big files can be broken down into chunks during snapshotting if needed. Defaults to 100m. 
*/ - Setting CHUNK_SIZE_SETTING = Setting.byteSizeSetting("repositories.s3.chunk_size", new ByteSizeValue(100, ByteSizeUnit.MB), false, Setting.Scope.CLUSTER); + Setting CHUNK_SIZE_SETTING = + Setting.byteSizeSetting("repositories.s3.chunk_size", new ByteSizeValue(100, ByteSizeUnit.MB), false, SettingsProperty.ClusterScope); /** * repositories.s3.compress: When set to true metadata files are stored in compressed format. This setting doesn’t affect index * files that are already compressed by default. Defaults to false. */ - Setting COMPRESS_SETTING = Setting.boolSetting("repositories.s3.compress", false, false, Setting.Scope.CLUSTER); + Setting COMPRESS_SETTING = Setting.boolSetting("repositories.s3.compress", false, false, SettingsProperty.ClusterScope); /** * repositories.s3.storage_class: Sets the S3 storage class type for the backup files. Values may be standard, reduced_redundancy, * standard_ia. Defaults to standard. */ - Setting STORAGE_CLASS_SETTING = Setting.simpleString("repositories.s3.storage_class", false, Setting.Scope.CLUSTER); + Setting STORAGE_CLASS_SETTING = Setting.simpleString("repositories.s3.storage_class", false, SettingsProperty.ClusterScope); /** * repositories.s3.canned_acl: The S3 repository supports all S3 canned ACLs : private, public-read, public-read-write, * authenticated-read, log-delivery-write, bucket-owner-read, bucket-owner-full-control. Defaults to private. */ - Setting CANNED_ACL_SETTING = Setting.simpleString("repositories.s3.canned_acl", false, Setting.Scope.CLUSTER); + Setting CANNED_ACL_SETTING = Setting.simpleString("repositories.s3.canned_acl", false, SettingsProperty.ClusterScope); /** * repositories.s3.base_path: Specifies the path within bucket to repository data. Defaults to root directory. 
*/ - Setting BASE_PATH_SETTING = Setting.simpleString("repositories.s3.base_path", false, Setting.Scope.CLUSTER); + Setting BASE_PATH_SETTING = Setting.simpleString("repositories.s3.base_path", false, SettingsProperty.ClusterScope); } /** @@ -140,72 +149,77 @@ public class S3Repository extends BlobStoreRepository { * access_key * @see Repositories#KEY_SETTING */ - Setting KEY_SETTING = Setting.simpleString("access_key", false, Setting.Scope.CLUSTER, true); + Setting KEY_SETTING = + Setting.simpleString("access_key", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * secret_key * @see Repositories#SECRET_SETTING */ - Setting SECRET_SETTING = Setting.simpleString("secret_key", false, Setting.Scope.CLUSTER, true); + Setting SECRET_SETTING = + Setting.simpleString("secret_key", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * bucket * @see Repositories#BUCKET_SETTING */ - Setting BUCKET_SETTING = Setting.simpleString("bucket", false, Setting.Scope.CLUSTER); + Setting BUCKET_SETTING = Setting.simpleString("bucket", false, SettingsProperty.ClusterScope); /** * endpoint * @see Repositories#ENDPOINT_SETTING */ - Setting ENDPOINT_SETTING = Setting.simpleString("endpoint", false, Setting.Scope.CLUSTER); + Setting ENDPOINT_SETTING = Setting.simpleString("endpoint", false, SettingsProperty.ClusterScope); /** * protocol * @see Repositories#PROTOCOL_SETTING */ - Setting PROTOCOL_SETTING = new Setting<>("protocol", "https", s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), false, Setting.Scope.CLUSTER); + Setting PROTOCOL_SETTING = + new Setting<>("protocol", "https", s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), false, SettingsProperty.ClusterScope); /** * region * @see Repositories#REGION_SETTING */ - Setting REGION_SETTING = new Setting<>("region", "", s -> s.toLowerCase(Locale.ROOT), false, Setting.Scope.CLUSTER); + Setting REGION_SETTING = new Setting<>("region", "", s -> s.toLowerCase(Locale.ROOT), false, 
SettingsProperty.ClusterScope); /** * server_side_encryption * @see Repositories#SERVER_SIDE_ENCRYPTION_SETTING */ - Setting SERVER_SIDE_ENCRYPTION_SETTING = Setting.boolSetting("server_side_encryption", false, false, Setting.Scope.CLUSTER); + Setting SERVER_SIDE_ENCRYPTION_SETTING = + Setting.boolSetting("server_side_encryption", false, false, SettingsProperty.ClusterScope); /** * buffer_size * @see Repositories#BUFFER_SIZE_SETTING */ - Setting BUFFER_SIZE_SETTING = Setting.byteSizeSetting("buffer_size", S3BlobStore.MIN_BUFFER_SIZE, false, Setting.Scope.CLUSTER); + Setting BUFFER_SIZE_SETTING = + Setting.byteSizeSetting("buffer_size", S3BlobStore.MIN_BUFFER_SIZE, false, SettingsProperty.ClusterScope); /** * max_retries * @see Repositories#MAX_RETRIES_SETTING */ - Setting MAX_RETRIES_SETTING = Setting.intSetting("max_retries", 3, false, Setting.Scope.CLUSTER); + Setting MAX_RETRIES_SETTING = Setting.intSetting("max_retries", 3, false, SettingsProperty.ClusterScope); /** * chunk_size * @see Repositories#CHUNK_SIZE_SETTING */ - Setting CHUNK_SIZE_SETTING = Setting.byteSizeSetting("chunk_size", "-1", false, Setting.Scope.CLUSTER); + Setting CHUNK_SIZE_SETTING = Setting.byteSizeSetting("chunk_size", "-1", false, SettingsProperty.ClusterScope); /** * compress * @see Repositories#COMPRESS_SETTING */ - Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, false, Setting.Scope.CLUSTER); + Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, false, SettingsProperty.ClusterScope); /** * storage_class * @see Repositories#STORAGE_CLASS_SETTING */ - Setting STORAGE_CLASS_SETTING = Setting.simpleString("storage_class", false, Setting.Scope.CLUSTER); + Setting STORAGE_CLASS_SETTING = Setting.simpleString("storage_class", false, SettingsProperty.ClusterScope); /** * canned_acl * @see Repositories#CANNED_ACL_SETTING */ - Setting CANNED_ACL_SETTING = Setting.simpleString("canned_acl", false, Setting.Scope.CLUSTER); + Setting CANNED_ACL_SETTING = 
Setting.simpleString("canned_acl", false, SettingsProperty.ClusterScope); /** * base_path * @see Repositories#BASE_PATH_SETTING */ - Setting BASE_PATH_SETTING = Setting.simpleString("base_path", false, Setting.Scope.CLUSTER); + Setting BASE_PATH_SETTING = Setting.simpleString("base_path", false, SettingsProperty.ClusterScope); } private final S3BlobStore blobStore; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index a3161f4090f9..4273c2027790 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -77,6 +77,7 @@ import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.transport.InetSocketTransportAddress; @@ -268,7 +269,8 @@ public abstract class ESIntegTestCase extends ESTestCase { * The value of this seed can be used to initialize a random context for a specific index. * It's set once per test via a generic index template. */ - public static final Setting INDEX_TEST_SEED_SETTING = Setting.longSetting("index.tests.seed", 0, Long.MIN_VALUE, false, Setting.Scope.INDEX); + public static final Setting INDEX_TEST_SEED_SETTING = + Setting.longSetting("index.tests.seed", 0, Long.MIN_VALUE, false, SettingsProperty.IndexScope); /** * A boolean value to enable or disable mock modules. 
This is useful to test the diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java index 64719f0f9de2..4b4692be90c4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java @@ -20,6 +20,7 @@ package org.elasticsearch.test; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.plugins.Plugin; @@ -34,9 +35,12 @@ public final class InternalSettingsPlugin extends Plugin { return "a plugin that allows to set values for internal settings which are can't be set via the ordinary API without this pluging installed"; } - public static final Setting VERSION_CREATED = Setting.intSetting("index.version.created", 0, false, Setting.Scope.INDEX); - public static final Setting MERGE_ENABLED = Setting.boolSetting("index.merge.enabled", true, false, Setting.Scope.INDEX); - public static final Setting INDEX_CREATION_DATE_SETTING = Setting.longSetting(IndexMetaData.SETTING_CREATION_DATE, -1, -1, false, Setting.Scope.INDEX); + public static final Setting VERSION_CREATED = + Setting.intSetting("index.version.created", 0, false, SettingsProperty.IndexScope); + public static final Setting MERGE_ENABLED = + Setting.boolSetting("index.merge.enabled", true, false, SettingsProperty.IndexScope); + public static final Setting INDEX_CREATION_DATE_SETTING = + Setting.longSetting(IndexMetaData.SETTING_CREATION_DATE, -1, -1, false, SettingsProperty.IndexScope); public void onModule(SettingsModule module) { module.registerSetting(VERSION_CREATED); diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java 
b/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java index 13f533a583e1..b94e7c7e8548 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java @@ -22,6 +22,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.index.Index; @@ -63,7 +64,7 @@ public final class MockIndexEventListener { /** * For tests to pass in to fail on listener invocation */ - public static final Setting INDEX_FAIL = Setting.boolSetting("index.fail", false, false, Setting.Scope.INDEX); + public static final Setting INDEX_FAIL = Setting.boolSetting("index.fail", false, false, SettingsProperty.IndexScope); public void onModule(SettingsModule module) { module.registerSetting(INDEX_FAIL); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java index ddccfe88e38f..cde26a5b55f2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java @@ -31,6 +31,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineConfig; @@ -55,11 
+56,13 @@ public final class MockEngineSupport { * Allows tests to wrap an index reader randomly with a given ratio. This is disabled by default ie. 0.0d since reader wrapping is insanely * slow if {@link org.apache.lucene.index.AssertingDirectoryReader} is used. */ - public static final Setting WRAP_READER_RATIO = Setting.doubleSetting("index.engine.mock.random.wrap_reader_ratio", 0.0d, 0.0d, false, Setting.Scope.INDEX); + public static final Setting WRAP_READER_RATIO = + Setting.doubleSetting("index.engine.mock.random.wrap_reader_ratio", 0.0d, 0.0d, false, SettingsProperty.IndexScope); /** * Allows tests to prevent an engine from being flushed on close ie. to test translog recovery... */ - public static final Setting DISABLE_FLUSH_ON_CLOSE = Setting.boolSetting("index.mock.disable_flush_on_close", false, false, Setting.Scope.INDEX); + public static final Setting DISABLE_FLUSH_ON_CLOSE = + Setting.boolSetting("index.mock.disable_flush_on_close", false, false, SettingsProperty.IndexScope); private final AtomicBoolean closing = new AtomicBoolean(false); diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java index ef3be122cdb2..6f0e6d51d10f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; @@ -57,17 +58,20 @@ import java.io.PrintStream; import 
java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.util.Arrays; -import java.util.Collections; import java.util.Random; -import java.util.Set; public class MockFSDirectoryService extends FsDirectoryService { - public static final Setting RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING = Setting.doubleSetting("index.store.mock.random.io_exception_rate_on_open", 0.0d, 0.0d, false, Setting.Scope.INDEX); - public static final Setting RANDOM_IO_EXCEPTION_RATE_SETTING = Setting.doubleSetting("index.store.mock.random.io_exception_rate", 0.0d, 0.0d, false, Setting.Scope.INDEX); - public static final Setting RANDOM_PREVENT_DOUBLE_WRITE_SETTING = Setting.boolSetting("index.store.mock.random.prevent_double_write", true, false, Setting.Scope.INDEX);// true is default in MDW - public static final Setting RANDOM_NO_DELETE_OPEN_FILE_SETTING = Setting.boolSetting("index.store.mock.random.no_delete_open_file", true, false, Setting.Scope.INDEX);// true is default in MDW - public static final Setting CRASH_INDEX_SETTING = Setting.boolSetting("index.store.mock.random.crash_index", true, false, Setting.Scope.INDEX);// true is default in MDW + public static final Setting RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING = + Setting.doubleSetting("index.store.mock.random.io_exception_rate_on_open", 0.0d, 0.0d, false, SettingsProperty.IndexScope); + public static final Setting RANDOM_IO_EXCEPTION_RATE_SETTING = + Setting.doubleSetting("index.store.mock.random.io_exception_rate", 0.0d, 0.0d, false, SettingsProperty.IndexScope); + public static final Setting RANDOM_PREVENT_DOUBLE_WRITE_SETTING = + Setting.boolSetting("index.store.mock.random.prevent_double_write", true, false, SettingsProperty.IndexScope);// true is default in MDW + public static final Setting RANDOM_NO_DELETE_OPEN_FILE_SETTING = + Setting.boolSetting("index.store.mock.random.no_delete_open_file", true, false, SettingsProperty.IndexScope);// true is default in MDW + public static final Setting CRASH_INDEX_SETTING = + 
Setting.boolSetting("index.store.mock.random.crash_index", true, false, SettingsProperty.IndexScope);// true is default in MDW private final FsDirectoryService delegateService; private final Random random; diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java index 80251d549514..8d1a2beed89a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.index.IndexModule; @@ -44,7 +45,8 @@ import java.util.Map; public class MockFSIndexStore extends IndexStore { - public static final Setting INDEX_CHECK_INDEX_ON_CLOSE_SETTING = Setting.boolSetting("index.store.mock.check_index_on_close", true, false, Setting.Scope.INDEX); + public static final Setting INDEX_CHECK_INDEX_ON_CLOSE_SETTING = + Setting.boolSetting("index.store.mock.check_index_on_close", true, false, SettingsProperty.IndexScope); public static class TestPlugin extends Plugin { @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java index 4c48f990d6a0..bdafa98b6acd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java +++ b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java @@ -20,6 +20,7 @@ package org.elasticsearch.test.tasks; import org.elasticsearch.common.settings.Setting; 
+import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; @@ -33,7 +34,8 @@ import java.util.concurrent.CopyOnWriteArrayList; */ public class MockTaskManager extends TaskManager { - public static final Setting USE_MOCK_TASK_MANAGER_SETTING = Setting.boolSetting("tests.mock.taskmanager.enabled", false, false, Setting.Scope.CLUSTER); + public static final Setting USE_MOCK_TASK_MANAGER_SETTING = + Setting.boolSetting("tests.mock.taskmanager.enabled", false, false, SettingsProperty.ClusterScope); private final Collection listeners = new CopyOnWriteArrayList<>(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java index fb3102391557..49a89977e95d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.plugins.Plugin; @@ -67,10 +68,12 @@ public class AssertingLocalTransport extends LocalTransport { } } - public static final Setting ASSERTING_TRANSPORT_MIN_VERSION_KEY = new Setting<>("transport.asserting.version.min", - Integer.toString(Version.CURRENT.minimumCompatibilityVersion().id), (s) -> Version.fromId(Integer.parseInt(s)), false, Setting.Scope.CLUSTER); - public static final Setting 
ASSERTING_TRANSPORT_MAX_VERSION_KEY = new Setting<>("transport.asserting.version.max", - Integer.toString(Version.CURRENT.id), (s) -> Version.fromId(Integer.parseInt(s)), false, Setting.Scope.CLUSTER); + public static final Setting ASSERTING_TRANSPORT_MIN_VERSION_KEY = + new Setting<>("transport.asserting.version.min", Integer.toString(Version.CURRENT.minimumCompatibilityVersion().id), + (s) -> Version.fromId(Integer.parseInt(s)), false, SettingsProperty.ClusterScope); + public static final Setting ASSERTING_TRANSPORT_MAX_VERSION_KEY = + new Setting<>("transport.asserting.version.max", Integer.toString(Version.CURRENT.id), + (s) -> Version.fromId(Integer.parseInt(s)), false, SettingsProperty.ClusterScope); private final Random random; private final Version minVersion; private final Version maxVersion; From a70df69af45be60a0b6eaeb16a86ef848d505eaf Mon Sep 17 00:00:00 2001 From: "George P. Stathis" Date: Sat, 27 Feb 2016 18:56:53 -0500 Subject: [PATCH 030/320] Allow pre v3 indices to overwrite built-in similarities. 
--- .../index/similarity/SimilarityService.java | 13 +++++++++++-- .../index/similarity/SimilarityServiceTests.java | 13 +++++++++++++ 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java index 7ad7dfad3a06..cdeaacb9f28a 100644 --- a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java +++ b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java @@ -81,7 +81,13 @@ public final class SimilarityService extends AbstractIndexComponent { } providers.put(name, factory.apply(name, settings)); } - addSimilarities(similaritySettings, providers, DEFAULTS); + for (Map.Entry entry : addSimilarities(similaritySettings, DEFAULTS).entrySet()) { + // Avoid overwriting custom providers for indices older than v3.0 + if (providers.containsKey(entry.getKey()) && indexSettings.getIndexVersionCreated().before(Version.V_3_0_0)) { + continue; + } + providers.put(entry.getKey(), entry.getValue()); + } this.similarities = providers; defaultSimilarity = (providers.get("default") != null) ?
providers.get("default").get() : providers.get(SimilarityService.DEFAULT_SIMILARITY).get(); @@ -96,7 +102,9 @@ public final class SimilarityService extends AbstractIndexComponent { defaultSimilarity; } - private void addSimilarities(Map similaritySettings, Map providers, Map> similarities) { + private Map addSimilarities(Map similaritySettings, + Map> similarities) { + Map providers = new HashMap<>(similarities.size()); for (Map.Entry> entry : similarities.entrySet()) { String name = entry.getKey(); BiFunction factory = entry.getValue(); @@ -106,6 +114,7 @@ public final class SimilarityService extends AbstractIndexComponent { } providers.put(name, factory.apply(name, settings)); } + return providers; } public SimilarityProvider getSimilarity(String name) { diff --git a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java index 0ec7dc5d64de..edb337fd4e63 100644 --- a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java @@ -18,6 +18,8 @@ */ package org.elasticsearch.index.similarity; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.test.ESTestCase; @@ -39,6 +41,17 @@ public class SimilarityServiceTests extends ESTestCase { } } + // Pre v3 indices could override built-in similarities + public void testOverrideBuiltInSimilarityPreV3() { + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0) + .put("index.similarity.BM25.type", "classic") + .build(); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", settings); + SimilarityService service = new SimilarityService(indexSettings, Collections.emptyMap()); + 
assertTrue(service.getSimilarity("BM25") instanceof ClassicSimilarityProvider); + } + // Tests #16594 public void testDefaultSimilarity() { Settings settings = Settings.builder().put("index.similarity.default.type", "BM25").build(); From f8d2400ee697520d6defdacdd7a29104d791224d Mon Sep 17 00:00:00 2001 From: "George P. Stathis" Date: Sat, 27 Feb 2016 18:57:29 -0500 Subject: [PATCH 031/320] First pass at validating similarities inside the Settings infrastructure. --- .../common/settings/IndexScopedSettings.java | 12 ++++++++++-- .../org/elasticsearch/common/settings/Setting.java | 7 ++++++- .../common/settings/ScopedSettingsTests.java | 7 +++++++ 3 files changed, 23 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index 157bbfbd5b9c..4d550e53dac1 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -35,6 +35,7 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; +import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.FsDirectoryService; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.Store; @@ -133,8 +134,15 @@ public final class IndexScopedSettings extends AbstractScopedSettings { FsDirectoryService.INDEX_LOCK_FACTOR_SETTING, EngineConfig.INDEX_CODEC_SETTING, IndexWarmer.INDEX_NORMS_LOADING_SETTING, - // this sucks but we can't really validate all the analyzers/similarity in here - Setting.groupSetting("index.similarity.", false, Setting.Scope.INDEX), // this allows similarity settings to be passed + // validate that built-in
similarities don't get redefined + Setting.groupSetting("index.similarity.", false, Setting.Scope.INDEX, (s) -> { + boolean valid = true; + String similarityName = s.substring(0, s.indexOf(".")); + if(SimilarityService.BUILT_IN.keySet().contains(similarityName)) { + throw new IllegalArgumentException("Cannot redefine built-in Similarity [" + similarityName + "]"); + } + return valid; + }), // this allows similarity settings to be passed Setting.groupSetting("index.analysis.", false, Setting.Scope.INDEX) // this allows analysis settings to be passed ))); diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 7f64c0111335..a6c86edde779 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -40,6 +40,7 @@ import java.util.Objects; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; +import java.util.function.Predicate; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -486,6 +487,10 @@ public class Setting extends ToXContentToBytes { } public static Setting groupSetting(String key, boolean dynamic, Scope scope) { + return groupSetting(key, dynamic, scope, (s) -> true); + } + + public static Setting groupSetting(String key, boolean dynamic, Scope scope, Predicate settingsValidator) { if (key.endsWith(".") == false) { throw new IllegalArgumentException("key must end with a '.'"); } @@ -498,7 +503,7 @@ public class Setting extends ToXContentToBytes { @Override public Settings get(Settings settings) { - return settings.getByPrefix(key); + return settings.getByPrefix(key).filter(settingsValidator); } @Override diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 
58f5cde65cea..fa5a018aa9b7 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -213,6 +213,13 @@ public class ScopedSettingsTests extends ESTestCase { } catch (IllegalArgumentException e) { assertEquals("Failed to parse value [true] for setting [index.number_of_replicas]", e.getMessage()); } + + try { + settings.validate("index.similarity.classic.type", Settings.builder().put("index.similarity.classic.type", "mine").build()); + fail(); + } catch (IllegalArgumentException e) { + assertEquals("Cannot redefine built-in Similarity [classic]", e.getMessage()); + } } From d77daf386124ea90acfb9a1dd5a8e1d262a33325 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Sun, 28 Feb 2016 11:06:45 +0100 Subject: [PATCH 032/320] Use a SettingsProperty.Dynamic for dynamic properties --- .../close/TransportCloseIndexAction.java | 2 +- .../action/support/AutoCreateIndex.java | 2 +- .../action/support/DestructiveOperations.java | 2 +- .../master/TransportMasterNodeReadAction.java | 2 +- .../bootstrap/BootstrapSettings.java | 8 +- .../cache/recycler/PageCacheRecycler.java | 12 +- .../java/org/elasticsearch/client/Client.java | 2 +- .../TransportClientNodesService.java | 8 +- .../elasticsearch/cluster/ClusterModule.java | 2 +- .../elasticsearch/cluster/ClusterName.java | 2 +- .../cluster/InternalClusterInfoService.java | 7 +- .../action/index/MappingUpdatedAction.java | 3 +- .../cluster/metadata/AutoExpandReplicas.java | 2 +- .../cluster/metadata/IndexMetaData.java | 28 ++-- .../cluster/metadata/MetaData.java | 2 +- .../cluster/routing/UnassignedInfo.java | 2 +- .../allocator/BalancedShardsAllocator.java | 7 +- .../decider/AwarenessAllocationDecider.java | 4 +- .../ClusterRebalanceAllocationDecider.java | 2 +- .../ConcurrentRebalanceAllocationDecider.java | 3 +- .../decider/DiskThresholdDecider.java | 17 +- .../decider/EnableAllocationDecider.java | 12 +-
.../decider/FilterAllocationDecider.java | 6 +- .../decider/ShardsLimitAllocationDecider.java | 6 +- .../SnapshotInProgressAllocationDecider.java | 3 +- .../decider/ThrottlingAllocationDecider.java | 14 +- .../service/InternalClusterService.java | 7 +- .../common/logging/ESLoggerFactory.java | 5 +- .../common/network/NetworkModule.java | 9 +- .../common/network/NetworkService.java | 26 +-- .../common/settings/IndexScopedSettings.java | 4 +- .../common/settings/Setting.java | 153 ++++++++++-------- .../common/util/concurrent/EsExecutors.java | 2 +- .../common/util/concurrent/ThreadContext.java | 3 +- .../discovery/DiscoveryModule.java | 4 +- .../discovery/DiscoveryService.java | 5 +- .../discovery/DiscoverySettings.java | 12 +- .../discovery/zen/ZenDiscovery.java | 18 +-- .../zen/elect/ElectMasterService.java | 2 +- .../discovery/zen/fd/FaultDetection.java | 10 +- .../zen/ping/unicast/UnicastZenPing.java | 4 +- .../org/elasticsearch/env/Environment.java | 19 ++- .../elasticsearch/env/NodeEnvironment.java | 10 +- .../elasticsearch/gateway/GatewayService.java | 14 +- .../gateway/PrimaryShardAllocator.java | 6 +- .../http/HttpTransportSettings.java | 42 ++--- .../http/netty/NettyHttpServerTransport.java | 26 ++- .../org/elasticsearch/index/IndexModule.java | 6 +- .../elasticsearch/index/IndexSettings.java | 30 ++-- .../org/elasticsearch/index/IndexWarmer.java | 2 +- .../elasticsearch/index/IndexingSlowLog.java | 14 +- .../index/MergePolicyConfig.java | 26 +-- .../index/MergeSchedulerConfig.java | 6 +- .../elasticsearch/index/SearchSlowLog.java | 20 +-- .../index/cache/bitset/BitsetFilterCache.java | 2 +- .../index/engine/EngineConfig.java | 2 +- .../fielddata/IndexFieldDataService.java | 2 +- .../index/mapper/FieldMapper.java | 4 +- .../index/mapper/MapperService.java | 4 +- .../index/mapper/core/NumberFieldMapper.java | 2 +- .../percolator/PercolatorQueriesRegistry.java | 2 +- .../index/store/FsDirectoryService.java | 2 +- .../elasticsearch/index/store/IndexStore.java 
| 6 +- .../index/store/IndexStoreConfig.java | 7 +- .../org/elasticsearch/index/store/Store.java | 2 +- .../indices/IndicesQueryCache.java | 4 +- .../indices/IndicesRequestCache.java | 6 +- .../elasticsearch/indices/IndicesService.java | 2 +- .../indices/analysis/HunspellService.java | 6 +- .../HierarchyCircuitBreakerService.java | 14 +- .../cache/IndicesFieldDataCache.java | 2 +- .../indices/recovery/RecoverySettings.java | 22 +-- .../indices/store/IndicesStore.java | 2 +- .../indices/ttl/IndicesTTLService.java | 3 +- .../elasticsearch/monitor/fs/FsService.java | 2 +- .../monitor/jvm/JvmGcMonitorService.java | 6 +- .../elasticsearch/monitor/jvm/JvmService.java | 2 +- .../elasticsearch/monitor/os/OsService.java | 2 +- .../monitor/process/ProcessService.java | 2 +- .../java/org/elasticsearch/node/Node.java | 18 +-- .../internal/InternalSettingsPreparer.java | 2 +- .../elasticsearch/plugins/PluginsService.java | 2 +- .../repositories/fs/FsRepository.java | 12 +- .../repositories/uri/URLRepository.java | 14 +- .../elasticsearch/rest/BaseRestHandler.java | 2 +- .../elasticsearch/script/ScriptService.java | 6 +- .../elasticsearch/script/ScriptSettings.java | 5 +- .../elasticsearch/search/SearchService.java | 6 +- .../elasticsearch/threadpool/ThreadPool.java | 2 +- .../elasticsearch/transport/Transport.java | 2 +- .../transport/TransportService.java | 4 +- .../transport/TransportSettings.java | 12 +- .../transport/netty/NettyTransport.java | 43 +++-- .../org/elasticsearch/tribe/TribeService.java | 17 +- .../cluster/ClusterModuleTests.java | 4 +- .../cluster/settings/SettingsFilteringIT.java | 6 +- .../common/settings/ScopedSettingsTests.java | 24 +-- .../common/settings/SettingTests.java | 49 +++--- .../common/settings/SettingsModuleTests.java | 10 +- .../elasticsearch/index/IndexModuleTests.java | 4 +- .../index/IndexSettingsTests.java | 11 +- .../index/SettingsListenerIT.java | 6 +- .../indices/IndicesOptionsIntegrationIT.java | 6 +- 
.../RandomExceptionCircuitBreakerIT.java | 4 +- .../basic/SearchWithRandomExceptionsIT.java | 4 +- .../snapshots/mockstore/MockRepository.java | 5 +- .../azure/management/AzureComputeService.java | 15 +- .../cloud/aws/AwsEc2Service.java | 53 +++--- .../mapper/attachments/AttachmentMapper.java | 6 +- .../azure/storage/AzureStorageService.java | 14 +- .../repositories/azure/AzureRepository.java | 15 +- .../elasticsearch/cloud/aws/AwsS3Service.java | 39 +++-- .../repositories/s3/S3Repository.java | 58 ++++--- .../elasticsearch/test/ESIntegTestCase.java | 2 +- .../test/InternalSettingsPlugin.java | 6 +- .../test/MockIndexEventListener.java | 2 +- .../test/engine/MockEngineSupport.java | 4 +- .../test/store/MockFSDirectoryService.java | 10 +- .../test/store/MockFSIndexStore.java | 2 +- .../test/tasks/MockTaskManager.java | 2 +- .../transport/AssertingLocalTransport.java | 4 +- 121 files changed, 647 insertions(+), 620 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java index 82602a10c008..6065a2ec66e6 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java @@ -48,7 +48,7 @@ public class TransportCloseIndexAction extends TransportMasterNodeAction CLUSTER_INDICES_CLOSE_ENABLE_SETTING = - Setting.boolSetting("cluster.indices.close.enable", true, true, SettingsProperty.ClusterScope); + Setting.boolSetting("cluster.indices.close.enable", true, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); @Inject public TransportCloseIndexAction(Settings settings, TransportService transportService, ClusterService clusterService, diff --git a/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java 
b/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java index ee304dd05f2b..2169d3a1521f 100644 --- a/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java +++ b/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java @@ -41,7 +41,7 @@ import java.util.List; public final class AutoCreateIndex { public static final Setting AUTO_CREATE_INDEX_SETTING = - new Setting<>("action.auto_create_index", "true", AutoCreate::new, false, SettingsProperty.ClusterScope); + new Setting<>("action.auto_create_index", "true", AutoCreate::new, SettingsProperty.ClusterScope); private final boolean dynamicMappingDisabled; private final IndexNameExpressionResolver resolver; diff --git a/core/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java b/core/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java index cfdd45cdfa1e..6591384271b2 100644 --- a/core/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java +++ b/core/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java @@ -35,7 +35,7 @@ public final class DestructiveOperations extends AbstractComponent { * Setting which controls whether wildcard usage (*, prefix*, _all) is allowed. 
*/ public static final Setting REQUIRES_NAME_SETTING = - Setting.boolSetting("action.destructive_requires_name", false, true, SettingsProperty.ClusterScope); + Setting.boolSetting("action.destructive_requires_name", false, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); private volatile boolean destructiveRequiresName; @Inject diff --git a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java index 5c15acbbdca4..08ba0defd739 100644 --- a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java @@ -39,7 +39,7 @@ public abstract class TransportMasterNodeReadAction { public static final Setting FORCE_LOCAL_SETTING = - Setting.boolSetting("action.master.force_local", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("action.master.force_local", false, SettingsProperty.ClusterScope); private final boolean forceLocal; diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapSettings.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapSettings.java index 9c0bdcbd2c96..dd9263330e6d 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapSettings.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapSettings.java @@ -29,13 +29,13 @@ public final class BootstrapSettings { // TODO: remove this hack when insecure defaults are removed from java public static final Setting SECURITY_FILTER_BAD_DEFAULTS_SETTING = - Setting.boolSetting("security.manager.filter_bad_defaults", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("security.manager.filter_bad_defaults", true, SettingsProperty.ClusterScope); public static final Setting MLOCKALL_SETTING = - Setting.boolSetting("bootstrap.mlockall", false, false, 
SettingsProperty.ClusterScope); + Setting.boolSetting("bootstrap.mlockall", false, SettingsProperty.ClusterScope); public static final Setting SECCOMP_SETTING = - Setting.boolSetting("bootstrap.seccomp", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("bootstrap.seccomp", true, SettingsProperty.ClusterScope); public static final Setting CTRLHANDLER_SETTING = - Setting.boolSetting("bootstrap.ctrlhandler", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("bootstrap.ctrlhandler", true, SettingsProperty.ClusterScope); } diff --git a/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java b/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java index 9cec74115f60..f58947409e94 100644 --- a/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java +++ b/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java @@ -45,18 +45,18 @@ import static org.elasticsearch.common.recycler.Recyclers.none; public class PageCacheRecycler extends AbstractComponent implements Releasable { public static final Setting TYPE_SETTING = - new Setting<>("cache.recycler.page.type", Type.CONCURRENT.name(), Type::parse, false, SettingsProperty.ClusterScope); + new Setting<>("cache.recycler.page.type", Type.CONCURRENT.name(), Type::parse, SettingsProperty.ClusterScope); public static final Setting LIMIT_HEAP_SETTING = - Setting.byteSizeSetting("cache.recycler.page.limit.heap", "10%", false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("cache.recycler.page.limit.heap", "10%", SettingsProperty.ClusterScope); public static final Setting WEIGHT_BYTES_SETTING = - Setting.doubleSetting("cache.recycler.page.weight.bytes", 1d, 0d, false, SettingsProperty.ClusterScope); + Setting.doubleSetting("cache.recycler.page.weight.bytes", 1d, 0d, SettingsProperty.ClusterScope); public static final Setting WEIGHT_LONG_SETTING = - Setting.doubleSetting("cache.recycler.page.weight.longs", 1d, 0d, 
false, SettingsProperty.ClusterScope); + Setting.doubleSetting("cache.recycler.page.weight.longs", 1d, 0d, SettingsProperty.ClusterScope); public static final Setting WEIGHT_INT_SETTING = - Setting.doubleSetting("cache.recycler.page.weight.ints", 1d, 0d, false, SettingsProperty.ClusterScope); + Setting.doubleSetting("cache.recycler.page.weight.ints", 1d, 0d, SettingsProperty.ClusterScope); // object pages are less useful to us so we give them a lower weight by default public static final Setting WEIGHT_OBJECTS_SETTING = - Setting.doubleSetting("cache.recycler.page.weight.objects", 0.1d, 0d, false, SettingsProperty.ClusterScope); + Setting.doubleSetting("cache.recycler.page.weight.objects", 0.1d, 0d, SettingsProperty.ClusterScope); private final Recycler bytePage; private final Recycler intPage; diff --git a/core/src/main/java/org/elasticsearch/client/Client.java b/core/src/main/java/org/elasticsearch/client/Client.java index 859a15e2c5b0..1ced0f2b0199 100644 --- a/core/src/main/java/org/elasticsearch/client/Client.java +++ b/core/src/main/java/org/elasticsearch/client/Client.java @@ -115,7 +115,7 @@ public interface Client extends ElasticsearchClient, Releasable { default: throw new IllegalArgumentException("Can't parse [client.type] must be one of [node, transport]"); } - }, false, SettingsProperty.ClusterScope); + }, SettingsProperty.ClusterScope); /** * The admin client that can be used to perform administrative operations. 
diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java index 2e4956333299..28c921333cae 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java @@ -102,13 +102,13 @@ public class TransportClientNodesService extends AbstractComponent { public static final Setting CLIENT_TRANSPORT_NODES_SAMPLER_INTERVAL = - Setting.positiveTimeSetting("client.transport.nodes_sampler_interval", timeValueSeconds(5), false, SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("client.transport.nodes_sampler_interval", timeValueSeconds(5), SettingsProperty.ClusterScope); public static final Setting CLIENT_TRANSPORT_PING_TIMEOUT = - Setting.positiveTimeSetting("client.transport.ping_timeout", timeValueSeconds(5), false, SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("client.transport.ping_timeout", timeValueSeconds(5), SettingsProperty.ClusterScope); public static final Setting CLIENT_TRANSPORT_IGNORE_CLUSTER_NAME = - Setting.boolSetting("client.transport.ignore_cluster_name", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("client.transport.ignore_cluster_name", false, SettingsProperty.ClusterScope); public static final Setting CLIENT_TRANSPORT_SNIFF = - Setting.boolSetting("client.transport.sniff", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("client.transport.sniff", false, SettingsProperty.ClusterScope); @Inject public TransportClientNodesService(Settings settings, ClusterName clusterName, TransportService transportService, diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index ec27ed3a4d40..c57549236e87 100644 --- 
a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -76,7 +76,7 @@ public class ClusterModule extends AbstractModule { public static final String EVEN_SHARD_COUNT_ALLOCATOR = "even_shard"; public static final String BALANCED_ALLOCATOR = "balanced"; // default public static final Setting SHARDS_ALLOCATOR_TYPE_SETTING = - new Setting<>("cluster.routing.allocation.type", BALANCED_ALLOCATOR, Function.identity(), false, SettingsProperty.ClusterScope); + new Setting<>("cluster.routing.allocation.type", BALANCED_ALLOCATOR, Function.identity(), SettingsProperty.ClusterScope); public static final List> DEFAULT_ALLOCATION_DECIDERS = Collections.unmodifiableList(Arrays.asList( SameShardAllocationDecider.class, diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterName.java b/core/src/main/java/org/elasticsearch/cluster/ClusterName.java index 9012b9b0278b..185c68e075c2 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterName.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterName.java @@ -38,7 +38,7 @@ public class ClusterName implements Streamable { throw new IllegalArgumentException("[cluster.name] must not be empty"); } return s; - }, false, SettingsProperty.ClusterScope); + }, SettingsProperty.ClusterScope); public static final ClusterName DEFAULT = new ClusterName(CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY).intern()); diff --git a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index e6d9c27c1c25..32f521a67824 100644 --- a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -66,10 +66,11 @@ import java.util.concurrent.TimeUnit; public class InternalClusterInfoService extends AbstractComponent implements ClusterInfoService, 
LocalNodeMasterListener, ClusterStateListener { public static final Setting INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING = - Setting.timeSetting("cluster.info.update.interval", TimeValue.timeValueSeconds(30), TimeValue.timeValueSeconds(10), true, - SettingsProperty.ClusterScope); + Setting.timeSetting("cluster.info.update.interval", TimeValue.timeValueSeconds(30), TimeValue.timeValueSeconds(10), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING = - Setting.positiveTimeSetting("cluster.info.update.timeout", TimeValue.timeValueSeconds(15), true, SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("cluster.info.update.timeout", TimeValue.timeValueSeconds(15), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); private volatile TimeValue updateFrequency; diff --git a/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java b/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java index 647f5df1cd47..4005631d5afc 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java @@ -43,7 +43,8 @@ import java.util.concurrent.TimeoutException; public class MappingUpdatedAction extends AbstractComponent { public static final Setting INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING = - Setting.positiveTimeSetting("indices.mapping.dynamic_timeout", TimeValue.timeValueSeconds(30), true, SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("indices.mapping.dynamic_timeout", TimeValue.timeValueSeconds(30), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); private IndicesAdminClient client; private volatile TimeValue dynamicMappingUpdateTimeout; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java 
b/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java index dac44814a92d..9a8499832eac 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java @@ -57,7 +57,7 @@ final class AutoExpandReplicas { } } return new AutoExpandReplicas(min, max, true); - }, true, SettingsProperty.IndexScope); + }, SettingsProperty.Dynamic, SettingsProperty.IndexScope); private final int minReplicas; private final int maxReplicas; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index f9982384d6e2..37e0ad970029 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -154,35 +154,35 @@ public class IndexMetaData implements Diffable, FromXContentBuild public static final String INDEX_SETTING_PREFIX = "index."; public static final String SETTING_NUMBER_OF_SHARDS = "index.number_of_shards"; public static final Setting INDEX_NUMBER_OF_SHARDS_SETTING = - Setting.intSetting(SETTING_NUMBER_OF_SHARDS, 5, 1, false, SettingsProperty.IndexScope); + Setting.intSetting(SETTING_NUMBER_OF_SHARDS, 5, 1, SettingsProperty.IndexScope); public static final String SETTING_NUMBER_OF_REPLICAS = "index.number_of_replicas"; public static final Setting INDEX_NUMBER_OF_REPLICAS_SETTING = - Setting.intSetting(SETTING_NUMBER_OF_REPLICAS, 1, 0, true, SettingsProperty.IndexScope); + Setting.intSetting(SETTING_NUMBER_OF_REPLICAS, 1, 0, SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final String SETTING_SHADOW_REPLICAS = "index.shadow_replicas"; public static final Setting INDEX_SHADOW_REPLICAS_SETTING = - Setting.boolSetting(SETTING_SHADOW_REPLICAS, false, false, SettingsProperty.IndexScope); + Setting.boolSetting(SETTING_SHADOW_REPLICAS, 
false, SettingsProperty.IndexScope); public static final String SETTING_SHARED_FILESYSTEM = "index.shared_filesystem"; public static final Setting INDEX_SHARED_FILESYSTEM_SETTING = - Setting.boolSetting(SETTING_SHARED_FILESYSTEM, false, false, SettingsProperty.IndexScope); + Setting.boolSetting(SETTING_SHARED_FILESYSTEM, false, SettingsProperty.IndexScope); public static final String SETTING_AUTO_EXPAND_REPLICAS = "index.auto_expand_replicas"; public static final Setting INDEX_AUTO_EXPAND_REPLICAS_SETTING = AutoExpandReplicas.SETTING; public static final String SETTING_READ_ONLY = "index.blocks.read_only"; public static final Setting INDEX_READ_ONLY_SETTING = - Setting.boolSetting(SETTING_READ_ONLY, false, true, SettingsProperty.IndexScope); + Setting.boolSetting(SETTING_READ_ONLY, false, SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final String SETTING_BLOCKS_READ = "index.blocks.read"; public static final Setting INDEX_BLOCKS_READ_SETTING = - Setting.boolSetting(SETTING_BLOCKS_READ, false, true, SettingsProperty.IndexScope); + Setting.boolSetting(SETTING_BLOCKS_READ, false, SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final String SETTING_BLOCKS_WRITE = "index.blocks.write"; public static final Setting INDEX_BLOCKS_WRITE_SETTING = - Setting.boolSetting(SETTING_BLOCKS_WRITE, false, true, SettingsProperty.IndexScope); + Setting.boolSetting(SETTING_BLOCKS_WRITE, false, SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final String SETTING_BLOCKS_METADATA = "index.blocks.metadata"; public static final Setting INDEX_BLOCKS_METADATA_SETTING = - Setting.boolSetting(SETTING_BLOCKS_METADATA, false, true, SettingsProperty.IndexScope); + Setting.boolSetting(SETTING_BLOCKS_METADATA, false, SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final String SETTING_VERSION_CREATED = "index.version.created"; public static final String SETTING_VERSION_CREATED_STRING = 
"index.version.created_string"; @@ -192,23 +192,23 @@ public class IndexMetaData implements Diffable, FromXContentBuild public static final String SETTING_CREATION_DATE = "index.creation_date"; public static final String SETTING_PRIORITY = "index.priority"; public static final Setting INDEX_PRIORITY_SETTING = - Setting.intSetting("index.priority", 1, 0, true, SettingsProperty.IndexScope); + Setting.intSetting("index.priority", 1, 0, SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final String SETTING_CREATION_DATE_STRING = "index.creation_date_string"; public static final String SETTING_INDEX_UUID = "index.uuid"; public static final String SETTING_DATA_PATH = "index.data_path"; public static final Setting INDEX_DATA_PATH_SETTING = - new Setting<>(SETTING_DATA_PATH, "", Function.identity(), false, SettingsProperty.IndexScope); + new Setting<>(SETTING_DATA_PATH, "", Function.identity(), SettingsProperty.IndexScope); public static final String SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE = "index.shared_filesystem.recover_on_any_node"; public static final Setting INDEX_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE_SETTING = - Setting.boolSetting(SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false, true, SettingsProperty.IndexScope); + Setting.boolSetting(SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false, SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final String INDEX_UUID_NA_VALUE = "_na_"; public static final Setting INDEX_ROUTING_REQUIRE_GROUP_SETTING = - Setting.groupSetting("index.routing.allocation.require.", true, SettingsProperty.IndexScope); + Setting.groupSetting("index.routing.allocation.require.", SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_ROUTING_INCLUDE_GROUP_SETTING = - Setting.groupSetting("index.routing.allocation.include.", true, SettingsProperty.IndexScope); + Setting.groupSetting("index.routing.allocation.include.", SettingsProperty.Dynamic, 
SettingsProperty.IndexScope); public static final Setting INDEX_ROUTING_EXCLUDE_GROUP_SETTING = - Setting.groupSetting("index.routing.allocation.exclude.", true, SettingsProperty.IndexScope); + Setting.groupSetting("index.routing.allocation.exclude.", SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final IndexMetaData PROTO = IndexMetaData.builder("") .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index f729cc4cabcb..d74cf06c5330 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -141,7 +141,7 @@ public class MetaData implements Iterable, Diffable, Fr public static final Setting SETTING_READ_ONLY_SETTING = - Setting.boolSetting("cluster.blocks.read_only", false, true, SettingsProperty.ClusterScope); + Setting.boolSetting("cluster.blocks.read_only", false, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final ClusterBlock CLUSTER_READ_ONLY_BLOCK = new ClusterBlock(6, "cluster read-only (api)", false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA_WRITE)); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java b/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java index d7cfe1a39d95..1b7fcf96779b 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java @@ -46,7 +46,7 @@ public class UnassignedInfo implements ToXContent, Writeable { private static final TimeValue DEFAULT_DELAYED_NODE_LEFT_TIMEOUT = TimeValue.timeValueMinutes(1); public static final Setting INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING = - 
Setting.timeSetting("index.unassigned.node_left.delayed_timeout", DEFAULT_DELAYED_NODE_LEFT_TIMEOUT, true, + Setting.timeSetting("index.unassigned.node_left.delayed_timeout", DEFAULT_DELAYED_NODE_LEFT_TIMEOUT, SettingsProperty.Dynamic, SettingsProperty.IndexScope); /** diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 248d5aa25c93..ccbf47b675e6 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -74,11 +74,12 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; public class BalancedShardsAllocator extends AbstractComponent implements ShardsAllocator { public static final Setting INDEX_BALANCE_FACTOR_SETTING = - Setting.floatSetting("cluster.routing.allocation.balance.index", 0.55f, true, SettingsProperty.ClusterScope); + Setting.floatSetting("cluster.routing.allocation.balance.index", 0.55f, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting SHARD_BALANCE_FACTOR_SETTING = - Setting.floatSetting("cluster.routing.allocation.balance.shard", 0.45f, true, SettingsProperty.ClusterScope); + Setting.floatSetting("cluster.routing.allocation.balance.shard", 0.45f, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting THRESHOLD_SETTING = - Setting.floatSetting("cluster.routing.allocation.balance.threshold", 1.0f, 0.0f, true, SettingsProperty.ClusterScope); + Setting.floatSetting("cluster.routing.allocation.balance.threshold", 1.0f, 0.0f, + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); private volatile WeightFunction weightFunction; private volatile float threshold; diff --git 
a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java index 792f670dcf2c..235cfd841866 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java @@ -79,10 +79,10 @@ public class AwarenessAllocationDecider extends AllocationDecider { public static final String NAME = "awareness"; public static final Setting CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING = - new Setting<>("cluster.routing.allocation.awareness.attributes", "", Strings::splitStringByCommaToArray , true, + new Setting<>("cluster.routing.allocation.awareness.attributes", "", Strings::splitStringByCommaToArray , SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING = - Setting.groupSetting("cluster.routing.allocation.awareness.force.", true, SettingsProperty.ClusterScope); + Setting.groupSetting("cluster.routing.allocation.awareness.force.", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); private String[] awarenessAttributes; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java index 2c59fee3af6f..58966dd62a63 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java @@ -51,7 +51,7 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider { public static final String NAME = "cluster_rebalance"; public 
static final Setting CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING = new Setting<>("cluster.routing.allocation.allow_rebalance", ClusterRebalanceType.INDICES_ALL_ACTIVE.name().toLowerCase(Locale.ROOT), - ClusterRebalanceType::parseString, true, SettingsProperty.ClusterScope); + ClusterRebalanceType::parseString, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); /** * An enum representation for the configured re-balance type. diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java index cda5e628dece..cab73958b757 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java @@ -44,7 +44,8 @@ public class ConcurrentRebalanceAllocationDecider extends AllocationDecider { public static final String NAME = "concurrent_rebalance"; public static final Setting CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING = - Setting.intSetting("cluster.routing.allocation.cluster_concurrent_rebalance", 2, -1, true, SettingsProperty.ClusterScope); + Setting.intSetting("cluster.routing.allocation.cluster_concurrent_rebalance", 2, -1, + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); private volatile int clusterConcurrentRebalance; @Inject diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index 051eab81ec8f..e8b5a3dba04d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ 
b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -83,16 +83,21 @@ public class DiskThresholdDecider extends AllocationDecider { private volatile TimeValue rerouteInterval; public static final Setting CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING = - Setting.boolSetting("cluster.routing.allocation.disk.threshold_enabled", true, true, SettingsProperty.ClusterScope); + Setting.boolSetting("cluster.routing.allocation.disk.threshold_enabled", true, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING = - new Setting<>("cluster.routing.allocation.disk.watermark.low", "85%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.low"), true, SettingsProperty.ClusterScope); + new Setting<>("cluster.routing.allocation.disk.watermark.low", "85%", + (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.low"), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING = - new Setting<>("cluster.routing.allocation.disk.watermark.high", "90%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.high"), true, SettingsProperty.ClusterScope); + new Setting<>("cluster.routing.allocation.disk.watermark.high", "90%", + (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.high"), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING = - Setting.boolSetting("cluster.routing.allocation.disk.include_relocations", true, true, SettingsProperty.ClusterScope);; + Setting.boolSetting("cluster.routing.allocation.disk.include_relocations", true, + SettingsProperty.Dynamic, SettingsProperty.ClusterScope);; public static final Setting CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING = - 
Setting.positiveTimeSetting("cluster.routing.allocation.disk.reroute_interval", TimeValue.timeValueSeconds(60), true, - SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("cluster.routing.allocation.disk.reroute_interval", TimeValue.timeValueSeconds(60), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); /** * Listens for a node to go over the high watermark and kicks off an empty diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java index edece247c8b3..0cca4cac4806 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java @@ -62,14 +62,18 @@ public class EnableAllocationDecider extends AllocationDecider { public static final String NAME = "enable"; public static final Setting CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING = - new Setting<>("cluster.routing.allocation.enable", Allocation.ALL.name(), Allocation::parse, true, SettingsProperty.ClusterScope); + new Setting<>("cluster.routing.allocation.enable", Allocation.ALL.name(), Allocation::parse, + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting INDEX_ROUTING_ALLOCATION_ENABLE_SETTING = - new Setting<>("index.routing.allocation.enable", Allocation.ALL.name(), Allocation::parse, true, SettingsProperty.IndexScope); + new Setting<>("index.routing.allocation.enable", Allocation.ALL.name(), Allocation::parse, + SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING = - new Setting<>("cluster.routing.rebalance.enable", Rebalance.ALL.name(), Rebalance::parse, true, SettingsProperty.ClusterScope); + new Setting<>("cluster.routing.rebalance.enable", Rebalance.ALL.name(), 
Rebalance::parse, + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting INDEX_ROUTING_REBALANCE_ENABLE_SETTING = - new Setting<>("index.routing.rebalance.enable", Rebalance.ALL.name(), Rebalance::parse, true, SettingsProperty.IndexScope); + new Setting<>("index.routing.rebalance.enable", Rebalance.ALL.name(), Rebalance::parse, + SettingsProperty.Dynamic, SettingsProperty.IndexScope); private volatile Rebalance enableRebalance; private volatile Allocation enableAllocation; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java index 59f6ec1531a0..b4c50d1849bd 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java @@ -62,11 +62,11 @@ public class FilterAllocationDecider extends AllocationDecider { public static final String NAME = "filter"; public static final Setting CLUSTER_ROUTING_REQUIRE_GROUP_SETTING = - Setting.groupSetting("cluster.routing.allocation.require.", true, SettingsProperty.ClusterScope); + Setting.groupSetting("cluster.routing.allocation.require.", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting CLUSTER_ROUTING_INCLUDE_GROUP_SETTING = - Setting.groupSetting("cluster.routing.allocation.include.", true, SettingsProperty.ClusterScope); + Setting.groupSetting("cluster.routing.allocation.include.", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING = - Setting.groupSetting("cluster.routing.allocation.exclude.", true, SettingsProperty.ClusterScope); + Setting.groupSetting("cluster.routing.allocation.exclude.", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); private volatile 
DiscoveryNodeFilters clusterRequireFilters; private volatile DiscoveryNodeFilters clusterIncludeFilters; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java index be6c98d147ba..03a383830ce6 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java @@ -61,14 +61,16 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { * node. Negative values are interpreted as unlimited. */ public static final Setting INDEX_TOTAL_SHARDS_PER_NODE_SETTING = - Setting.intSetting("index.routing.allocation.total_shards_per_node", -1, -1, true, SettingsProperty.IndexScope); + Setting.intSetting("index.routing.allocation.total_shards_per_node", -1, -1, + SettingsProperty.Dynamic, SettingsProperty.IndexScope); /** * Controls the maximum number of shards per node on a global level. * Negative values are interpreted as unlimited. 
*/ public static final Setting CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING = - Setting.intSetting("cluster.routing.allocation.total_shards_per_node", -1, -1, true, SettingsProperty.ClusterScope); + Setting.intSetting("cluster.routing.allocation.total_shards_per_node", -1, -1, + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); @Inject diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java index b4927b6c5c7f..a9d269d3b3ab 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java @@ -41,7 +41,8 @@ public class SnapshotInProgressAllocationDecider extends AllocationDecider { * Disables relocation of shards that are currently being snapshotted. 
*/ public static final Setting CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING = - Setting.boolSetting("cluster.routing.allocation.snapshot.relocation_enabled", false, true, SettingsProperty.ClusterScope); + Setting.boolSetting("cluster.routing.allocation.snapshot.relocation_enabled", false, + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); private volatile boolean enableRelocation = false; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java index 1e12eb406b83..649002362911 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java @@ -54,20 +54,22 @@ public class ThrottlingAllocationDecider extends AllocationDecider { public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_recoveries", Integer.toString(DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES), - (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_recoveries"), true, SettingsProperty.ClusterScope); + (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_recoveries"), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING = Setting.intSetting("cluster.routing.allocation.node_initial_primaries_recoveries", - DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, 0, true, SettingsProperty.ClusterScope); + DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, 0, + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting 
CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_incoming_recoveries", (s) -> CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getRaw(s), - (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_incoming_recoveries"), true, - SettingsProperty.ClusterScope); + (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_incoming_recoveries"), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_outgoing_recoveries", (s) -> CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getRaw(s), - (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_outgoing_recoveries"), true, - SettingsProperty.ClusterScope); + (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_outgoing_recoveries"), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); private volatile int primariesInitialRecoveries; diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index 0b0fd5e2b994..fbf4d0b67e38 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -95,11 +95,10 @@ import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadF public class InternalClusterService extends AbstractLifecycleComponent implements ClusterService { public static final Setting CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING = - Setting.positiveTimeSetting("cluster.service.slow_task_logging_threshold", TimeValue.timeValueSeconds(30), true, - SettingsProperty.ClusterScope); + 
Setting.positiveTimeSetting("cluster.service.slow_task_logging_threshold", TimeValue.timeValueSeconds(30), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting CLUSTER_SERVICE_RECONNECT_INTERVAL_SETTING = - Setting.positiveTimeSetting("cluster.service.reconnect_interval", TimeValue.timeValueSeconds(10), false, - SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("cluster.service.reconnect_interval", TimeValue.timeValueSeconds(10), SettingsProperty.ClusterScope); public static final String UPDATE_THREAD_NAME = "clusterService#updateTask"; private final ThreadPool threadPool; diff --git a/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java b/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java index 98d75c864828..a5d8e7e4960a 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java +++ b/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java @@ -38,9 +38,10 @@ import java.util.regex.Pattern; public abstract class ESLoggerFactory { public static final Setting LOG_DEFAULT_LEVEL_SETTING = - new Setting<>("logger.level", LogLevel.INFO.name(), LogLevel::parse, false, SettingsProperty.ClusterScope); + new Setting<>("logger.level", LogLevel.INFO.name(), LogLevel::parse, SettingsProperty.ClusterScope); public static final Setting LOG_LEVEL_SETTING = - Setting.dynamicKeySetting("logger.", LogLevel.INFO.name(), LogLevel::parse, true, SettingsProperty.ClusterScope); + Setting.dynamicKeySetting("logger.", LogLevel.INFO.name(), LogLevel::parse, + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); private static volatile ESLoggerFactory defaultFactory = new JdkESLoggerFactory(); diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java index ea2cc1b42676..c79e8dd3af50 100644 --- 
a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -155,12 +155,11 @@ public class NetworkModule extends AbstractModule { public static final String LOCAL_TRANSPORT = "local"; public static final String NETTY_TRANSPORT = "netty"; - public static final Setting HTTP_TYPE_SETTING = Setting.simpleString("http.type", false, SettingsProperty.ClusterScope); - public static final Setting HTTP_ENABLED = Setting.boolSetting("http.enabled", true, false, SettingsProperty.ClusterScope); + public static final Setting HTTP_TYPE_SETTING = Setting.simpleString("http.type", SettingsProperty.ClusterScope); + public static final Setting HTTP_ENABLED = Setting.boolSetting("http.enabled", true, SettingsProperty.ClusterScope); public static final Setting TRANSPORT_SERVICE_TYPE_SETTING = - Setting.simpleString("transport.service.type", false, SettingsProperty.ClusterScope); - public static final Setting TRANSPORT_TYPE_SETTING = - Setting.simpleString("transport.type", false, SettingsProperty.ClusterScope); + Setting.simpleString("transport.service.type", SettingsProperty.ClusterScope); + public static final Setting TRANSPORT_TYPE_SETTING = Setting.simpleString("transport.type", SettingsProperty.ClusterScope); diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java index abb7795f12a7..fc1922252221 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java @@ -45,32 +45,32 @@ public class NetworkService extends AbstractComponent { public static final String DEFAULT_NETWORK_HOST = "_local_"; public static final Setting> GLOBAL_NETWORK_HOST_SETTING = - Setting.listSetting("network.host", Arrays.asList(DEFAULT_NETWORK_HOST), s -> s, false, SettingsProperty.ClusterScope); + 
Setting.listSetting("network.host", Arrays.asList(DEFAULT_NETWORK_HOST), s -> s, SettingsProperty.ClusterScope); public static final Setting> GLOBAL_NETWORK_BINDHOST_SETTING = - Setting.listSetting("network.bind_host", GLOBAL_NETWORK_HOST_SETTING, s -> s, false, SettingsProperty.ClusterScope); + Setting.listSetting("network.bind_host", GLOBAL_NETWORK_HOST_SETTING, s -> s, SettingsProperty.ClusterScope); public static final Setting> GLOBAL_NETWORK_PUBLISHHOST_SETTING = - Setting.listSetting("network.publish_host", GLOBAL_NETWORK_HOST_SETTING, s -> s, false, SettingsProperty.ClusterScope); - public static final Setting NETWORK_SERVER = Setting.boolSetting("network.server", true, false, SettingsProperty.ClusterScope); + Setting.listSetting("network.publish_host", GLOBAL_NETWORK_HOST_SETTING, s -> s, SettingsProperty.ClusterScope); + public static final Setting NETWORK_SERVER = Setting.boolSetting("network.server", true, SettingsProperty.ClusterScope); public static final class TcpSettings { public static final Setting TCP_NO_DELAY = - Setting.boolSetting("network.tcp.no_delay", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("network.tcp.no_delay", true, SettingsProperty.ClusterScope); public static final Setting TCP_KEEP_ALIVE = - Setting.boolSetting("network.tcp.keep_alive", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("network.tcp.keep_alive", true, SettingsProperty.ClusterScope); public static final Setting TCP_REUSE_ADDRESS = - Setting.boolSetting("network.tcp.reuse_address", NetworkUtils.defaultReuseAddress(), false, SettingsProperty.ClusterScope); + Setting.boolSetting("network.tcp.reuse_address", NetworkUtils.defaultReuseAddress(), SettingsProperty.ClusterScope); public static final Setting TCP_SEND_BUFFER_SIZE = - Setting.byteSizeSetting("network.tcp.send_buffer_size", new ByteSizeValue(-1), false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("network.tcp.send_buffer_size", new ByteSizeValue(-1), 
SettingsProperty.ClusterScope); public static final Setting TCP_RECEIVE_BUFFER_SIZE = - Setting.byteSizeSetting("network.tcp.receive_buffer_size", new ByteSizeValue(-1), false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("network.tcp.receive_buffer_size", new ByteSizeValue(-1), SettingsProperty.ClusterScope); public static final Setting TCP_BLOCKING = - Setting.boolSetting("network.tcp.blocking", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("network.tcp.blocking", false, SettingsProperty.ClusterScope); public static final Setting TCP_BLOCKING_SERVER = - Setting.boolSetting("network.tcp.blocking_server", TCP_BLOCKING, false, SettingsProperty.ClusterScope); + Setting.boolSetting("network.tcp.blocking_server", TCP_BLOCKING, SettingsProperty.ClusterScope); public static final Setting TCP_BLOCKING_CLIENT = - Setting.boolSetting("network.tcp.blocking_client", TCP_BLOCKING, false, SettingsProperty.ClusterScope); + Setting.boolSetting("network.tcp.blocking_client", TCP_BLOCKING, SettingsProperty.ClusterScope); public static final Setting TCP_CONNECT_TIMEOUT = - Setting.timeSetting("network.tcp.connect_timeout", new TimeValue(30, TimeUnit.SECONDS), false, SettingsProperty.ClusterScope); + Setting.timeSetting("network.tcp.connect_timeout", new TimeValue(30, TimeUnit.SECONDS), SettingsProperty.ClusterScope); } /** diff --git a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index a8f06fd9cf52..ae056460cd83 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -135,8 +135,8 @@ public final class IndexScopedSettings extends AbstractScopedSettings { EngineConfig.INDEX_CODEC_SETTING, IndexWarmer.INDEX_NORMS_LOADING_SETTING, // this sucks but we can't really validate all the analyzers/similarity in here - 
Setting.groupSetting("index.similarity.", false, SettingsProperty.IndexScope), // this allows similarity settings to be passed - Setting.groupSetting("index.analysis.", false, SettingsProperty.IndexScope) // this allows analysis settings to be passed + Setting.groupSetting("index.similarity.", SettingsProperty.IndexScope), // this allows similarity settings to be passed + Setting.groupSetting("index.analysis.", SettingsProperty.IndexScope) // this allows analysis settings to be passed ))); diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index ce66eda1766d..ce20c5219322 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -47,12 +47,12 @@ import java.util.stream.Collectors; /** * A setting. Encapsulates typical stuff like default value, parsing, and scope. - * Some (dynamic=true) can by modified at run time using the API. + * Some (SettingsProperty.Dynamic) can be modified at run time using the API. * All settings inside elasticsearch or in any of the plugins should use this type-safe and generic settings infrastructure * together with {@link AbstractScopedSettings}. This class contains several utility methods that makes it straight forward * to add settings for the majority of the cases. For instance a simple boolean settings can be defined like this: *
      {@code
      - * public static final Setting; MY_BOOLEAN = Setting.boolSetting("my.bool.setting", true, false, SettingsProperty.ClusterScope);}
      + * public static final Setting; MY_BOOLEAN = Setting.boolSetting("my.bool.setting", true, SettingsProperty.ClusterScope);}
        * 
      * To retrieve the value of the setting a {@link Settings} object can be passed directly to the {@link Setting#get(Settings)} method. *
      @@ -64,24 +64,48 @@ import java.util.stream.Collectors;
        *     RED, GREEN, BLUE;
        * }
        * public static final Setting MY_BOOLEAN =
      - *     new Setting<>("my.color.setting", Color.RED.toString(), Color::valueOf, false, SettingsProperty.ClusterScope);
      + *     new Setting<>("my.color.setting", Color.RED.toString(), Color::valueOf, SettingsProperty.ClusterScope);
        * }
        * 
      */ public class Setting extends ToXContentToBytes { public enum SettingsProperty { + /** + * should be filtered in some api (mask password/credentials) + */ Filtered, + + /** + * iff this setting can be dynamically updateable + */ Dynamic, + + /** + * Cluster scope. + * @See IndexScope + * @See NodeScope + */ ClusterScope, + + /** + * Node scope. + * @See ClusterScope + * @See IndexScope + */ NodeScope, + + /** + * Index scope. + * @See ClusterScope + * @See NodeScope + */ IndexScope; } private final String key; protected final Function defaultValue; private final Function parser; - private final boolean dynamic; private final EnumSet properties; /** @@ -89,16 +113,13 @@ public class Setting extends ToXContentToBytes { * @param key the settings key for this setting. * @param defaultValue a default value function that returns the default values string representation. * @param parser a parser that parses the string rep into a complex datatype. - * @param dynamic true iff this setting can be dynamically updateable * @param properties properties for this setting like scope, filtering... */ - public Setting(String key, Function defaultValue, Function parser, boolean dynamic, - SettingsProperty... properties) { + public Setting(String key, Function defaultValue, Function parser, SettingsProperty... properties) { assert parser.apply(defaultValue.apply(Settings.EMPTY)) != null || this.isGroupSetting(): "parser returned null"; this.key = key; this.defaultValue = defaultValue; this.parser = parser; - this.dynamic = dynamic; if (properties.length == 0) { this.properties = EnumSet.of(SettingsProperty.NodeScope); } else { @@ -111,11 +132,10 @@ public class Setting extends ToXContentToBytes { * @param key the settings key for this setting. * @param defaultValue a default value. * @param parser a parser that parses the string rep into a complex datatype. 
- * @param dynamic true iff this setting can be dynamically updateable * @param properties properties for this setting like scope, filtering... */ - public Setting(String key, String defaultValue, Function parser, boolean dynamic, SettingsProperty... properties) { - this(key, s -> defaultValue, parser, dynamic, properties); + public Setting(String key, String defaultValue, Function parser, SettingsProperty... properties) { + this(key, s -> defaultValue, parser, properties); } /** @@ -123,11 +143,10 @@ public class Setting extends ToXContentToBytes { * @param key the settings key for this setting. * @param fallBackSetting a setting to fall back to if the current setting is not set. * @param parser a parser that parses the string rep into a complex datatype. - * @param dynamic true iff this setting can be dynamically updateable * @param properties properties for this setting like scope, filtering... */ - public Setting(String key, Setting fallBackSetting, Function parser, boolean dynamic, SettingsProperty... properties) { - this(key, fallBackSetting::getRaw, parser, dynamic, properties); + public Setting(String key, Setting fallBackSetting, Function parser, SettingsProperty... 
properties) { + this(key, fallBackSetting::getRaw, parser, properties); } /** @@ -145,7 +164,7 @@ public class Setting extends ToXContentToBytes { * Returns true if this setting is dynamically updateable, otherwise false */ public final boolean isDynamic() { - return dynamic; + return properties.contains(SettingsProperty.Dynamic); } /** @@ -261,7 +280,6 @@ public class Setting extends ToXContentToBytes { builder.startObject(); builder.field("key", key); builder.field("properties", properties); - builder.field("dynamic", dynamic); builder.field("is_group_setting", isGroupSetting()); builder.field("default", defaultValue.apply(Settings.EMPTY)); builder.endObject(); @@ -380,35 +398,34 @@ public class Setting extends ToXContentToBytes { } - public static Setting floatSetting(String key, float defaultValue, boolean dynamic, SettingsProperty... properties) { - return new Setting<>(key, (s) -> Float.toString(defaultValue), Float::parseFloat, dynamic, properties); + public static Setting floatSetting(String key, float defaultValue, SettingsProperty... properties) { + return new Setting<>(key, (s) -> Float.toString(defaultValue), Float::parseFloat, properties); } - public static Setting floatSetting(String key, float defaultValue, float minValue, boolean dynamic, SettingsProperty... properties) { + public static Setting floatSetting(String key, float defaultValue, float minValue, SettingsProperty... properties) { return new Setting<>(key, (s) -> Float.toString(defaultValue), (s) -> { float value = Float.parseFloat(s); if (value < minValue) { throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); } return value; - }, dynamic, properties); + }, properties); } - public static Setting intSetting(String key, int defaultValue, int minValue, int maxValue, boolean dynamic, - SettingsProperty... 
properties) { - return new Setting<>(key, (s) -> Integer.toString(defaultValue), (s) -> parseInt(s, minValue, maxValue, key), dynamic, properties); + public static Setting intSetting(String key, int defaultValue, int minValue, int maxValue, SettingsProperty... properties) { + return new Setting<>(key, (s) -> Integer.toString(defaultValue), (s) -> parseInt(s, minValue, maxValue, key), properties); } - public static Setting intSetting(String key, int defaultValue, int minValue, boolean dynamic, SettingsProperty... properties) { - return new Setting<>(key, (s) -> Integer.toString(defaultValue), (s) -> parseInt(s, minValue, key), dynamic, properties); + public static Setting intSetting(String key, int defaultValue, int minValue, SettingsProperty... properties) { + return new Setting<>(key, (s) -> Integer.toString(defaultValue), (s) -> parseInt(s, minValue, key), properties); } - public static Setting longSetting(String key, long defaultValue, long minValue, boolean dynamic, SettingsProperty... properties) { - return new Setting<>(key, (s) -> Long.toString(defaultValue), (s) -> parseLong(s, minValue, key), dynamic, properties); + public static Setting longSetting(String key, long defaultValue, long minValue, SettingsProperty... properties) { + return new Setting<>(key, (s) -> Long.toString(defaultValue), (s) -> parseLong(s, minValue, key), properties); } - public static Setting simpleString(String key, boolean dynamic, SettingsProperty... properties) { - return new Setting<>(key, s -> "", Function.identity(), dynamic, properties); + public static Setting simpleString(String key, SettingsProperty... properties) { + return new Setting<>(key, s -> "", Function.identity(), properties); } public static int parseInt(String s, int minValue, String key) { @@ -434,58 +451,57 @@ public class Setting extends ToXContentToBytes { return value; } - public static Setting intSetting(String key, int defaultValue, boolean dynamic, SettingsProperty... 
properties) { - return intSetting(key, defaultValue, Integer.MIN_VALUE, dynamic, properties); + public static Setting intSetting(String key, int defaultValue, SettingsProperty... properties) { + return intSetting(key, defaultValue, Integer.MIN_VALUE, properties); } - public static Setting boolSetting(String key, boolean defaultValue, boolean dynamic, SettingsProperty... properties) { - return new Setting<>(key, (s) -> Boolean.toString(defaultValue), Booleans::parseBooleanExact, dynamic, properties); + public static Setting boolSetting(String key, boolean defaultValue, SettingsProperty... properties) { + return new Setting<>(key, (s) -> Boolean.toString(defaultValue), Booleans::parseBooleanExact, properties); } - public static Setting boolSetting(String key, Setting fallbackSetting, boolean dynamic, - SettingsProperty... properties) { - return new Setting<>(key, fallbackSetting, Booleans::parseBooleanExact, dynamic, properties); + public static Setting boolSetting(String key, Setting fallbackSetting, SettingsProperty... properties) { + return new Setting<>(key, fallbackSetting, Booleans::parseBooleanExact, properties); } - public static Setting byteSizeSetting(String key, String percentage, boolean dynamic, SettingsProperty... properties) { - return new Setting<>(key, (s) -> percentage, (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, key), dynamic, properties); + public static Setting byteSizeSetting(String key, String percentage, SettingsProperty... properties) { + return new Setting<>(key, (s) -> percentage, (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, key), properties); } - public static Setting byteSizeSetting(String key, ByteSizeValue value, boolean dynamic, SettingsProperty... properties) { - return byteSizeSetting(key, (s) -> value.toString(), dynamic, properties); + public static Setting byteSizeSetting(String key, ByteSizeValue value, SettingsProperty... 
properties) { + return byteSizeSetting(key, (s) -> value.toString(), properties); } - public static Setting byteSizeSetting(String key, Setting fallbackSettings, boolean dynamic, + public static Setting byteSizeSetting(String key, Setting fallbackSettings, SettingsProperty... properties) { - return byteSizeSetting(key, fallbackSettings::getRaw, dynamic, properties); + return byteSizeSetting(key, fallbackSettings::getRaw, properties); } - public static Setting byteSizeSetting(String key, Function defaultValue, boolean dynamic, + public static Setting byteSizeSetting(String key, Function defaultValue, SettingsProperty... properties) { - return new Setting<>(key, defaultValue, (s) -> ByteSizeValue.parseBytesSizeValue(s, key), dynamic, properties); + return new Setting<>(key, defaultValue, (s) -> ByteSizeValue.parseBytesSizeValue(s, key), properties); } - public static Setting positiveTimeSetting(String key, TimeValue defaultValue, boolean dynamic, SettingsProperty... properties) { - return timeSetting(key, defaultValue, TimeValue.timeValueMillis(0), dynamic, properties); + public static Setting positiveTimeSetting(String key, TimeValue defaultValue, SettingsProperty... properties) { + return timeSetting(key, defaultValue, TimeValue.timeValueMillis(0), properties); } public static Setting> listSetting(String key, List defaultStringValue, Function singleValueParser, - boolean dynamic, SettingsProperty... properties) { - return listSetting(key, (s) -> defaultStringValue, singleValueParser, dynamic, properties); + SettingsProperty... properties) { + return listSetting(key, (s) -> defaultStringValue, singleValueParser, properties); } public static Setting> listSetting(String key, Setting> fallbackSetting, Function singleValueParser, - boolean dynamic, SettingsProperty... properties) { - return listSetting(key, (s) -> parseableStringToList(fallbackSetting.getRaw(s)), singleValueParser, dynamic, properties); + SettingsProperty... 
properties) { + return listSetting(key, (s) -> parseableStringToList(fallbackSetting.getRaw(s)), singleValueParser, properties); } public static Setting> listSetting(String key, Function> defaultStringValue, - Function singleValueParser, boolean dynamic, SettingsProperty... properties) { + Function singleValueParser, SettingsProperty... properties) { Function> parser = (s) -> parseableStringToList(s).stream().map(singleValueParser).collect(Collectors.toList()); return new Setting>(key, (s) -> arrayToParsableString(defaultStringValue.apply(s).toArray(Strings.EMPTY_ARRAY)), parser, - dynamic, properties) { + properties) { private final Pattern pattern = Pattern.compile(Pattern.quote(key)+"(\\.\\d+)?"); @Override public String getRaw(Settings settings) { @@ -539,11 +555,11 @@ public class Setting extends ToXContentToBytes { } } - public static Setting groupSetting(String key, boolean dynamic, SettingsProperty... properties) { + public static Setting groupSetting(String key, SettingsProperty... properties) { if (key.endsWith(".") == false) { throw new IllegalArgumentException("key must end with a '.'"); } - return new Setting(key, "", (s) -> null, dynamic, properties) { + return new Setting(key, "", (s) -> null, properties) { @Override public boolean isGroupSetting() { @@ -602,7 +618,7 @@ public class Setting extends ToXContentToBytes { }; } - public static Setting timeSetting(String key, Function defaultValue, TimeValue minValue, boolean dynamic, + public static Setting timeSetting(String key, Function defaultValue, TimeValue minValue, SettingsProperty... 
properties) { return new Setting<>(key, defaultValue, (s) -> { TimeValue timeValue = TimeValue.parseTimeValue(s, null, key); @@ -610,32 +626,29 @@ public class Setting extends ToXContentToBytes { throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); } return timeValue; - }, dynamic, properties); + }, properties); } - public static Setting timeSetting(String key, TimeValue defaultValue, TimeValue minValue, boolean dynamic, - SettingsProperty... properties) { - return timeSetting(key, (s) -> defaultValue.getStringRep(), minValue, dynamic, properties); + public static Setting timeSetting(String key, TimeValue defaultValue, TimeValue minValue, SettingsProperty... properties) { + return timeSetting(key, (s) -> defaultValue.getStringRep(), minValue, properties); } - public static Setting timeSetting(String key, TimeValue defaultValue, boolean dynamic, SettingsProperty... properties) { - return new Setting<>(key, (s) -> defaultValue.toString(), (s) -> TimeValue.parseTimeValue(s, key), dynamic, properties); + public static Setting timeSetting(String key, TimeValue defaultValue, SettingsProperty... properties) { + return new Setting<>(key, (s) -> defaultValue.toString(), (s) -> TimeValue.parseTimeValue(s, key), properties); } - public static Setting timeSetting(String key, Setting fallbackSetting, boolean dynamic, - SettingsProperty... properties) { - return new Setting<>(key, fallbackSetting::getRaw, (s) -> TimeValue.parseTimeValue(s, key), dynamic, properties); + public static Setting timeSetting(String key, Setting fallbackSetting, SettingsProperty... properties) { + return new Setting<>(key, fallbackSetting::getRaw, (s) -> TimeValue.parseTimeValue(s, key), properties); } - public static Setting doubleSetting(String key, double defaultValue, double minValue, boolean dynamic, - SettingsProperty... 
properties) { + public static Setting doubleSetting(String key, double defaultValue, double minValue, SettingsProperty... properties) { return new Setting<>(key, (s) -> Double.toString(defaultValue), (s) -> { final double d = Double.parseDouble(s); if (d < minValue) { throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); } return d; - }, dynamic, properties); + }, properties); } @Override @@ -656,9 +669,9 @@ public class Setting extends ToXContentToBytes { * can easily be added with this setting. Yet, dynamic key settings don't support updaters our of the box unless {@link #getConcreteSetting(String)} * is used to pull the updater. */ - public static Setting dynamicKeySetting(String key, String defaultValue, Function parser, boolean dynamic, + public static Setting dynamicKeySetting(String key, String defaultValue, Function parser, SettingsProperty... properties) { - return new Setting(key, defaultValue, parser, dynamic, properties) { + return new Setting(key, defaultValue, parser, properties) { @Override boolean isGroupSetting() { @@ -678,7 +691,7 @@ public class Setting extends ToXContentToBytes { @Override public Setting getConcreteSetting(String key) { if (match(key)) { - return new Setting<>(key, defaultValue, parser, dynamic, properties); + return new Setting<>(key, defaultValue, parser, properties); } else { throw new IllegalArgumentException("key must match setting but didn't ["+key +"]"); } diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index 6f055e3cf04e..7f4e9c8b6d11 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -43,7 +43,7 @@ public class EsExecutors { * This is used to adjust thread pools sizes etc. per node. 
*/ public static final Setting PROCESSORS_SETTING = - Setting.intSetting("processors", Math.min(32, Runtime.getRuntime().availableProcessors()), 1, false, SettingsProperty.ClusterScope); + Setting.intSetting("processors", Math.min(32, Runtime.getRuntime().availableProcessors()), 1, SettingsProperty.ClusterScope); /** * Returns the number of processors available but at most 32. diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index 798cd7462afa..47c115a47e0c 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -64,8 +64,7 @@ import java.util.concurrent.atomic.AtomicBoolean; public final class ThreadContext implements Closeable, Writeable{ public static final String PREFIX = "request.headers"; - public static final Setting DEFAULT_HEADERS_SETTING = - Setting.groupSetting(PREFIX + ".", false, SettingsProperty.ClusterScope); + public static final Setting DEFAULT_HEADERS_SETTING = Setting.groupSetting(PREFIX + ".", SettingsProperty.ClusterScope); private final Map defaultHeader; private static final ThreadContextStruct DEFAULT_CONTEXT = new ThreadContextStruct(Collections.emptyMap()); private final ContextThreadLocal threadLocal; diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java b/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java index 57ae63c11049..bc0f97400255 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java +++ b/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java @@ -46,10 +46,10 @@ import java.util.function.Function; public class DiscoveryModule extends AbstractModule { public static final Setting DISCOVERY_TYPE_SETTING = - new Setting<>("discovery.type", settings -> DiscoveryNode.localNode(settings) ? 
"local" : "zen", Function.identity(), false, + new Setting<>("discovery.type", settings -> DiscoveryNode.localNode(settings) ? "local" : "zen", Function.identity(), SettingsProperty.ClusterScope); public static final Setting ZEN_MASTER_SERVICE_TYPE_SETTING = - new Setting<>("discovery.zen.masterservice.type", "zen", Function.identity(), false, SettingsProperty.ClusterScope); + new Setting<>("discovery.zen.masterservice.type", "zen", Function.identity(), SettingsProperty.ClusterScope); private final Settings settings; private final List> unicastHostProviders = new ArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java b/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java index 181ee8253c0e..6360c9b77577 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java +++ b/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java @@ -42,10 +42,9 @@ import java.util.concurrent.TimeUnit; public class DiscoveryService extends AbstractLifecycleComponent { public static final Setting INITIAL_STATE_TIMEOUT_SETTING = - Setting.positiveTimeSetting("discovery.initial_state_timeout", TimeValue.timeValueSeconds(30), false, - SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("discovery.initial_state_timeout", TimeValue.timeValueSeconds(30), SettingsProperty.ClusterScope); public static final Setting DISCOVERY_SEED_SETTING = - Setting.longSetting("discovery.id.seed", 0L, Long.MIN_VALUE, false, SettingsProperty.ClusterScope); + Setting.longSetting("discovery.id.seed", 0L, Long.MIN_VALUE, SettingsProperty.ClusterScope); private static class InitialStateListener implements InitialStateDiscoveryListener { diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java b/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java index 64c13fabe7cb..c303882a53a6 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java +++ 
b/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java @@ -45,7 +45,8 @@ public class DiscoverySettings extends AbstractComponent { * will continue to process the next cluster state update after this time has elapsed **/ public static final Setting PUBLISH_TIMEOUT_SETTING = - Setting.positiveTimeSetting("discovery.zen.publish_timeout", TimeValue.timeValueSeconds(30), true, SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("discovery.zen.publish_timeout", TimeValue.timeValueSeconds(30), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); /** * sets the timeout for receiving enough acks for a specific cluster state and committing it. failing @@ -53,12 +54,13 @@ public class DiscoverySettings extends AbstractComponent { */ public static final Setting COMMIT_TIMEOUT_SETTING = new Setting<>("discovery.zen.commit_timeout", (s) -> PUBLISH_TIMEOUT_SETTING.getRaw(s), - (s) -> TimeValue.parseTimeValue(s, TimeValue.timeValueSeconds(30), "discovery.zen.commit_timeout"), true, - SettingsProperty.ClusterScope); + (s) -> TimeValue.parseTimeValue(s, TimeValue.timeValueSeconds(30), "discovery.zen.commit_timeout"), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting NO_MASTER_BLOCK_SETTING = - new Setting<>("discovery.zen.no_master_block", "write", DiscoverySettings::parseNoMasterBlock, true, SettingsProperty.ClusterScope); + new Setting<>("discovery.zen.no_master_block", "write", DiscoverySettings::parseNoMasterBlock, + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting PUBLISH_DIFF_ENABLE_SETTING = - Setting.boolSetting("discovery.zen.publish_diff.enable", true, true, SettingsProperty.ClusterScope); + Setting.boolSetting("discovery.zen.publish_diff.enable", true, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); private volatile ClusterBlock noMasterBlock; private volatile TimeValue publishTimeout; diff --git 
a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index 653c77945c98..038d63ee0ad9 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -91,27 +91,27 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; public class ZenDiscovery extends AbstractLifecycleComponent implements Discovery, PingContextProvider { public final static Setting PING_TIMEOUT_SETTING = - Setting.positiveTimeSetting("discovery.zen.ping_timeout", timeValueSeconds(3), false, SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("discovery.zen.ping_timeout", timeValueSeconds(3), SettingsProperty.ClusterScope); public final static Setting JOIN_TIMEOUT_SETTING = Setting.timeSetting("discovery.zen.join_timeout", settings -> TimeValue.timeValueMillis(PING_TIMEOUT_SETTING.get(settings).millis() * 20).toString(), - TimeValue.timeValueMillis(0), false, SettingsProperty.ClusterScope); + TimeValue.timeValueMillis(0), SettingsProperty.ClusterScope); public final static Setting JOIN_RETRY_ATTEMPTS_SETTING = - Setting.intSetting("discovery.zen.join_retry_attempts", 3, 1, false, SettingsProperty.ClusterScope); + Setting.intSetting("discovery.zen.join_retry_attempts", 3, 1, SettingsProperty.ClusterScope); public final static Setting JOIN_RETRY_DELAY_SETTING = - Setting.positiveTimeSetting("discovery.zen.join_retry_delay", TimeValue.timeValueMillis(100), false, SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("discovery.zen.join_retry_delay", TimeValue.timeValueMillis(100), SettingsProperty.ClusterScope); public final static Setting MAX_PINGS_FROM_ANOTHER_MASTER_SETTING = - Setting.intSetting("discovery.zen.max_pings_from_another_master", 3, 1, false, SettingsProperty.ClusterScope); + Setting.intSetting("discovery.zen.max_pings_from_another_master", 3, 1, 
SettingsProperty.ClusterScope); public final static Setting SEND_LEAVE_REQUEST_SETTING = - Setting.boolSetting("discovery.zen.send_leave_request", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("discovery.zen.send_leave_request", true, SettingsProperty.ClusterScope); public final static Setting MASTER_ELECTION_FILTER_CLIENT_SETTING = - Setting.boolSetting("discovery.zen.master_election.filter_client", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("discovery.zen.master_election.filter_client", true, SettingsProperty.ClusterScope); public final static Setting MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING = Setting.timeSetting("discovery.zen.master_election.wait_for_joins_timeout", settings -> TimeValue.timeValueMillis(JOIN_TIMEOUT_SETTING.get(settings).millis() / 2).toString(), TimeValue.timeValueMillis(0), - false, SettingsProperty.ClusterScope); + SettingsProperty.ClusterScope); public final static Setting MASTER_ELECTION_FILTER_DATA_SETTING = - Setting.boolSetting("discovery.zen.master_election.filter_data", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("discovery.zen.master_election.filter_data", false, SettingsProperty.ClusterScope); public static final String DISCOVERY_REJOIN_ACTION_NAME = "internal:discovery/zen/rejoin"; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java b/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java index cd418e369c48..8a35c6615e7f 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java @@ -42,7 +42,7 @@ import java.util.List; public class ElectMasterService extends AbstractComponent { public static final Setting DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING = - Setting.intSetting("discovery.zen.minimum_master_nodes", -1, true, SettingsProperty.ClusterScope); + 
Setting.intSetting("discovery.zen.minimum_master_nodes", -1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); // This is the minimum version a master needs to be on, otherwise it gets ignored // This is based on the minimum compatible version of the current version this node is on diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java b/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java index c4247ea15df4..6fc575d51cdf 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java @@ -38,15 +38,15 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; public abstract class FaultDetection extends AbstractComponent { public static final Setting CONNECT_ON_NETWORK_DISCONNECT_SETTING = - Setting.boolSetting("discovery.zen.fd.connect_on_network_disconnect", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("discovery.zen.fd.connect_on_network_disconnect", false, SettingsProperty.ClusterScope); public static final Setting PING_INTERVAL_SETTING = - Setting.positiveTimeSetting("discovery.zen.fd.ping_interval", timeValueSeconds(1), false, SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("discovery.zen.fd.ping_interval", timeValueSeconds(1), SettingsProperty.ClusterScope); public static final Setting PING_TIMEOUT_SETTING = - Setting.timeSetting("discovery.zen.fd.ping_timeout", timeValueSeconds(30), false, SettingsProperty.ClusterScope); + Setting.timeSetting("discovery.zen.fd.ping_timeout", timeValueSeconds(30), SettingsProperty.ClusterScope); public static final Setting PING_RETRIES_SETTING = - Setting.intSetting("discovery.zen.fd.ping_retries", 3, false, SettingsProperty.ClusterScope); + Setting.intSetting("discovery.zen.fd.ping_retries", 3, SettingsProperty.ClusterScope); public static final Setting REGISTER_CONNECTION_LISTENER_SETTING = - 
Setting.boolSetting("discovery.zen.fd.register_connection_listener", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("discovery.zen.fd.register_connection_listener", true, SettingsProperty.ClusterScope); protected final ThreadPool threadPool; protected final ClusterName clusterName; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java index 24191ccf4fc8..35e5688f2aa7 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java @@ -88,10 +88,10 @@ public class UnicastZenPing extends AbstractLifecycleComponent implemen public static final String ACTION_NAME = "internal:discovery/zen/unicast"; public static final Setting> DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING = - Setting.listSetting("discovery.zen.ping.unicast.hosts", Collections.emptyList(), Function.identity(), false, + Setting.listSetting("discovery.zen.ping.unicast.hosts", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); public static final Setting DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING = - Setting.intSetting("discovery.zen.ping.unicast.concurrent_connects", 10, 0, false, SettingsProperty.ClusterScope); + Setting.intSetting("discovery.zen.ping.unicast.concurrent_connects", 10, 0, SettingsProperty.ClusterScope); // these limits are per-address public static final int LIMIT_FOREIGN_PORTS_COUNT = 1; diff --git a/core/src/main/java/org/elasticsearch/env/Environment.java b/core/src/main/java/org/elasticsearch/env/Environment.java index 143ddf699012..0e0ab1ace200 100644 --- a/core/src/main/java/org/elasticsearch/env/Environment.java +++ b/core/src/main/java/org/elasticsearch/env/Environment.java @@ -47,18 +47,17 @@ import static org.elasticsearch.common.Strings.cleanPath; // TODO: move PathUtils to be 
package-private here instead of // public+forbidden api! public class Environment { - public static final Setting PATH_HOME_SETTING = Setting.simpleString("path.home", false, SettingsProperty.ClusterScope); - public static final Setting PATH_CONF_SETTING = Setting.simpleString("path.conf", false, SettingsProperty.ClusterScope); - public static final Setting PATH_SCRIPTS_SETTING = Setting.simpleString("path.scripts", false, SettingsProperty.ClusterScope); + public static final Setting PATH_HOME_SETTING = Setting.simpleString("path.home", SettingsProperty.ClusterScope); + public static final Setting PATH_CONF_SETTING = Setting.simpleString("path.conf", SettingsProperty.ClusterScope); + public static final Setting PATH_SCRIPTS_SETTING = Setting.simpleString("path.scripts", SettingsProperty.ClusterScope); public static final Setting> PATH_DATA_SETTING = - Setting.listSetting("path.data", Collections.emptyList(), Function.identity(), false, SettingsProperty.ClusterScope); - public static final Setting PATH_LOGS_SETTING = Setting.simpleString("path.logs", false, SettingsProperty.ClusterScope); - public static final Setting PATH_PLUGINS_SETTING = Setting.simpleString("path.plugins", false, SettingsProperty.ClusterScope); + Setting.listSetting("path.data", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); + public static final Setting PATH_LOGS_SETTING = Setting.simpleString("path.logs", SettingsProperty.ClusterScope); + public static final Setting PATH_PLUGINS_SETTING = Setting.simpleString("path.plugins", SettingsProperty.ClusterScope); public static final Setting> PATH_REPO_SETTING = - Setting.listSetting("path.repo", Collections.emptyList(), Function.identity(), false, SettingsProperty.ClusterScope); - public static final Setting PATH_SHARED_DATA_SETTING = - Setting.simpleString("path.shared_data", false, SettingsProperty.ClusterScope); - public static final Setting PIDFILE_SETTING = Setting.simpleString("pidfile", false, 
SettingsProperty.ClusterScope); + Setting.listSetting("path.repo", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); + public static final Setting PATH_SHARED_DATA_SETTING = Setting.simpleString("path.shared_data", SettingsProperty.ClusterScope); + public static final Setting PIDFILE_SETTING = Setting.simpleString("pidfile", SettingsProperty.ClusterScope); private final Settings settings; diff --git a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java index f6d64b3406b4..70bcad3c556a 100644 --- a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -135,20 +135,20 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { /** * Maximum number of data nodes that should run in an environment. */ - public static final Setting MAX_LOCAL_STORAGE_NODES_SETTING = Setting.intSetting("node.max_local_storage_nodes", 50, 1, false, + public static final Setting MAX_LOCAL_STORAGE_NODES_SETTING = Setting.intSetting("node.max_local_storage_nodes", 50, 1, SettingsProperty.ClusterScope); /** * If true automatically append node id to custom data paths. */ - public static final Setting ADD_NODE_ID_TO_CUSTOM_PATH = Setting.boolSetting("node.add_id_to_custom_path", true, false, - SettingsProperty.ClusterScope); + public static final Setting ADD_NODE_ID_TO_CUSTOM_PATH = + Setting.boolSetting("node.add_id_to_custom_path", true, SettingsProperty.ClusterScope); /** * If true the [verbose] SegmentInfos.infoStream logging is sent to System.out. 
*/ - public static final Setting ENABLE_LUCENE_SEGMENT_INFOS_TRACE_SETTING = Setting - .boolSetting("node.enable_lucene_segment_infos_trace", false, false, SettingsProperty.ClusterScope); + public static final Setting ENABLE_LUCENE_SEGMENT_INFOS_TRACE_SETTING = + Setting.boolSetting("node.enable_lucene_segment_infos_trace", false, SettingsProperty.ClusterScope); public static final String NODES_FOLDER = "nodes"; public static final String INDICES_FOLDER = "indices"; diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java index 8c17325c08c6..219c0725b6b9 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java @@ -54,19 +54,19 @@ import java.util.concurrent.atomic.AtomicBoolean; public class GatewayService extends AbstractLifecycleComponent implements ClusterStateListener { public static final Setting EXPECTED_NODES_SETTING = - Setting.intSetting("gateway.expected_nodes", -1, -1, false, SettingsProperty.ClusterScope); + Setting.intSetting("gateway.expected_nodes", -1, -1, SettingsProperty.ClusterScope); public static final Setting EXPECTED_DATA_NODES_SETTING = - Setting.intSetting("gateway.expected_data_nodes", -1, -1, false, SettingsProperty.ClusterScope); + Setting.intSetting("gateway.expected_data_nodes", -1, -1, SettingsProperty.ClusterScope); public static final Setting EXPECTED_MASTER_NODES_SETTING = - Setting.intSetting("gateway.expected_master_nodes", -1, -1, false, SettingsProperty.ClusterScope); + Setting.intSetting("gateway.expected_master_nodes", -1, -1, SettingsProperty.ClusterScope); public static final Setting RECOVER_AFTER_TIME_SETTING = - Setting.positiveTimeSetting("gateway.recover_after_time", TimeValue.timeValueMillis(0), false, SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("gateway.recover_after_time", TimeValue.timeValueMillis(0), 
SettingsProperty.ClusterScope); public static final Setting RECOVER_AFTER_NODES_SETTING = - Setting.intSetting("gateway.recover_after_nodes", -1, -1, false, SettingsProperty.ClusterScope); + Setting.intSetting("gateway.recover_after_nodes", -1, -1, SettingsProperty.ClusterScope); public static final Setting RECOVER_AFTER_DATA_NODES_SETTING = - Setting.intSetting("gateway.recover_after_data_nodes", -1, -1, false, SettingsProperty.ClusterScope); + Setting.intSetting("gateway.recover_after_data_nodes", -1, -1, SettingsProperty.ClusterScope); public static final Setting RECOVER_AFTER_MASTER_NODES_SETTING = - Setting.intSetting("gateway.recover_after_master_nodes", 0, 0, false, SettingsProperty.ClusterScope); + Setting.intSetting("gateway.recover_after_master_nodes", 0, 0, SettingsProperty.ClusterScope); public static final ClusterBlock STATE_NOT_RECOVERED_BLOCK = new ClusterBlock(1, "state not recovered / initialized", true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL); diff --git a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java index 018262c0304b..8c4068fe2625 100644 --- a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java @@ -70,11 +70,11 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { public static final Setting NODE_INITIAL_SHARDS_SETTING = new Setting<>("gateway.initial_shards", (settings) -> settings.get("gateway.local.initial_shards", "quorum"), INITIAL_SHARDS_PARSER, - true, SettingsProperty.ClusterScope); + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); @Deprecated public static final Setting INDEX_RECOVERY_INITIAL_SHARDS_SETTING = - new Setting<>("index.recovery.initial_shards", (settings) -> NODE_INITIAL_SHARDS_SETTING.get(settings) , INITIAL_SHARDS_PARSER, true, - SettingsProperty.IndexScope); + new 
Setting<>("index.recovery.initial_shards", (settings) -> NODE_INITIAL_SHARDS_SETTING.get(settings) , INITIAL_SHARDS_PARSER, + SettingsProperty.Dynamic, SettingsProperty.IndexScope); public PrimaryShardAllocator(Settings settings) { super(settings); diff --git a/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java b/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java index 2332d8704e64..1a315b717aa9 100644 --- a/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java +++ b/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java @@ -33,50 +33,50 @@ import static org.elasticsearch.common.settings.Setting.listSetting; public final class HttpTransportSettings { public static final Setting SETTING_CORS_ENABLED = - Setting.boolSetting("http.cors.enabled", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("http.cors.enabled", false, SettingsProperty.ClusterScope); public static final Setting SETTING_CORS_ALLOW_ORIGIN = - new Setting("http.cors.allow-origin", "", (value) -> value, false, SettingsProperty.ClusterScope); + new Setting("http.cors.allow-origin", "", (value) -> value, SettingsProperty.ClusterScope); public static final Setting SETTING_CORS_MAX_AGE = - Setting.intSetting("http.cors.max-age", 1728000, false, SettingsProperty.ClusterScope); + Setting.intSetting("http.cors.max-age", 1728000, SettingsProperty.ClusterScope); public static final Setting SETTING_CORS_ALLOW_METHODS = - new Setting("http.cors.allow-methods", "OPTIONS, HEAD, GET, POST, PUT, DELETE", (value) -> value, false, SettingsProperty.ClusterScope); + new Setting("http.cors.allow-methods", "OPTIONS, HEAD, GET, POST, PUT, DELETE", (value) -> value, SettingsProperty.ClusterScope); public static final Setting SETTING_CORS_ALLOW_HEADERS = - new Setting("http.cors.allow-headers", "X-Requested-With, Content-Type, Content-Length", (value) -> value, false, SettingsProperty.ClusterScope); + new Setting("http.cors.allow-headers", 
"X-Requested-With, Content-Type, Content-Length", (value) -> value, SettingsProperty.ClusterScope); public static final Setting SETTING_CORS_ALLOW_CREDENTIALS = - Setting.boolSetting("http.cors.allow-credentials", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("http.cors.allow-credentials", false, SettingsProperty.ClusterScope); public static final Setting SETTING_PIPELINING = - Setting.boolSetting("http.pipelining", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("http.pipelining", true, SettingsProperty.ClusterScope); public static final Setting SETTING_PIPELINING_MAX_EVENTS = - Setting.intSetting("http.pipelining.max_events", 10000, false, SettingsProperty.ClusterScope); + Setting.intSetting("http.pipelining.max_events", 10000, SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_COMPRESSION = - Setting.boolSetting("http.compression", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("http.compression", false, SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_COMPRESSION_LEVEL = - Setting.intSetting("http.compression_level", 6, false, SettingsProperty.ClusterScope); + Setting.intSetting("http.compression_level", 6, SettingsProperty.ClusterScope); public static final Setting> SETTING_HTTP_HOST = - listSetting("http.host", emptyList(), s -> s, false, SettingsProperty.ClusterScope); + listSetting("http.host", emptyList(), s -> s, SettingsProperty.ClusterScope); public static final Setting> SETTING_HTTP_PUBLISH_HOST = - listSetting("http.publish_host", SETTING_HTTP_HOST, s -> s, false, SettingsProperty.ClusterScope); + listSetting("http.publish_host", SETTING_HTTP_HOST, s -> s, SettingsProperty.ClusterScope); public static final Setting> SETTING_HTTP_BIND_HOST = - listSetting("http.bind_host", SETTING_HTTP_HOST, s -> s, false, SettingsProperty.ClusterScope); + listSetting("http.bind_host", SETTING_HTTP_HOST, s -> s, SettingsProperty.ClusterScope); public static final 
Setting SETTING_HTTP_PORT = - new Setting("http.port", "9200-9300", PortsRange::new, false, SettingsProperty.ClusterScope); + new Setting("http.port", "9200-9300", PortsRange::new, SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_PUBLISH_PORT = - Setting.intSetting("http.publish_port", 0, 0, false, SettingsProperty.ClusterScope); + Setting.intSetting("http.publish_port", 0, 0, SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_DETAILED_ERRORS_ENABLED = - Setting.boolSetting("http.detailed_errors.enabled", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("http.detailed_errors.enabled", true, SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_MAX_CONTENT_LENGTH = - Setting.byteSizeSetting("http.max_content_length", new ByteSizeValue(100, ByteSizeUnit.MB), false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("http.max_content_length", new ByteSizeValue(100, ByteSizeUnit.MB), SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_MAX_CHUNK_SIZE = - Setting.byteSizeSetting("http.max_chunk_size", new ByteSizeValue(8, ByteSizeUnit.KB), false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("http.max_chunk_size", new ByteSizeValue(8, ByteSizeUnit.KB), SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_MAX_HEADER_SIZE = - Setting.byteSizeSetting("http.max_header_size", new ByteSizeValue(8, ByteSizeUnit.KB), false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("http.max_header_size", new ByteSizeValue(8, ByteSizeUnit.KB), SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_MAX_INITIAL_LINE_LENGTH = - Setting.byteSizeSetting("http.max_initial_line_length", new ByteSizeValue(4, ByteSizeUnit.KB), false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("http.max_initial_line_length", new ByteSizeValue(4, ByteSizeUnit.KB), SettingsProperty.ClusterScope); // don't reset cookies by default, 
since I don't think we really need to // note, parsing cookies was fixed in netty 3.5.1 regarding stack allocation, but still, currently, we don't need cookies public static final Setting SETTING_HTTP_RESET_COOKIES = - Setting.boolSetting("http.reset_cookies", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("http.reset_cookies", false, SettingsProperty.ClusterScope); private HttpTransportSettings() { } diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java index f021ea812f9e..e955c8f14333 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java +++ b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java @@ -117,29 +117,29 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent SETTING_HTTP_NETTY_MAX_CUMULATION_BUFFER_CAPACITY = - Setting.byteSizeSetting("http.netty.max_cumulation_buffer_capacity", new ByteSizeValue(-1), false, + Setting.byteSizeSetting("http.netty.max_cumulation_buffer_capacity", new ByteSizeValue(-1), SettingsProperty.ClusterScope); public static Setting SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = - Setting.intSetting("http.netty.max_composite_buffer_components", -1, false, SettingsProperty.ClusterScope); + Setting.intSetting("http.netty.max_composite_buffer_components", -1, SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_WORKER_COUNT = new Setting<>("http.netty.worker_count", (s) -> Integer.toString(EsExecutors.boundedNumberOfProcessors(s) * 2), - (s) -> Setting.parseInt(s, 1, "http.netty.worker_count"), false, SettingsProperty.ClusterScope); + (s) -> Setting.parseInt(s, 1, "http.netty.worker_count"), SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_TCP_NO_DELAY = - boolSetting("http.tcp_no_delay", NetworkService.TcpSettings.TCP_NO_DELAY, false, SettingsProperty.ClusterScope); + 
boolSetting("http.tcp_no_delay", NetworkService.TcpSettings.TCP_NO_DELAY, SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_TCP_KEEP_ALIVE = - boolSetting("http.tcp.keep_alive", NetworkService.TcpSettings.TCP_KEEP_ALIVE, false, SettingsProperty.ClusterScope); + boolSetting("http.tcp.keep_alive", NetworkService.TcpSettings.TCP_KEEP_ALIVE, SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_TCP_BLOCKING_SERVER = - boolSetting("http.tcp.blocking_server", NetworkService.TcpSettings.TCP_BLOCKING_SERVER, false, SettingsProperty.ClusterScope); + boolSetting("http.tcp.blocking_server", NetworkService.TcpSettings.TCP_BLOCKING_SERVER, SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_TCP_REUSE_ADDRESS = - boolSetting("http.tcp.reuse_address", NetworkService.TcpSettings.TCP_REUSE_ADDRESS, false, SettingsProperty.ClusterScope); + boolSetting("http.tcp.reuse_address", NetworkService.TcpSettings.TCP_REUSE_ADDRESS, SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_TCP_SEND_BUFFER_SIZE = - Setting.byteSizeSetting("http.tcp.send_buffer_size", NetworkService.TcpSettings.TCP_SEND_BUFFER_SIZE, false, + Setting.byteSizeSetting("http.tcp.send_buffer_size", NetworkService.TcpSettings.TCP_SEND_BUFFER_SIZE, SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_TCP_RECEIVE_BUFFER_SIZE = - Setting.byteSizeSetting("http.tcp.receive_buffer_size", NetworkService.TcpSettings.TCP_RECEIVE_BUFFER_SIZE, false, + Setting.byteSizeSetting("http.tcp.receive_buffer_size", NetworkService.TcpSettings.TCP_RECEIVE_BUFFER_SIZE, SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE = Setting.byteSizeSetting("transport.netty.receive_predictor_size", @@ -152,13 +152,11 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MIN = - byteSizeSetting("http.netty.receive_predictor_min", 
SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE, false, - SettingsProperty.ClusterScope); + byteSizeSetting("http.netty.receive_predictor_min", SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE, SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MAX = - byteSizeSetting("http.netty.receive_predictor_max", SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE, false, - SettingsProperty.ClusterScope); + byteSizeSetting("http.netty.receive_predictor_max", SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE, SettingsProperty.ClusterScope); protected final NetworkService networkService; diff --git a/core/src/main/java/org/elasticsearch/index/IndexModule.java b/core/src/main/java/org/elasticsearch/index/IndexModule.java index 3586e726a401..36bc5e1d3e98 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/core/src/main/java/org/elasticsearch/index/IndexModule.java @@ -67,15 +67,15 @@ import java.util.function.Function; public final class IndexModule { public static final Setting INDEX_STORE_TYPE_SETTING = - new Setting<>("index.store.type", "", Function.identity(), false, SettingsProperty.IndexScope); + new Setting<>("index.store.type", "", Function.identity(), SettingsProperty.IndexScope); public static final String SIMILARITY_SETTINGS_PREFIX = "index.similarity"; public static final String INDEX_QUERY_CACHE = "index"; public static final String NONE_QUERY_CACHE = "none"; public static final Setting INDEX_QUERY_CACHE_TYPE_SETTING = - new Setting<>("index.queries.cache.type", INDEX_QUERY_CACHE, Function.identity(), false, SettingsProperty.IndexScope); + new Setting<>("index.queries.cache.type", INDEX_QUERY_CACHE, Function.identity(), SettingsProperty.IndexScope); // for test purposes only public static final Setting INDEX_QUERY_CACHE_EVERYTHING_SETTING = - Setting.boolSetting("index.queries.cache.everything", false, false, SettingsProperty.IndexScope); + Setting.boolSetting("index.queries.cache.everything", false, 
SettingsProperty.IndexScope); private final IndexSettings indexSettings; private final IndexStoreConfig indexStoreConfig; private final AnalysisRegistry analysisRegistry; diff --git a/core/src/main/java/org/elasticsearch/index/IndexSettings.java b/core/src/main/java/org/elasticsearch/index/IndexSettings.java index dbf298e16f13..bb859f04652e 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/core/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -51,25 +51,25 @@ import java.util.function.Predicate; public final class IndexSettings { public static final Setting DEFAULT_FIELD_SETTING = - new Setting<>("index.query.default_field", AllFieldMapper.NAME, Function.identity(), false, SettingsProperty.IndexScope); + new Setting<>("index.query.default_field", AllFieldMapper.NAME, Function.identity(), SettingsProperty.IndexScope); public static final Setting QUERY_STRING_LENIENT_SETTING = - Setting.boolSetting("index.query_string.lenient", false, false, SettingsProperty.IndexScope); + Setting.boolSetting("index.query_string.lenient", false, SettingsProperty.IndexScope); public static final Setting QUERY_STRING_ANALYZE_WILDCARD = - Setting.boolSetting("indices.query.query_string.analyze_wildcard", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("indices.query.query_string.analyze_wildcard", false, SettingsProperty.ClusterScope); public static final Setting QUERY_STRING_ALLOW_LEADING_WILDCARD = - Setting.boolSetting("indices.query.query_string.allowLeadingWildcard", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("indices.query.query_string.allowLeadingWildcard", true, SettingsProperty.ClusterScope); public static final Setting ALLOW_UNMAPPED = - Setting.boolSetting("index.query.parse.allow_unmapped_fields", true, false, SettingsProperty.IndexScope); + Setting.boolSetting("index.query.parse.allow_unmapped_fields", true, SettingsProperty.IndexScope); public static final Setting 
INDEX_TRANSLOG_SYNC_INTERVAL_SETTING = - Setting.timeSetting("index.translog.sync_interval", TimeValue.timeValueSeconds(5), TimeValue.timeValueMillis(100), false, + Setting.timeSetting("index.translog.sync_interval", TimeValue.timeValueSeconds(5), TimeValue.timeValueMillis(100), SettingsProperty.IndexScope); public static final Setting INDEX_TRANSLOG_DURABILITY_SETTING = new Setting<>("index.translog.durability", Translog.Durability.REQUEST.name(), - (value) -> Translog.Durability.valueOf(value.toUpperCase(Locale.ROOT)), true, SettingsProperty.IndexScope); + (value) -> Translog.Durability.valueOf(value.toUpperCase(Locale.ROOT)), SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_WARMER_ENABLED_SETTING = - Setting.boolSetting("index.warmer.enabled", true, true, SettingsProperty.IndexScope); + Setting.boolSetting("index.warmer.enabled", true, SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_TTL_DISABLE_PURGE_SETTING = - Setting.boolSetting("index.ttl.disable_purge", false, true, SettingsProperty.IndexScope); + Setting.boolSetting("index.ttl.disable_purge", false, SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_CHECK_ON_STARTUP = new Setting<>("index.shard.check_on_startup", "false", (s) -> { switch(s) { case "false": @@ -80,7 +80,7 @@ public final class IndexSettings { default: throw new IllegalArgumentException("unknown value for [index.shard.check_on_startup] must be one of [true, false, fix, checksum] but was: " + s); } - }, false, SettingsProperty.IndexScope); + }, SettingsProperty.IndexScope); /** * Index setting describing the maximum value of from + size on a query. @@ -91,13 +91,13 @@ public final class IndexSettings { * safely. 
*/ public static final Setting MAX_RESULT_WINDOW_SETTING = - Setting.intSetting("index.max_result_window", 10000, 1, true, SettingsProperty.IndexScope); + Setting.intSetting("index.max_result_window", 10000, 1, SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final TimeValue DEFAULT_REFRESH_INTERVAL = new TimeValue(1, TimeUnit.SECONDS); public static final Setting INDEX_REFRESH_INTERVAL_SETTING = - Setting.timeSetting("index.refresh_interval", DEFAULT_REFRESH_INTERVAL, new TimeValue(-1, TimeUnit.MILLISECONDS), true, - SettingsProperty.IndexScope); + Setting.timeSetting("index.refresh_interval", DEFAULT_REFRESH_INTERVAL, new TimeValue(-1, TimeUnit.MILLISECONDS), + SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING = - Setting.byteSizeSetting("index.translog.flush_threshold_size", new ByteSizeValue(512, ByteSizeUnit.MB), true, + Setting.byteSizeSetting("index.translog.flush_threshold_size", new ByteSizeValue(512, ByteSizeUnit.MB), SettingsProperty.Dynamic, SettingsProperty.IndexScope); @@ -107,7 +107,7 @@ public final class IndexSettings { */ public static final TimeValue DEFAULT_GC_DELETES = TimeValue.timeValueSeconds(60); public static final Setting INDEX_GC_DELETES_SETTING = - Setting.timeSetting("index.gc_deletes", DEFAULT_GC_DELETES, new TimeValue(-1, TimeUnit.MILLISECONDS), true, + Setting.timeSetting("index.gc_deletes", DEFAULT_GC_DELETES, new TimeValue(-1, TimeUnit.MILLISECONDS), SettingsProperty.Dynamic, SettingsProperty.IndexScope); private final Index index; diff --git a/core/src/main/java/org/elasticsearch/index/IndexWarmer.java b/core/src/main/java/org/elasticsearch/index/IndexWarmer.java index 1b790240587c..b0b06e7ec67a 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexWarmer.java +++ b/core/src/main/java/org/elasticsearch/index/IndexWarmer.java @@ -57,7 +57,7 @@ public final class IndexWarmer extends AbstractComponent { public static final 
Setting INDEX_NORMS_LOADING_SETTING = new Setting<>("index.norms.loading", MappedFieldType.Loading.LAZY.toString(), (s) -> MappedFieldType.Loading.parse(s, MappedFieldType.Loading.LAZY), - false, SettingsProperty.IndexScope); + SettingsProperty.IndexScope); private final List listeners; IndexWarmer(Settings settings, ThreadPool threadPool, Listener... listeners) { diff --git a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java index c66e05a6c795..eff27e6e04d6 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java +++ b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java @@ -57,20 +57,20 @@ public final class IndexingSlowLog implements IndexingOperationListener { private static final String INDEX_INDEXING_SLOWLOG_PREFIX = "index.indexing.slowlog"; public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.warn", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.info", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.debug", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING = 
Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.trace", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING = - Setting.boolSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".reformat", true, true, SettingsProperty.IndexScope); + Setting.boolSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".reformat", true, SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_INDEXING_SLOWLOG_LEVEL_SETTING = - new Setting<>(INDEX_INDEXING_SLOWLOG_PREFIX +".level", SlowLogLevel.TRACE.name(), SlowLogLevel::parse, true, + new Setting<>(INDEX_INDEXING_SLOWLOG_PREFIX +".level", SlowLogLevel.TRACE.name(), SlowLogLevel::parse, SettingsProperty.Dynamic, SettingsProperty.IndexScope); /** * Reads how much of the source to log. The user can specify any value they @@ -84,7 +84,7 @@ public final class IndexingSlowLog implements IndexingOperationListener { } catch (NumberFormatException e) { return Booleans.parseBoolean(value, true) ? 
Integer.MAX_VALUE : 0; } - }, true, SettingsProperty.IndexScope); + }, SettingsProperty.Dynamic, SettingsProperty.IndexScope); IndexingSlowLog(IndexSettings indexSettings) { this(indexSettings, Loggers.getLogger(INDEX_INDEXING_SLOWLOG_PREFIX + ".index"), diff --git a/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java b/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java index 5d2dc7e5bf2d..35ead01981c8 100644 --- a/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java +++ b/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java @@ -129,25 +129,29 @@ public final class MergePolicyConfig { public static final double DEFAULT_RECLAIM_DELETES_WEIGHT = 2.0d; public static final Setting INDEX_COMPOUND_FORMAT_SETTING = new Setting<>("index.compound_format", Double.toString(TieredMergePolicy.DEFAULT_NO_CFS_RATIO), MergePolicyConfig::parseNoCFSRatio, - true, SettingsProperty.IndexScope); + SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING = - Setting.doubleSetting("index.merge.policy.expunge_deletes_allowed", DEFAULT_EXPUNGE_DELETES_ALLOWED, 0.0d, true, - SettingsProperty.IndexScope); + Setting.doubleSetting("index.merge.policy.expunge_deletes_allowed", DEFAULT_EXPUNGE_DELETES_ALLOWED, 0.0d, + SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING = - Setting.byteSizeSetting("index.merge.policy.floor_segment", DEFAULT_FLOOR_SEGMENT, true, SettingsProperty.IndexScope); + Setting.byteSizeSetting("index.merge.policy.floor_segment", DEFAULT_FLOOR_SEGMENT, + SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING = - Setting.intSetting("index.merge.policy.max_merge_at_once", DEFAULT_MAX_MERGE_AT_ONCE, 2, true, SettingsProperty.IndexScope); + Setting.intSetting("index.merge.policy.max_merge_at_once", 
DEFAULT_MAX_MERGE_AT_ONCE, 2, + SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING = - Setting.intSetting("index.merge.policy.max_merge_at_once_explicit", DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT, 2, true, - SettingsProperty.IndexScope); + Setting.intSetting("index.merge.policy.max_merge_at_once_explicit", DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT, 2, + SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING = - Setting.byteSizeSetting("index.merge.policy.max_merged_segment", DEFAULT_MAX_MERGED_SEGMENT, true, SettingsProperty.IndexScope); + Setting.byteSizeSetting("index.merge.policy.max_merged_segment", DEFAULT_MAX_MERGED_SEGMENT, + SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING = - Setting.doubleSetting("index.merge.policy.segments_per_tier", DEFAULT_SEGMENTS_PER_TIER, 2.0d, true, SettingsProperty.IndexScope); + Setting.doubleSetting("index.merge.policy.segments_per_tier", DEFAULT_SEGMENTS_PER_TIER, 2.0d, + SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING = - Setting.doubleSetting("index.merge.policy.reclaim_deletes_weight", DEFAULT_RECLAIM_DELETES_WEIGHT, 0.0d, true, - SettingsProperty.IndexScope); + Setting.doubleSetting("index.merge.policy.reclaim_deletes_weight", DEFAULT_RECLAIM_DELETES_WEIGHT, 0.0d, + SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final String INDEX_MERGE_ENABLED = "index.merge.enabled"; // don't convert to Setting<> and register... 
we only set this in tests and register via a plugin diff --git a/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java b/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java index e53315c02496..1cfc5c82a70b 100644 --- a/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java +++ b/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java @@ -55,14 +55,14 @@ public final class MergeSchedulerConfig { public static final Setting MAX_THREAD_COUNT_SETTING = new Setting<>("index.merge.scheduler.max_thread_count", (s) -> Integer.toString(Math.max(1, Math.min(4, EsExecutors.boundedNumberOfProcessors(s) / 2))), - (s) -> Setting.parseInt(s, 1, "index.merge.scheduler.max_thread_count"), true, + (s) -> Setting.parseInt(s, 1, "index.merge.scheduler.max_thread_count"), SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting MAX_MERGE_COUNT_SETTING = new Setting<>("index.merge.scheduler.max_merge_count", (s) -> Integer.toString(MAX_THREAD_COUNT_SETTING.get(s) + 5), - (s) -> Setting.parseInt(s, 1, "index.merge.scheduler.max_merge_count"), true, SettingsProperty.IndexScope); + (s) -> Setting.parseInt(s, 1, "index.merge.scheduler.max_merge_count"), SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting AUTO_THROTTLE_SETTING = - Setting.boolSetting("index.merge.scheduler.auto_throttle", true, true, SettingsProperty.IndexScope); + Setting.boolSetting("index.merge.scheduler.auto_throttle", true, SettingsProperty.Dynamic, SettingsProperty.IndexScope); private volatile boolean autoThrottle; private volatile int maxThreadCount; diff --git a/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java b/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java index ae26eab2de1b..2770f7e6e084 100644 --- a/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java +++ b/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java @@ -53,32 +53,32 @@ public final class 
SearchSlowLog { private static final String INDEX_SEARCH_SLOWLOG_PREFIX = "index.search.slowlog"; public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.warn", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.info", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.debug", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.trace", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.warn", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.info", 
TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.debug", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.trace", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), true, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_SEARCH_SLOWLOG_REFORMAT = - Setting.boolSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".reformat", true, true, SettingsProperty.IndexScope); + Setting.boolSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".reformat", true, SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_SEARCH_SLOWLOG_LEVEL = - new Setting<>(INDEX_SEARCH_SLOWLOG_PREFIX + ".level", SlowLogLevel.TRACE.name(), SlowLogLevel::parse, true, + new Setting<>(INDEX_SEARCH_SLOWLOG_PREFIX + ".level", SlowLogLevel.TRACE.name(), SlowLogLevel::parse, SettingsProperty.Dynamic, SettingsProperty.IndexScope); public SearchSlowLog(IndexSettings indexSettings) { diff --git a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java index 72435d90fd95..88afe96aff96 100644 --- a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +++ b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java @@ -72,7 +72,7 @@ import java.util.concurrent.Executor; public 
final class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener>, Closeable { public static final Setting INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING = - Setting.boolSetting("index.load_fixed_bitset_filters_eagerly", true, false, SettingsProperty.IndexScope); + Setting.boolSetting("index.load_fixed_bitset_filters_eagerly", true, SettingsProperty.IndexScope); private final boolean loadRandomAccessFiltersEagerly; private final Cache> loadedFilters; diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java index 47110c62bfbd..e1bcd5bb6984 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java +++ b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java @@ -84,7 +84,7 @@ public final class EngineConfig { } return s; } - }, false, SettingsProperty.IndexScope); + }, SettingsProperty.IndexScope); /** if set to true the engine will start even if the translog id in the commit point can not be found */ public static final String INDEX_FORCE_NEW_TRANSLOG = "index.engine.force_new_translog"; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java index 6d12de8c3952..caa5ce416b0d 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java @@ -67,7 +67,7 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo default: throw new IllegalArgumentException("failed to parse [" + s + "] must be one of [node,node]"); } - }, false, SettingsProperty.IndexScope); + }, SettingsProperty.IndexScope); private static final IndexFieldData.Builder MISSING_DOC_VALUES_BUILDER = (indexProperties, fieldType, cache, breakerService, mapperService1) -> 
{ throw new IllegalStateException("Can't load fielddata on [" + fieldType.name() diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 9fba8c0529ed..9796a26d7408 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -51,9 +51,9 @@ import java.util.stream.StreamSupport; public abstract class FieldMapper extends Mapper implements Cloneable { public static final Setting IGNORE_MALFORMED_SETTING = - Setting.boolSetting("index.mapping.ignore_malformed", false, false, SettingsProperty.IndexScope); + Setting.boolSetting("index.mapping.ignore_malformed", false, SettingsProperty.IndexScope); public static final Setting COERCE_SETTING = - Setting.boolSetting("index.mapping.coerce", false, false, SettingsProperty.IndexScope); + Setting.boolSetting("index.mapping.coerce", false, SettingsProperty.IndexScope); public abstract static class Builder extends Mapper.Builder { protected final MappedFieldType fieldType; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index c86219cc05ac..0acf52b87288 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -83,10 +83,10 @@ public class MapperService extends AbstractIndexComponent implements Closeable { public static final String DEFAULT_MAPPING = "_default_"; public static final Setting INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING = - Setting.longSetting("index.mapping.nested_fields.limit", 50L, 0, true, SettingsProperty.IndexScope); + Setting.longSetting("index.mapping.nested_fields.limit", 50L, 0, SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final boolean INDEX_MAPPER_DYNAMIC_DEFAULT = true; public static 
final Setting INDEX_MAPPER_DYNAMIC_SETTING = - Setting.boolSetting("index.mapper.dynamic", INDEX_MAPPER_DYNAMIC_DEFAULT, false, SettingsProperty.IndexScope); + Setting.boolSetting("index.mapper.dynamic", INDEX_MAPPER_DYNAMIC_DEFAULT, SettingsProperty.IndexScope); private static ObjectHashSet META_FIELDS = ObjectHashSet.from( "_uid", "_id", "_type", "_all", "_parent", "_routing", "_index", "_size", "_timestamp", "_ttl" diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java index 5f928043688d..02974157aed1 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java @@ -55,7 +55,7 @@ import java.util.List; public abstract class NumberFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll { // this is private since it has a different default private static final Setting COERCE_SETTING = - Setting.boolSetting("index.mapping.coerce", true, false, SettingsProperty.IndexScope); + Setting.boolSetting("index.mapping.coerce", true, SettingsProperty.IndexScope); public static class Defaults { diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java index 62d5da922591..17e54ced15f8 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java @@ -63,7 +63,7 @@ import java.util.concurrent.TimeUnit; public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent implements Closeable { public final static Setting INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING = - Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, false, SettingsProperty.IndexScope); 
+ Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, SettingsProperty.IndexScope); private final ConcurrentMap percolateQueries = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency(); private final QueryShardContext queryShardContext; diff --git a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java index 914979eac05f..4c452d159282 100644 --- a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java +++ b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java @@ -61,7 +61,7 @@ public class FsDirectoryService extends DirectoryService implements StoreRateLim default: throw new IllegalArgumentException("unrecognized [index.store.fs.fs_lock] \"" + s + "\": must be native or simple"); } - }, false, SettingsProperty.IndexScope); + }, SettingsProperty.IndexScope); private final CounterMetric rateLimitingTimeInNanos = new CounterMetric(); private final ShardPath path; diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStore.java b/core/src/main/java/org/elasticsearch/index/store/IndexStore.java index 783ed980646b..be4e69662264 100644 --- a/core/src/main/java/org/elasticsearch/index/store/IndexStore.java +++ b/core/src/main/java/org/elasticsearch/index/store/IndexStore.java @@ -31,9 +31,11 @@ import org.elasticsearch.index.shard.ShardPath; */ public class IndexStore extends AbstractIndexComponent { public static final Setting INDEX_STORE_THROTTLE_TYPE_SETTING = - new Setting<>("index.store.throttle.type", "none", IndexRateLimitingType::fromString, true, SettingsProperty.IndexScope); + new Setting<>("index.store.throttle.type", "none", IndexRateLimitingType::fromString, + SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING = - Setting.byteSizeSetting("index.store.throttle.max_bytes_per_sec", new ByteSizeValue(0), 
true, SettingsProperty.IndexScope); + Setting.byteSizeSetting("index.store.throttle.max_bytes_per_sec", new ByteSizeValue(0), + SettingsProperty.Dynamic, SettingsProperty.IndexScope); protected final IndexStoreConfig indexStoreConfig; private final StoreRateLimiting rateLimiting = new StoreRateLimiting(); diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java index 328d7604bcfd..28a4c32ab75c 100644 --- a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java +++ b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java @@ -38,13 +38,14 @@ public class IndexStoreConfig { * Configures the node / cluster level throttle type. See {@link StoreRateLimiting.Type}. */ public static final Setting INDICES_STORE_THROTTLE_TYPE_SETTING = - new Setting<>("indices.store.throttle.type", StoreRateLimiting.Type.NONE.name(),StoreRateLimiting.Type::fromString, true, - SettingsProperty.ClusterScope); + new Setting<>("indices.store.throttle.type", StoreRateLimiting.Type.NONE.name(),StoreRateLimiting.Type::fromString, + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); /** * Configures the node / cluster level throttle intensity. 
The default is 10240 MB */ public static final Setting INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING = - Setting.byteSizeSetting("indices.store.throttle.max_bytes_per_sec", new ByteSizeValue(0), true, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("indices.store.throttle.max_bytes_per_sec", new ByteSizeValue(0), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); private volatile StoreRateLimiting.Type rateLimitingType; private volatile ByteSizeValue rateLimitingThrottle; private final StoreRateLimiting rateLimiting = new StoreRateLimiting(); diff --git a/core/src/main/java/org/elasticsearch/index/store/Store.java b/core/src/main/java/org/elasticsearch/index/store/Store.java index b1d806d520e6..a0cc8f114196 100644 --- a/core/src/main/java/org/elasticsearch/index/store/Store.java +++ b/core/src/main/java/org/elasticsearch/index/store/Store.java @@ -131,7 +131,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref static final int VERSION = VERSION_WRITE_THROWABLE; static final String CORRUPTED = "corrupted_"; public static final Setting INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING = - Setting.timeSetting("index.store.stats_refresh_interval", TimeValue.timeValueSeconds(10), false, SettingsProperty.IndexScope); + Setting.timeSetting("index.store.stats_refresh_interval", TimeValue.timeValueSeconds(10), SettingsProperty.IndexScope); private final AtomicBoolean isClosed = new AtomicBoolean(false); private final StoreDirectory directory; diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java b/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java index 15eb19fc416a..9dbb673fa5bb 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java @@ -49,9 +49,9 @@ import java.util.concurrent.ConcurrentHashMap; public class IndicesQueryCache extends AbstractComponent implements QueryCache, Closeable 
{ public static final Setting INDICES_CACHE_QUERY_SIZE_SETTING = Setting.byteSizeSetting( - "indices.queries.cache.size", "10%", false, SettingsProperty.ClusterScope); + "indices.queries.cache.size", "10%", SettingsProperty.ClusterScope); public static final Setting INDICES_CACHE_QUERY_COUNT_SETTING = Setting.intSetting( - "indices.queries.cache.count", 10000, 1, false, SettingsProperty.ClusterScope); + "indices.queries.cache.count", 10000, 1, SettingsProperty.ClusterScope); private final LRUQueryCache cache; private final ShardCoreKeyMap shardKeyMap = new ShardCoreKeyMap(); diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java b/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java index 8ebe52a2c195..02aa09f138cd 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java @@ -70,11 +70,11 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo * since we are checking on the cluster state IndexMetaData always. 
*/ public static final Setting INDEX_CACHE_REQUEST_ENABLED_SETTING = - Setting.boolSetting("index.requests.cache.enable", false, true, SettingsProperty.IndexScope); + Setting.boolSetting("index.requests.cache.enable", false, SettingsProperty.Dynamic, SettingsProperty.IndexScope); public static final Setting INDICES_CACHE_QUERY_SIZE = - Setting.byteSizeSetting("indices.requests.cache.size", "1%", false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("indices.requests.cache.size", "1%", SettingsProperty.ClusterScope); public static final Setting INDICES_CACHE_QUERY_EXPIRE = - Setting.positiveTimeSetting("indices.requests.cache.expire", new TimeValue(0), false, SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("indices.requests.cache.expire", new TimeValue(0), SettingsProperty.ClusterScope); private final ConcurrentMap registeredClosedListeners = ConcurrentCollections.newConcurrentMap(); private final Set keysToClean = ConcurrentCollections.newConcurrentSet(); diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index d64bb3f0c4ce..585d1d31bc5e 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -117,7 +117,7 @@ public class IndicesService extends AbstractLifecycleComponent i public static final String INDICES_SHARDS_CLOSED_TIMEOUT = "indices.shards_closed_timeout"; public static final Setting INDICES_CACHE_CLEAN_INTERVAL_SETTING = - Setting.positiveTimeSetting("indices.cache.cleanup_interval", TimeValue.timeValueMinutes(1), false, SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("indices.cache.cleanup_interval", TimeValue.timeValueMinutes(1), SettingsProperty.ClusterScope); private final PluginsService pluginsService; private final NodeEnvironment nodeEnv; private final TimeValue shardsClosedTimeout; diff --git 
a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java index 3b7b97828490..b1189fbd2862 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java @@ -73,11 +73,11 @@ import java.util.function.Function; public class HunspellService extends AbstractComponent { public final static Setting HUNSPELL_LAZY_LOAD = - Setting.boolSetting("indices.analysis.hunspell.dictionary.lazy", Boolean.FALSE, false, SettingsProperty.ClusterScope); + Setting.boolSetting("indices.analysis.hunspell.dictionary.lazy", Boolean.FALSE, SettingsProperty.ClusterScope); public final static Setting HUNSPELL_IGNORE_CASE = - Setting.boolSetting("indices.analysis.hunspell.dictionary.ignore_case", Boolean.FALSE, false, SettingsProperty.ClusterScope); + Setting.boolSetting("indices.analysis.hunspell.dictionary.ignore_case", Boolean.FALSE, SettingsProperty.ClusterScope); public final static Setting HUNSPELL_DICTIONARY_OPTIONS = - Setting.groupSetting("indices.analysis.hunspell.dictionary.", false, SettingsProperty.ClusterScope); + Setting.groupSetting("indices.analysis.hunspell.dictionary.", SettingsProperty.ClusterScope); private final ConcurrentHashMap dictionaries = new ConcurrentHashMap<>(); private final Map knownDictionaries; private final boolean defaultIgnoreCase; diff --git a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java index fdee0b033434..3a3fede9af0c 100644 --- a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +++ b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -48,21 +48,21 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { private final 
ConcurrentMap breakers = new ConcurrentHashMap(); public static final Setting TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING = - Setting.byteSizeSetting("indices.breaker.total.limit", "70%", true, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("indices.breaker.total.limit", "70%", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING = - Setting.byteSizeSetting("indices.breaker.fielddata.limit", "60%", true, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("indices.breaker.fielddata.limit", "60%", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING = - Setting.doubleSetting("indices.breaker.fielddata.overhead", 1.03d, 0.0d, true, SettingsProperty.ClusterScope); + Setting.doubleSetting("indices.breaker.fielddata.overhead", 1.03d, 0.0d, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING = - new Setting<>("indices.breaker.fielddata.type", "memory", CircuitBreaker.Type::parseValue, false, SettingsProperty.ClusterScope); + new Setting<>("indices.breaker.fielddata.type", "memory", CircuitBreaker.Type::parseValue, SettingsProperty.ClusterScope); public static final Setting REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING = - Setting.byteSizeSetting("indices.breaker.request.limit", "40%", true, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("indices.breaker.request.limit", "40%", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING = - Setting.doubleSetting("indices.breaker.request.overhead", 1.0d, 0.0d, true, SettingsProperty.ClusterScope); + Setting.doubleSetting("indices.breaker.request.overhead", 1.0d, 0.0d, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting REQUEST_CIRCUIT_BREAKER_TYPE_SETTING = - new 
Setting<>("indices.breaker.request.type", "memory", CircuitBreaker.Type::parseValue, false, SettingsProperty.ClusterScope); + new Setting<>("indices.breaker.request.type", "memory", CircuitBreaker.Type::parseValue, SettingsProperty.ClusterScope); diff --git a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java index dd60c6282239..a5bb86969993 100644 --- a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java +++ b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java @@ -54,7 +54,7 @@ import java.util.function.ToLongBiFunction; public class IndicesFieldDataCache extends AbstractComponent implements RemovalListener, Releasable{ public static final Setting INDICES_FIELDDATA_CACHE_SIZE_KEY = - Setting.byteSizeSetting("indices.fielddata.cache.size", new ByteSizeValue(-1), false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("indices.fielddata.cache.size", new ByteSizeValue(-1), SettingsProperty.ClusterScope); private final IndexFieldDataCache.Listener indicesFieldDataCacheListener; private final Cache cache; diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java index 742b1b789457..f58dc1ca8b87 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java @@ -34,26 +34,26 @@ import org.elasticsearch.common.unit.TimeValue; public class RecoverySettings extends AbstractComponent { public static final Setting INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING = - Setting.byteSizeSetting("indices.recovery.max_bytes_per_sec", new ByteSizeValue(40, ByteSizeUnit.MB), true, - SettingsProperty.ClusterScope); + 
Setting.byteSizeSetting("indices.recovery.max_bytes_per_sec", new ByteSizeValue(40, ByteSizeUnit.MB), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); /** * how long to wait before retrying after issues cause by cluster state syncing between nodes * i.e., local node is not yet known on remote node, remote shard not yet started etc. */ public static final Setting INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING = - Setting.positiveTimeSetting("indices.recovery.retry_delay_state_sync", TimeValue.timeValueMillis(500), true, - SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("indices.recovery.retry_delay_state_sync", TimeValue.timeValueMillis(500), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); /** how long to wait before retrying after network related issues */ public static final Setting INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING = - Setting.positiveTimeSetting("indices.recovery.retry_delay_network", TimeValue.timeValueSeconds(5), true, - SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("indices.recovery.retry_delay_network", TimeValue.timeValueSeconds(5), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); /** timeout value to use for requests made as part of the recovery process */ public static final Setting INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING = - Setting.positiveTimeSetting("indices.recovery.internal_action_timeout", TimeValue.timeValueMinutes(15), true, - SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("indices.recovery.internal_action_timeout", TimeValue.timeValueMinutes(15), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); /** * timeout value to use for requests made as part of the recovery process that are expected to take long time. 
@@ -62,7 +62,7 @@ public class RecoverySettings extends AbstractComponent { public static final Setting INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING = Setting.timeSetting("indices.recovery.internal_action_long_timeout", (s) -> TimeValue.timeValueMillis(INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.get(s).millis() * 2).toString(), - TimeValue.timeValueSeconds(0), true, SettingsProperty.ClusterScope); + TimeValue.timeValueSeconds(0), SettingsProperty.Dynamic, SettingsProperty.ClusterScope); /** * recoveries that don't show any activity for more then this interval will be failed. @@ -70,8 +70,8 @@ public class RecoverySettings extends AbstractComponent { */ public static final Setting INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING = Setting.timeSetting("indices.recovery.recovery_activity_timeout", - (s) -> INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.getRaw(s) , TimeValue.timeValueSeconds(0), true, - SettingsProperty.ClusterScope); + (s) -> INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.getRaw(s) , TimeValue.timeValueSeconds(0), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final ByteSizeValue DEFAULT_CHUNK_SIZE = new ByteSizeValue(512, ByteSizeUnit.KB); diff --git a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index 23e007c5366e..3851b4571b6f 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -71,7 +71,7 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe // TODO this class can be foled into either IndicesService and partially into IndicesClusterStateService there is no need for a separate public service public static final Setting INDICES_STORE_DELETE_SHARD_TIMEOUT = - Setting.positiveTimeSetting("indices.store.delete.shard.timeout", new TimeValue(30, TimeUnit.SECONDS), false, + 
Setting.positiveTimeSetting("indices.store.delete.shard.timeout", new TimeValue(30, TimeUnit.SECONDS), SettingsProperty.ClusterScope); public static final String ACTION_SHARD_EXISTS = "internal:index/shard/exists"; private static final EnumSet ACTIVE_STATES = EnumSet.of(IndexShardState.STARTED, IndexShardState.RELOCATED); diff --git a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java index 6eb34adc9f82..0f9c9d425e1c 100644 --- a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java +++ b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java @@ -69,7 +69,8 @@ import java.util.concurrent.locks.ReentrantLock; public class IndicesTTLService extends AbstractLifecycleComponent { public static final Setting INDICES_TTL_INTERVAL_SETTING = - Setting.positiveTimeSetting("indices.ttl.interval", TimeValue.timeValueSeconds(60), true, SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("indices.ttl.interval", TimeValue.timeValueSeconds(60), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); private final ClusterService clusterService; private final IndicesService indicesService; diff --git a/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java b/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java index be985cb70209..71a9743f78a7 100644 --- a/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java +++ b/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java @@ -38,7 +38,7 @@ public class FsService extends AbstractComponent { private final SingleObjectCache fsStatsCache; public final static Setting REFRESH_INTERVAL_SETTING = - Setting.timeSetting("monitor.fs.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, + Setting.timeSetting("monitor.fs.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), SettingsProperty.ClusterScope); public 
FsService(Settings settings, NodeEnvironment nodeEnvironment) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java index 03c6c00d5395..301b86674c61 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java @@ -48,13 +48,13 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent ENABLED_SETTING = - Setting.boolSetting("monitor.jvm.gc.enabled", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("monitor.jvm.gc.enabled", true, SettingsProperty.ClusterScope); public final static Setting REFRESH_INTERVAL_SETTING = - Setting.timeSetting("monitor.jvm.gc.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, + Setting.timeSetting("monitor.jvm.gc.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), SettingsProperty.ClusterScope); private static String GC_COLLECTOR_PREFIX = "monitor.jvm.gc.collector."; - public final static Setting GC_SETTING = Setting.groupSetting(GC_COLLECTOR_PREFIX, false, SettingsProperty.ClusterScope); + public final static Setting GC_SETTING = Setting.groupSetting(GC_COLLECTOR_PREFIX, SettingsProperty.ClusterScope); static class GcThreshold { public final String name; diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java index e816e51911e9..5e03ab3e31c9 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java @@ -37,7 +37,7 @@ public class JvmService extends AbstractComponent { private JvmStats jvmStats; public final static Setting REFRESH_INTERVAL_SETTING = - Setting.timeSetting("monitor.jvm.refresh_interval", TimeValue.timeValueSeconds(1), 
TimeValue.timeValueSeconds(1), false, + Setting.timeSetting("monitor.jvm.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), SettingsProperty.ClusterScope); public JvmService(Settings settings) { diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsService.java b/core/src/main/java/org/elasticsearch/monitor/os/OsService.java index 1cd0910ab3e9..df750c7247cb 100644 --- a/core/src/main/java/org/elasticsearch/monitor/os/OsService.java +++ b/core/src/main/java/org/elasticsearch/monitor/os/OsService.java @@ -39,7 +39,7 @@ public class OsService extends AbstractComponent { private SingleObjectCache osStatsCache; public final static Setting REFRESH_INTERVAL_SETTING = - Setting.timeSetting("monitor.os.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, + Setting.timeSetting("monitor.os.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), SettingsProperty.ClusterScope); public OsService(Settings settings) { diff --git a/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java b/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java index 316c8a8131f4..0370011e7c06 100644 --- a/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java +++ b/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java @@ -36,7 +36,7 @@ public final class ProcessService extends AbstractComponent { private final SingleObjectCache processStatsCache; public final static Setting REFRESH_INTERVAL_SETTING = - Setting.timeSetting("monitor.process.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, + Setting.timeSetting("monitor.process.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), SettingsProperty.ClusterScope); public ProcessService(Settings settings) { diff --git a/core/src/main/java/org/elasticsearch/node/Node.java 
b/core/src/main/java/org/elasticsearch/node/Node.java index 4b0c806749f2..6fc34955dadc 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -126,22 +126,22 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; public class Node implements Closeable { public static final Setting WRITE_PORTS_FIELD_SETTING = - Setting.boolSetting("node.portsfile", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("node.portsfile", false, SettingsProperty.ClusterScope); public static final Setting NODE_CLIENT_SETTING = - Setting.boolSetting("node.client", false, false, SettingsProperty.ClusterScope); - public static final Setting NODE_DATA_SETTING = Setting.boolSetting("node.data", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("node.client", false, SettingsProperty.ClusterScope); + public static final Setting NODE_DATA_SETTING = Setting.boolSetting("node.data", true, SettingsProperty.ClusterScope); public static final Setting NODE_MASTER_SETTING = - Setting.boolSetting("node.master", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("node.master", true, SettingsProperty.ClusterScope); public static final Setting NODE_LOCAL_SETTING = - Setting.boolSetting("node.local", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("node.local", false, SettingsProperty.ClusterScope); public static final Setting NODE_MODE_SETTING = - new Setting<>("node.mode", "network", Function.identity(), false, SettingsProperty.ClusterScope); + new Setting<>("node.mode", "network", Function.identity(), SettingsProperty.ClusterScope); public static final Setting NODE_INGEST_SETTING = - Setting.boolSetting("node.ingest", true, false, SettingsProperty.ClusterScope); - public static final Setting NODE_NAME_SETTING = Setting.simpleString("node.name", false, SettingsProperty.ClusterScope); + Setting.boolSetting("node.ingest", true, 
SettingsProperty.ClusterScope); + public static final Setting NODE_NAME_SETTING = Setting.simpleString("node.name", SettingsProperty.ClusterScope); // this sucks that folks can mistype client etc and get away with it. // TODO: we should move this to node.attribute.${name} = ${value} instead. - public static final Setting NODE_ATTRIBUTES = Setting.groupSetting("node.", false, SettingsProperty.ClusterScope); + public static final Setting NODE_ATTRIBUTES = Setting.groupSetting("node.", SettingsProperty.ClusterScope); private static final String CLIENT_TYPE = "node"; diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java index 45fd66d6daa2..5f8107ba7585 100644 --- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java @@ -59,7 +59,7 @@ public class InternalSettingsPreparer { public static final String SECRET_PROMPT_VALUE = "${prompt.secret}"; public static final String TEXT_PROMPT_VALUE = "${prompt.text}"; public static final Setting IGNORE_SYSTEM_PROPERTIES_SETTING = - Setting.boolSetting("config.ignore_system_properties", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("config.ignore_system_properties", false, SettingsProperty.ClusterScope); /** * Prepares the settings by gathering all elasticsearch system properties and setting defaults. 
diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java index 1fb5875109e7..1aafcd0f6fb0 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -73,7 +73,7 @@ public class PluginsService extends AbstractComponent { private final List> plugins; private final PluginsAndModules info; public static final Setting> MANDATORY_SETTING = - Setting.listSetting("plugin.mandatory", Collections.emptyList(), Function.identity(), false, SettingsProperty.ClusterScope); + Setting.listSetting("plugin.mandatory", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); private final Map> onModuleReferences; diff --git a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java index 8b3641057189..8ac297e072f2 100644 --- a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java @@ -53,16 +53,16 @@ public class FsRepository extends BlobStoreRepository { public final static String TYPE = "fs"; public static final Setting LOCATION_SETTING = - new Setting<>("location", "", Function.identity(), false, SettingsProperty.ClusterScope); + new Setting<>("location", "", Function.identity(), SettingsProperty.ClusterScope); public static final Setting REPOSITORIES_LOCATION_SETTING = - new Setting<>("repositories.fs.location", LOCATION_SETTING, Function.identity(), false, SettingsProperty.ClusterScope); + new Setting<>("repositories.fs.location", LOCATION_SETTING, Function.identity(), SettingsProperty.ClusterScope); public static final Setting CHUNK_SIZE_SETTING = - Setting.byteSizeSetting("chunk_size", "-1", false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("chunk_size", "-1", 
SettingsProperty.ClusterScope); public static final Setting REPOSITORIES_CHUNK_SIZE_SETTING = - Setting.byteSizeSetting("repositories.fs.chunk_size", "-1", false, SettingsProperty.ClusterScope); - public static final Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("repositories.fs.chunk_size", "-1", SettingsProperty.ClusterScope); + public static final Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, SettingsProperty.ClusterScope); public static final Setting REPOSITORIES_COMPRESS_SETTING = - Setting.boolSetting("repositories.fs.compress", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("repositories.fs.compress", false, SettingsProperty.ClusterScope); private final FsBlobStore blobStore; diff --git a/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java b/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java index c5255fd8b5e9..5086902d9fce 100644 --- a/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java @@ -58,22 +58,20 @@ public class URLRepository extends BlobStoreRepository { public static final Setting> SUPPORTED_PROTOCOLS_SETTING = Setting.listSetting("repositories.url.supported_protocols", Arrays.asList("http", "https", "ftp", "file", "jar"), - Function.identity(), false, SettingsProperty.ClusterScope); + Function.identity(), SettingsProperty.ClusterScope); public static final Setting> ALLOWED_URLS_SETTING = - Setting.listSetting("repositories.url.allowed_urls", Collections.emptyList(), URIPattern::new, false, - SettingsProperty.ClusterScope); + Setting.listSetting("repositories.url.allowed_urls", Collections.emptyList(), URIPattern::new, SettingsProperty.ClusterScope); - public static final Setting URL_SETTING = - new Setting<>("url", "http:", URLRepository::parseURL, false, 
SettingsProperty.ClusterScope); + public static final Setting URL_SETTING = new Setting<>("url", "http:", URLRepository::parseURL, SettingsProperty.ClusterScope); public static final Setting REPOSITORIES_URL_SETTING = - new Setting<>("repositories.url.url", (s) -> s.get("repositories.uri.url", "http:"), URLRepository::parseURL, false, + new Setting<>("repositories.url.url", (s) -> s.get("repositories.uri.url", "http:"), URLRepository::parseURL, SettingsProperty.ClusterScope); public static final Setting LIST_DIRECTORIES_SETTING = - Setting.boolSetting("list_directories", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("list_directories", true, SettingsProperty.ClusterScope); public static final Setting REPOSITORIES_LIST_DIRECTORIES_SETTING = - Setting.boolSetting("repositories.uri.list_directories", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("repositories.uri.list_directories", true, SettingsProperty.ClusterScope); private final List supportedProtocols; diff --git a/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index 1ea87c6c61e8..5066b4884aff 100644 --- a/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -36,7 +36,7 @@ import org.elasticsearch.common.settings.Settings; */ public abstract class BaseRestHandler extends AbstractComponent implements RestHandler { public static final Setting MULTI_ALLOW_EXPLICIT_INDEX = - Setting.boolSetting("rest.action.multi.allow_explicit_index", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("rest.action.multi.allow_explicit_index", true, SettingsProperty.ClusterScope); private final Client client; protected final ParseFieldMatcher parseFieldMatcher; diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java index 
058deadcf4c7..4ef8b4e4bae9 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java @@ -86,12 +86,12 @@ public class ScriptService extends AbstractComponent implements Closeable { static final String DISABLE_DYNAMIC_SCRIPTING_SETTING = "script.disable_dynamic"; public static final Setting SCRIPT_CACHE_SIZE_SETTING = - Setting.intSetting("script.cache.max_size", 100, 0, false, SettingsProperty.ClusterScope); + Setting.intSetting("script.cache.max_size", 100, 0, SettingsProperty.ClusterScope); public static final Setting SCRIPT_CACHE_EXPIRE_SETTING = - Setting.positiveTimeSetting("script.cache.expire", TimeValue.timeValueMillis(0), false, SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("script.cache.expire", TimeValue.timeValueMillis(0), SettingsProperty.ClusterScope); public static final String SCRIPT_INDEX = ".scripts"; public static final Setting SCRIPT_AUTO_RELOAD_ENABLED_SETTING = - Setting.boolSetting("script.auto_reload_enabled", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("script.auto_reload_enabled", true, SettingsProperty.ClusterScope); private final String defaultLang; diff --git a/core/src/main/java/org/elasticsearch/script/ScriptSettings.java b/core/src/main/java/org/elasticsearch/script/ScriptSettings.java index a2ab5f9c269b..26cb7eaa2780 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptSettings.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptSettings.java @@ -45,7 +45,6 @@ public class ScriptSettings { ScriptModes.sourceKey(scriptType), scriptType.getDefaultScriptMode().getMode(), ScriptMode::parse, - false, SettingsProperty.ClusterScope)); } SCRIPT_TYPE_SETTING_MAP = Collections.unmodifiableMap(scriptTypeSettingMap); @@ -67,7 +66,7 @@ public class ScriptSettings { throw new IllegalArgumentException("unregistered default language [" + setting + "]"); } return setting; - }, false, 
SettingsProperty.ClusterScope); + }, SettingsProperty.ClusterScope); } private static Map> contextSettings(ScriptContextRegistry scriptContextRegistry) { @@ -77,7 +76,6 @@ public class ScriptSettings { ScriptModes.operationKey(scriptContext), ScriptMode.OFF.getMode(), ScriptMode::parse, - false, SettingsProperty.ClusterScope )); } @@ -138,7 +136,6 @@ public class ScriptSettings { ScriptModes.getKey(language, scriptType, scriptContext), defaultSetting, ScriptMode::parse, - false, SettingsProperty.ClusterScope); scriptModeSettings.add(setting); } diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index a4106feb2317..b92e271a1491 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -108,13 +108,13 @@ public class SearchService extends AbstractLifecycleComponent imp // we can have 5 minutes here, since we make sure to clean with search requests and when shard/index closes public static final Setting DEFAULT_KEEPALIVE_SETTING = - Setting.positiveTimeSetting("search.default_keep_alive", timeValueMinutes(5), false, SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("search.default_keep_alive", timeValueMinutes(5), SettingsProperty.ClusterScope); public static final Setting KEEPALIVE_INTERVAL_SETTING = - Setting.positiveTimeSetting("search.keep_alive_interval", timeValueMinutes(1), false, SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("search.keep_alive_interval", timeValueMinutes(1), SettingsProperty.ClusterScope); public static final TimeValue NO_TIMEOUT = timeValueMillis(-1); public static final Setting DEFAULT_SEARCH_TIMEOUT_SETTING = - Setting.timeSetting("search.default_search_timeout", NO_TIMEOUT, true, SettingsProperty.ClusterScope); + Setting.timeSetting("search.default_search_timeout", NO_TIMEOUT, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); 
private final ThreadPool threadPool; diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 14c52ab627a6..150325ff2423 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -190,7 +190,7 @@ public class ThreadPool extends AbstractComponent implements Closeable { } public static final Setting THREADPOOL_GROUP_SETTING = - Setting.groupSetting("threadpool.", true, SettingsProperty.ClusterScope); + Setting.groupSetting("threadpool.", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); private volatile Map executors; diff --git a/core/src/main/java/org/elasticsearch/transport/Transport.java b/core/src/main/java/org/elasticsearch/transport/Transport.java index c72bcb2bb549..d6dee1953a46 100644 --- a/core/src/main/java/org/elasticsearch/transport/Transport.java +++ b/core/src/main/java/org/elasticsearch/transport/Transport.java @@ -36,7 +36,7 @@ import java.util.Map; public interface Transport extends LifecycleComponent { - Setting TRANSPORT_TCP_COMPRESS = Setting.boolSetting("transport.tcp.compress", false, false, SettingsProperty.ClusterScope); + Setting TRANSPORT_TCP_COMPRESS = Setting.boolSetting("transport.tcp.compress", false, SettingsProperty.ClusterScope); void transportServiceAdapter(TransportServiceAdapter service); diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index 2a03c5162558..8943df12e595 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -99,10 +99,10 @@ public class TransportService extends AbstractLifecycleComponent> TRACE_LOG_INCLUDE_SETTING = - listSetting("transport.tracer.include", emptyList(), Function.identity(), true, 
SettingsProperty.ClusterScope); + listSetting("transport.tracer.include", emptyList(), Function.identity(), SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static final Setting> TRACE_LOG_EXCLUDE_SETTING = listSetting("transport.tracer.exclude", Arrays.asList("internal:discovery/zen/fd*", TransportLivenessAction.NAME), - Function.identity(), true, SettingsProperty.ClusterScope); + Function.identity(), SettingsProperty.Dynamic, SettingsProperty.ClusterScope); private final ESLogger tracerLog; diff --git a/core/src/main/java/org/elasticsearch/transport/TransportSettings.java b/core/src/main/java/org/elasticsearch/transport/TransportSettings.java index e5fb9f7e14dd..eaa3f0041888 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportSettings.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportSettings.java @@ -36,18 +36,18 @@ import static org.elasticsearch.common.settings.Setting.listSetting; final public class TransportSettings { public static final Setting> HOST = - listSetting("transport.host", emptyList(), s -> s, false, SettingsProperty.ClusterScope); + listSetting("transport.host", emptyList(), s -> s, SettingsProperty.ClusterScope); public static final Setting> PUBLISH_HOST = - listSetting("transport.publish_host", HOST, s -> s, false, SettingsProperty.ClusterScope); + listSetting("transport.publish_host", HOST, s -> s, SettingsProperty.ClusterScope); public static final Setting> BIND_HOST = - listSetting("transport.bind_host", HOST, s -> s, false, SettingsProperty.ClusterScope); + listSetting("transport.bind_host", HOST, s -> s, SettingsProperty.ClusterScope); public static final Setting PORT = - new Setting<>("transport.tcp.port", "9300-9400", s -> s, false, SettingsProperty.ClusterScope); + new Setting<>("transport.tcp.port", "9300-9400", s -> s, SettingsProperty.ClusterScope); public static final Setting PUBLISH_PORT = - intSetting("transport.publish_port", -1, -1, false, SettingsProperty.ClusterScope); + 
intSetting("transport.publish_port", -1, -1, SettingsProperty.ClusterScope); public static final String DEFAULT_PROFILE = "default"; public static final Setting TRANSPORT_PROFILES_SETTING = - groupSetting("transport.profiles.", true, SettingsProperty.ClusterScope); + groupSetting("transport.profiles.", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); private TransportSettings() { diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java index dd250fabd1d3..0e2b38eee671 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java @@ -150,43 +150,42 @@ public class NettyTransport extends AbstractLifecycleComponent implem public static final Setting WORKER_COUNT = new Setting<>("transport.netty.worker_count", (s) -> Integer.toString(EsExecutors.boundedNumberOfProcessors(s) * 2), - (s) -> Setting.parseInt(s, 1, "transport.netty.worker_count"), false, SettingsProperty.ClusterScope); + (s) -> Setting.parseInt(s, 1, "transport.netty.worker_count"), SettingsProperty.ClusterScope); public static final Setting CONNECTIONS_PER_NODE_RECOVERY = - intSetting("transport.connections_per_node.recovery", 2, 1, false, SettingsProperty.ClusterScope); + intSetting("transport.connections_per_node.recovery", 2, 1, SettingsProperty.ClusterScope); public static final Setting CONNECTIONS_PER_NODE_BULK = - intSetting("transport.connections_per_node.bulk", 3, 1, false, SettingsProperty.ClusterScope); + intSetting("transport.connections_per_node.bulk", 3, 1, SettingsProperty.ClusterScope); public static final Setting CONNECTIONS_PER_NODE_REG = - intSetting("transport.connections_per_node.reg", 6, 1, false, SettingsProperty.ClusterScope); + intSetting("transport.connections_per_node.reg", 6, 1, SettingsProperty.ClusterScope); public static final Setting CONNECTIONS_PER_NODE_STATE = 
- intSetting("transport.connections_per_node.state", 1, 1, false, SettingsProperty.ClusterScope); + intSetting("transport.connections_per_node.state", 1, 1, SettingsProperty.ClusterScope); public static final Setting CONNECTIONS_PER_NODE_PING = - intSetting("transport.connections_per_node.ping", 1, 1, false, SettingsProperty.ClusterScope); + intSetting("transport.connections_per_node.ping", 1, 1, SettingsProperty.ClusterScope); // the scheduled internal ping interval setting, defaults to disabled (-1) public static final Setting PING_SCHEDULE = - timeSetting("transport.ping_schedule", TimeValue.timeValueSeconds(-1), false, SettingsProperty.ClusterScope); + timeSetting("transport.ping_schedule", TimeValue.timeValueSeconds(-1), SettingsProperty.ClusterScope); public static final Setting TCP_BLOCKING_CLIENT = - boolSetting("transport.tcp.blocking_client", TcpSettings.TCP_BLOCKING_CLIENT, false, SettingsProperty.ClusterScope); + boolSetting("transport.tcp.blocking_client", TcpSettings.TCP_BLOCKING_CLIENT, SettingsProperty.ClusterScope); public static final Setting TCP_CONNECT_TIMEOUT = - timeSetting("transport.tcp.connect_timeout", TcpSettings.TCP_CONNECT_TIMEOUT, false, SettingsProperty.ClusterScope); + timeSetting("transport.tcp.connect_timeout", TcpSettings.TCP_CONNECT_TIMEOUT, SettingsProperty.ClusterScope); public static final Setting TCP_NO_DELAY = - boolSetting("transport.tcp_no_delay", TcpSettings.TCP_NO_DELAY, false, SettingsProperty.ClusterScope); + boolSetting("transport.tcp_no_delay", TcpSettings.TCP_NO_DELAY, SettingsProperty.ClusterScope); public static final Setting TCP_KEEP_ALIVE = - boolSetting("transport.tcp.keep_alive", TcpSettings.TCP_KEEP_ALIVE, false, SettingsProperty.ClusterScope); + boolSetting("transport.tcp.keep_alive", TcpSettings.TCP_KEEP_ALIVE, SettingsProperty.ClusterScope); public static final Setting TCP_BLOCKING_SERVER = - boolSetting("transport.tcp.blocking_server", TcpSettings.TCP_BLOCKING_SERVER, false, 
SettingsProperty.ClusterScope); + boolSetting("transport.tcp.blocking_server", TcpSettings.TCP_BLOCKING_SERVER, SettingsProperty.ClusterScope); public static final Setting TCP_REUSE_ADDRESS = - boolSetting("transport.tcp.reuse_address", TcpSettings.TCP_REUSE_ADDRESS, false, SettingsProperty.ClusterScope); + boolSetting("transport.tcp.reuse_address", TcpSettings.TCP_REUSE_ADDRESS, SettingsProperty.ClusterScope); public static final Setting TCP_SEND_BUFFER_SIZE = - Setting.byteSizeSetting("transport.tcp.send_buffer_size", TcpSettings.TCP_SEND_BUFFER_SIZE, false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("transport.tcp.send_buffer_size", TcpSettings.TCP_SEND_BUFFER_SIZE, SettingsProperty.ClusterScope); public static final Setting TCP_RECEIVE_BUFFER_SIZE = - Setting.byteSizeSetting("transport.tcp.receive_buffer_size", TcpSettings.TCP_RECEIVE_BUFFER_SIZE, false, - SettingsProperty.ClusterScope); + Setting.byteSizeSetting("transport.tcp.receive_buffer_size", TcpSettings.TCP_RECEIVE_BUFFER_SIZE, SettingsProperty.ClusterScope); public static final Setting NETTY_MAX_CUMULATION_BUFFER_CAPACITY = - Setting.byteSizeSetting("transport.netty.max_cumulation_buffer_capacity", new ByteSizeValue(-1), false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("transport.netty.max_cumulation_buffer_capacity", new ByteSizeValue(-1), SettingsProperty.ClusterScope); public static final Setting NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = - Setting.intSetting("transport.netty.max_composite_buffer_components", -1, -1, false, SettingsProperty.ClusterScope); + Setting.intSetting("transport.netty.max_composite_buffer_components", -1, -1, SettingsProperty.ClusterScope); // See AdaptiveReceiveBufferSizePredictor#DEFAULT_XXX for default values in netty..., we can use higher ones for us, even fixed one public static final Setting NETTY_RECEIVE_PREDICTOR_SIZE = Setting.byteSizeSetting( @@ -199,13 +198,13 @@ public class NettyTransport extends AbstractLifecycleComponent implem 
defaultReceiverPredictor = Math.min(defaultReceiverPredictor, Math.max(l, 64 * 1024)); } return new ByteSizeValue(defaultReceiverPredictor).toString(); - }, false, SettingsProperty.ClusterScope); + }, SettingsProperty.ClusterScope); public static final Setting NETTY_RECEIVE_PREDICTOR_MIN = - byteSizeSetting("transport.netty.receive_predictor_min", NETTY_RECEIVE_PREDICTOR_SIZE, false, SettingsProperty.ClusterScope); + byteSizeSetting("transport.netty.receive_predictor_min", NETTY_RECEIVE_PREDICTOR_SIZE, SettingsProperty.ClusterScope); public static final Setting NETTY_RECEIVE_PREDICTOR_MAX = - byteSizeSetting("transport.netty.receive_predictor_max", NETTY_RECEIVE_PREDICTOR_SIZE, false, SettingsProperty.ClusterScope); + byteSizeSetting("transport.netty.receive_predictor_max", NETTY_RECEIVE_PREDICTOR_SIZE, SettingsProperty.ClusterScope); public static final Setting NETTY_BOSS_COUNT = - intSetting("transport.netty.boss_count", 1, 1, false, SettingsProperty.ClusterScope); + intSetting("transport.netty.boss_count", 1, 1, SettingsProperty.ClusterScope); protected final NetworkService networkService; protected final Version version; diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index 0b4f99457eb6..fb03153b2158 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -122,7 +122,7 @@ public class TribeService extends AbstractLifecycleComponent { } // internal settings only - public static final Setting TRIBE_NAME_SETTING = Setting.simpleString("tribe.name", false, SettingsProperty.ClusterScope); + public static final Setting TRIBE_NAME_SETTING = Setting.simpleString("tribe.name", SettingsProperty.ClusterScope); private final ClusterService clusterService; private final String[] blockIndicesWrite; private final String[] blockIndicesRead; @@ -141,21 +141,18 @@ public class TribeService extends 
AbstractLifecycleComponent { throw new IllegalArgumentException( "Invalid value for [tribe.on_conflict] must be either [any, drop or start with prefer_] but was: [" + s + "]"); } - }, false, SettingsProperty.ClusterScope); + }, SettingsProperty.ClusterScope); public static final Setting BLOCKS_METADATA_SETTING = - Setting.boolSetting("tribe.blocks.metadata", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("tribe.blocks.metadata", false, SettingsProperty.ClusterScope); public static final Setting BLOCKS_WRITE_SETTING = - Setting.boolSetting("tribe.blocks.write", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("tribe.blocks.write", false, SettingsProperty.ClusterScope); public static final Setting> BLOCKS_WRITE_INDICES_SETTING = - Setting.listSetting("tribe.blocks.write.indices", Collections.emptyList(), Function.identity(), false, - SettingsProperty.ClusterScope); + Setting.listSetting("tribe.blocks.write.indices", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); public static final Setting> BLOCKS_READ_INDICES_SETTING = - Setting.listSetting("tribe.blocks.read.indices", Collections.emptyList(), Function.identity(), false, - SettingsProperty.ClusterScope); + Setting.listSetting("tribe.blocks.read.indices", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); public static final Setting> BLOCKS_METADATA_INDICES_SETTING = - Setting.listSetting("tribe.blocks.metadata.indices", Collections.emptyList(), Function.identity(), false, - SettingsProperty.ClusterScope); + Setting.listSetting("tribe.blocks.metadata.indices", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); public static final Set TRIBE_SETTING_KEYS = Sets.newHashSet(TRIBE_NAME_SETTING.getKey(), ON_CONFLICT_SETTING.getKey(), BLOCKS_METADATA_INDICES_SETTING.getKey(), BLOCKS_METADATA_SETTING.getKey(), BLOCKS_READ_INDICES_SETTING.getKey(), BLOCKS_WRITE_INDICES_SETTING.getKey(), 
BLOCKS_WRITE_SETTING.getKey()); diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java index 9efccd0afd2a..cdf4185c94dd 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java @@ -84,7 +84,7 @@ public class ClusterModuleTests extends ModuleTestCase { public void testRegisterClusterDynamicSetting() { SettingsModule module = new SettingsModule(Settings.EMPTY); - module.registerSetting(Setting.boolSetting("foo.bar", false, true, SettingsProperty.ClusterScope)); + module.registerSetting(Setting.boolSetting("foo.bar", false, SettingsProperty.Dynamic, SettingsProperty.ClusterScope)); assertInstanceBinding(module, ClusterSettings.class, service -> service.hasDynamicSetting("foo.bar")); } @@ -99,7 +99,7 @@ public class ClusterModuleTests extends ModuleTestCase { public void testRegisterIndexDynamicSetting() { SettingsModule module = new SettingsModule(Settings.EMPTY); - module.registerSetting(Setting.boolSetting("foo.bar", false, true, SettingsProperty.IndexScope)); + module.registerSetting(Setting.boolSetting("foo.bar", false, SettingsProperty.Dynamic, SettingsProperty.IndexScope)); assertInstanceBinding(module, IndexScopedSettings.class, service -> service.hasDynamicSetting("foo.bar")); } diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java index 9590d214d5be..811ddc7ae5a4 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java @@ -47,9 +47,9 @@ public class SettingsFilteringIT extends ESIntegTestCase { public static class SettingsFilteringPlugin extends Plugin { public static final Setting SOME_NODE_SETTING = - 
Setting.boolSetting("some.node.setting", false, false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.boolSetting("some.node.setting", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); public static final Setting SOME_OTHER_NODE_SETTING = - Setting.boolSetting("some.other.node.setting", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("some.other.node.setting", false, SettingsProperty.ClusterScope); /** * The name of the plugin. @@ -75,7 +75,7 @@ public class SettingsFilteringIT extends ESIntegTestCase { public void onModule(SettingsModule module) { module.registerSetting(SOME_NODE_SETTING); module.registerSetting(SOME_OTHER_NODE_SETTING); - module.registerSetting(Setting.groupSetting("index.filter_test.", false, SettingsProperty.IndexScope)); + module.registerSetting(Setting.groupSetting("index.filter_test.", SettingsProperty.IndexScope)); module.registerSettingsFilter("index.filter_test.foo"); module.registerSettingsFilter("index.filter_test.bar*"); } diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 7a2e424393bd..41367621f955 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -39,8 +39,8 @@ import java.util.concurrent.atomic.AtomicReference; public class ScopedSettingsTests extends ESTestCase { public void testAddConsumer() { - Setting testSetting = Setting.intSetting("foo.bar", 1, true, SettingsProperty.ClusterScope); - Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, true, SettingsProperty.ClusterScope); + Setting testSetting = Setting.intSetting("foo.bar", 1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); 
AbstractScopedSettings service = new ClusterSettings(Settings.EMPTY, Collections.singleton(testSetting)); AtomicInteger consumer = new AtomicInteger(); @@ -67,8 +67,8 @@ public class ScopedSettingsTests extends ESTestCase { } public void testApply() { - Setting testSetting = Setting.intSetting("foo.bar", 1, true, SettingsProperty.ClusterScope); - Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, true, SettingsProperty.ClusterScope); + Setting testSetting = Setting.intSetting("foo.bar", 1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); AbstractScopedSettings service = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(testSetting, testSetting2))); AtomicInteger consumer = new AtomicInteger(); @@ -139,8 +139,8 @@ public class ScopedSettingsTests extends ESTestCase { public void testIsDynamic(){ ClusterSettings settings = new ClusterSettings(Settings.EMPTY, - new HashSet<>(Arrays.asList(Setting.intSetting("foo.bar", 1, true, SettingsProperty.ClusterScope), - Setting.intSetting("foo.bar.baz", 1, false, SettingsProperty.ClusterScope)))); + new HashSet<>(Arrays.asList(Setting.intSetting("foo.bar", 1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope), + Setting.intSetting("foo.bar.baz", 1, SettingsProperty.ClusterScope)))); assertFalse(settings.hasDynamicSetting("foo.bar.baz")); assertTrue(settings.hasDynamicSetting("foo.bar")); assertNotNull(settings.get("foo.bar.baz")); @@ -151,8 +151,8 @@ public class ScopedSettingsTests extends ESTestCase { } public void testDiff() throws IOException { - Setting foobarbaz = Setting.intSetting("foo.bar.baz", 1, false, SettingsProperty.ClusterScope); - Setting foobar = Setting.intSetting("foo.bar", 1, true, SettingsProperty.ClusterScope); + Setting foobarbaz = Setting.intSetting("foo.bar.baz", 1, SettingsProperty.ClusterScope); + Setting foobar = Setting.intSetting("foo.bar", 1, 
SettingsProperty.Dynamic, SettingsProperty.ClusterScope); ClusterSettings settings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(foobar, foobarbaz))); Settings diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.EMPTY); assertEquals(diff.getAsMap().size(), 1); @@ -241,22 +241,22 @@ public class ScopedSettingsTests extends ESTestCase { try { new IndexScopedSettings( - Settings.EMPTY, Collections.singleton(Setting.groupSetting("boo .", false, SettingsProperty.IndexScope))); + Settings.EMPTY, Collections.singleton(Setting.groupSetting("boo .", SettingsProperty.IndexScope))); fail(); } catch (IllegalArgumentException e) { assertEquals("illegal settings key: [boo .]", e.getMessage()); } new IndexScopedSettings( - Settings.EMPTY, Collections.singleton(Setting.groupSetting("boo.", false, SettingsProperty.IndexScope))); + Settings.EMPTY, Collections.singleton(Setting.groupSetting("boo.", SettingsProperty.IndexScope))); try { new IndexScopedSettings( - Settings.EMPTY, Collections.singleton(Setting.boolSetting("boo.", true, false, SettingsProperty.IndexScope))); + Settings.EMPTY, Collections.singleton(Setting.boolSetting("boo.", true, SettingsProperty.IndexScope))); fail(); } catch (IllegalArgumentException e) { assertEquals("illegal settings key: [boo.]", e.getMessage()); } new IndexScopedSettings( - Settings.EMPTY, Collections.singleton(Setting.boolSetting("boo", true, false, SettingsProperty.IndexScope))); + Settings.EMPTY, Collections.singleton(Setting.boolSetting("boo", true, SettingsProperty.IndexScope))); } public void testLoggingUpdates() { diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index ed9213d392af..b916783b316b 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -36,20 +36,20 @@ public class 
SettingTests extends ESTestCase { public void testGet() { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, SettingsProperty.ClusterScope); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); assertFalse(booleanSetting.get(Settings.EMPTY)); assertFalse(booleanSetting.get(Settings.builder().put("foo.bar", false).build())); - assertTrue(booleanSetting.get(Settings.builder().put("foo.bar", true).build())); + assertTrue(booleanSetting.get(Settings.builder().put("foo.bar", SettingsProperty.Dynamic).build())); } public void testByteSize() { Setting byteSizeValueSetting = - Setting.byteSizeSetting("a.byte.size", new ByteSizeValue(1024), true, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("a.byte.size", new ByteSizeValue(1024), SettingsProperty.Dynamic, SettingsProperty.ClusterScope); assertFalse(byteSizeValueSetting.isGroupSetting()); ByteSizeValue byteSizeValue = byteSizeValueSetting.get(Settings.EMPTY); assertEquals(byteSizeValue.bytes(), 1024); - byteSizeValueSetting = Setting.byteSizeSetting("a.byte.size", s -> "2048b", true, SettingsProperty.ClusterScope); + byteSizeValueSetting = Setting.byteSizeSetting("a.byte.size", s -> "2048b", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); byteSizeValue = byteSizeValueSetting.get(Settings.EMPTY); assertEquals(byteSizeValue.bytes(), 2048); @@ -68,7 +68,7 @@ public class SettingTests extends ESTestCase { } public void testSimpleUpdate() { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, SettingsProperty.ClusterScope); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); AtomicReference atomicBoolean = new AtomicReference<>(null); ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(atomicBoolean::set, logger); Settings build = Settings.builder().put("foo.bar", false).build(); @@ -89,7 +89,7 @@ public class 
SettingTests extends ESTestCase { } public void testUpdateNotDynamic() { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, false, SettingsProperty.ClusterScope); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, SettingsProperty.ClusterScope); assertFalse(booleanSetting.isGroupSetting()); AtomicReference atomicBoolean = new AtomicReference<>(null); try { @@ -101,7 +101,7 @@ public class SettingTests extends ESTestCase { } public void testUpdaterIsIsolated() { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, SettingsProperty.ClusterScope); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); AtomicReference ab1 = new AtomicReference<>(null); AtomicReference ab2 = new AtomicReference<>(null); ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(ab1::set, logger); @@ -113,7 +113,7 @@ public class SettingTests extends ESTestCase { public void testDefault() { TimeValue defautlValue = TimeValue.timeValueMillis(randomIntBetween(0, 1000000)); Setting setting = - Setting.positiveTimeSetting("my.time.value", defautlValue, randomBoolean(), SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("my.time.value", defautlValue, SettingsProperty.ClusterScope); assertFalse(setting.isGroupSetting()); String aDefault = setting.getDefaultRaw(Settings.EMPTY); assertEquals(defautlValue.millis() + "ms", aDefault); @@ -121,18 +121,19 @@ public class SettingTests extends ESTestCase { assertEquals(defautlValue, setting.getDefault(Settings.EMPTY)); Setting secondaryDefault = - new Setting<>("foo.bar", (s) -> s.get("old.foo.bar", "some_default"), (s) -> s, randomBoolean(), SettingsProperty.ClusterScope); + new Setting<>("foo.bar", (s) -> s.get("old.foo.bar", "some_default"), (s) -> s, SettingsProperty.ClusterScope); assertEquals("some_default", secondaryDefault.get(Settings.EMPTY)); assertEquals("42", 
secondaryDefault.get(Settings.builder().put("old.foo.bar", 42).build())); Setting secondaryDefaultViaSettings = - new Setting<>("foo.bar", secondaryDefault, (s) -> s, randomBoolean(), SettingsProperty.ClusterScope); + new Setting<>("foo.bar", secondaryDefault, (s) -> s, SettingsProperty.ClusterScope); assertEquals("some_default", secondaryDefaultViaSettings.get(Settings.EMPTY)); assertEquals("42", secondaryDefaultViaSettings.get(Settings.builder().put("old.foo.bar", 42).build())); } public void testComplexType() { AtomicReference ref = new AtomicReference<>(null); - Setting setting = new Setting<>("foo.bar", (s) -> "", (s) -> new ComplexType(s), true, SettingsProperty.ClusterScope); + Setting setting = new Setting<>("foo.bar", (s) -> "", (s) -> new ComplexType(s), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); assertFalse(setting.isGroupSetting()); ref.set(setting.get(Settings.EMPTY)); ComplexType type = ref.get(); @@ -153,11 +154,11 @@ public class SettingTests extends ESTestCase { } public void testType() { - Setting integerSetting = Setting.intSetting("foo.int.bar", 1, true, SettingsProperty.ClusterScope); + Setting integerSetting = Setting.intSetting("foo.int.bar", 1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); assertThat(integerSetting.hasClusterScope(), is(true)); assertThat(integerSetting.hasIndexScope(), is(false)); assertThat(integerSetting.hasNodeScope(), is(false)); - integerSetting = Setting.intSetting("foo.int.bar", 1, true, SettingsProperty.IndexScope); + integerSetting = Setting.intSetting("foo.int.bar", 1, SettingsProperty.Dynamic, SettingsProperty.IndexScope); assertThat(integerSetting.hasIndexScope(), is(true)); assertThat(integerSetting.hasClusterScope(), is(false)); assertThat(integerSetting.hasNodeScope(), is(false)); @@ -165,7 +166,7 @@ public class SettingTests extends ESTestCase { public void testGroups() { AtomicReference ref = new AtomicReference<>(null); - Setting setting = Setting.groupSetting("foo.bar.", true, 
SettingsProperty.ClusterScope); + Setting setting = Setting.groupSetting("foo.bar.", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); assertTrue(setting.isGroupSetting()); ClusterSettings.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger); @@ -243,8 +244,8 @@ public class SettingTests extends ESTestCase { public void testComposite() { Composite c = new Composite(); - Setting a = Setting.intSetting("foo.int.bar.a", 1, true, SettingsProperty.ClusterScope); - Setting b = Setting.intSetting("foo.int.bar.b", 1, true, SettingsProperty.ClusterScope); + Setting a = Setting.intSetting("foo.int.bar.a", 1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting b = Setting.intSetting("foo.int.bar.b", 1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); ClusterSettings.SettingUpdater> settingUpdater = Setting.compoundUpdater(c::set, a, b, logger); assertFalse(settingUpdater.apply(Settings.EMPTY, Settings.EMPTY)); assertNull(c.a); @@ -272,7 +273,8 @@ public class SettingTests extends ESTestCase { } public void testListSettings() { - Setting> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(), true, SettingsProperty.ClusterScope); + Setting> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); List value = listSetting.get(Settings.EMPTY); assertEquals(1, value.size()); assertEquals("foo,bar", value.get(0)); @@ -311,7 +313,8 @@ public class SettingTests extends ESTestCase { assertEquals(1, ref.get().size()); assertEquals("foo,bar", ref.get().get(0)); - Setting> otherSettings = Setting.listSetting("foo.bar", Collections.emptyList(), Integer::parseInt, true, SettingsProperty.ClusterScope); + Setting> otherSettings = Setting.listSetting("foo.bar", Collections.emptyList(), Integer::parseInt, + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); List defaultValue = 
otherSettings.get(Settings.EMPTY); assertEquals(0, defaultValue.size()); List intValues = otherSettings.get(Settings.builder().put("foo.bar", "0,1,2,3").build()); @@ -320,7 +323,8 @@ public class SettingTests extends ESTestCase { assertEquals(i, intValues.get(i).intValue()); } - Setting> settingWithFallback = Setting.listSetting("foo.baz", listSetting, s -> s, true, SettingsProperty.ClusterScope); + Setting> settingWithFallback = Setting.listSetting("foo.baz", listSetting, s -> s, + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); value = settingWithFallback.get(Settings.EMPTY); assertEquals(1, value.size()); assertEquals("foo,bar", value.get(0)); @@ -342,7 +346,8 @@ public class SettingTests extends ESTestCase { } public void testListSettingAcceptsNumberSyntax() { - Setting> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(), true, SettingsProperty.ClusterScope); + Setting> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(), + SettingsProperty.Dynamic, SettingsProperty.ClusterScope); List input = Arrays.asList("test", "test1, test2", "test", ",,,,"); Settings.Builder builder = Settings.builder().putArray("foo.bar", input.toArray(new String[0])); // try to parse this really annoying format @@ -360,7 +365,7 @@ public class SettingTests extends ESTestCase { } public void testDynamicKeySetting() { - Setting setting = Setting.dynamicKeySetting("foo.", "false", Boolean::parseBoolean, false, SettingsProperty.ClusterScope); + Setting setting = Setting.dynamicKeySetting("foo.", "false", Boolean::parseBoolean, SettingsProperty.ClusterScope); assertTrue(setting.hasComplexMatcher()); assertTrue(setting.match("foo.bar")); assertFalse(setting.match("foo")); @@ -377,7 +382,7 @@ public class SettingTests extends ESTestCase { } public void testMinMaxInt() { - Setting integerSetting = Setting.intSetting("foo.bar", 1, 0, 10, false, SettingsProperty.ClusterScope); + Setting integerSetting = 
Setting.intSetting("foo.bar", 1, 0, 10, SettingsProperty.ClusterScope); try { integerSetting.get(Settings.builder().put("foo.bar", 11).build()); fail(); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java index 3cd2bb2d0214..f5fd760297ba 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java @@ -46,13 +46,13 @@ public class SettingsModuleTests extends ModuleTestCase { { Settings settings = Settings.builder().put("some.custom.setting", "2.0").build(); SettingsModule module = new SettingsModule(settings); - module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, false, SettingsProperty.ClusterScope)); + module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, SettingsProperty.ClusterScope)); assertInstanceBinding(module, Settings.class, (s) -> s == settings); } { Settings settings = Settings.builder().put("some.custom.setting", "false").build(); SettingsModule module = new SettingsModule(settings); - module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, false, SettingsProperty.ClusterScope)); + module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, SettingsProperty.ClusterScope)); try { assertInstanceBinding(module, Settings.class, (s) -> s == settings); fail(); @@ -132,9 +132,9 @@ public class SettingsModuleTests extends ModuleTestCase { public void testRegisterSettingsFilter() { Settings settings = Settings.builder().put("foo.bar", "false").put("bar.foo", false).put("bar.baz", false).build(); SettingsModule module = new SettingsModule(settings); - module.registerSetting(Setting.boolSetting("foo.bar", true, false, SettingsProperty.ClusterScope)); - module.registerSetting(Setting.boolSetting("bar.foo", true, false, SettingsProperty.ClusterScope, 
SettingsProperty.Filtered)); - module.registerSetting(Setting.boolSetting("bar.baz", true, false, SettingsProperty.ClusterScope)); + module.registerSetting(Setting.boolSetting("foo.bar", true, SettingsProperty.ClusterScope)); + module.registerSetting(Setting.boolSetting("bar.foo", true, SettingsProperty.ClusterScope, SettingsProperty.Filtered)); + module.registerSetting(Setting.boolSetting("bar.baz", true, SettingsProperty.ClusterScope)); module.registerSettingsFilter("foo.*"); try { diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 5a17caff67d8..7da2b0aaa2d5 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -195,9 +195,9 @@ public class IndexModuleTests extends ESTestCase { public void testListener() throws IOException { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, SettingsProperty.IndexScope); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, SettingsProperty.Dynamic, SettingsProperty.IndexScope); IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings, booleanSetting), null, new AnalysisRegistry(null, environment)); - Setting booleanSetting2 = Setting.boolSetting("foo.bar.baz", false, true, SettingsProperty.IndexScope); + Setting booleanSetting2 = Setting.boolSetting("foo.bar.baz", false, SettingsProperty.Dynamic, SettingsProperty.IndexScope); AtomicBoolean atomicBoolean = new AtomicBoolean(false); module.addSettingsUpdateConsumer(booleanSetting, atomicBoolean::set); diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index ad8edee61bfb..18af7e13f7d3 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -45,7 +45,8 @@ public class IndexSettingsTests extends ESTestCase { Version version = VersionUtils.getPreviousVersion(); Settings theSettings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build(); final AtomicInteger integer = new AtomicInteger(0); - Setting integerSetting = Setting.intSetting("index.test.setting.int", -1, true, SettingsProperty.IndexScope); + Setting integerSetting = Setting.intSetting("index.test.setting.int", -1, + SettingsProperty.Dynamic, SettingsProperty.IndexScope); IndexMetaData metaData = newIndexMeta("index", theSettings); IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), Settings.EMPTY, integerSetting); settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, integer::set); @@ -66,8 +67,10 @@ public class IndexSettingsTests extends ESTestCase { .put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build(); final AtomicInteger integer = new AtomicInteger(0); final StringBuilder builder = new StringBuilder(); - Setting integerSetting = Setting.intSetting("index.test.setting.int", -1, true, SettingsProperty.IndexScope); - Setting notUpdated = new Setting<>("index.not.updated", "", Function.identity(), true, SettingsProperty.IndexScope); + Setting integerSetting = Setting.intSetting("index.test.setting.int", -1, + SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting notUpdated = new Setting<>("index.not.updated", "", Function.identity(), + SettingsProperty.Dynamic, SettingsProperty.IndexScope); IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), Settings.EMPTY, integerSetting, notUpdated); settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, integer::set); @@ -129,7 +132,7 @@ public class IndexSettingsTests extends ESTestCase { Settings nodeSettings = 
Settings.settingsBuilder().put("index.foo.bar", 43).build(); final AtomicInteger indexValue = new AtomicInteger(0); - Setting integerSetting = Setting.intSetting("index.foo.bar", -1, true, SettingsProperty.IndexScope); + Setting integerSetting = Setting.intSetting("index.foo.bar", -1, SettingsProperty.Dynamic, SettingsProperty.IndexScope); IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), nodeSettings, integerSetting); settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, indexValue::set); assertEquals(numReplicas, settings.getNumberOfReplicas()); diff --git a/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java b/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java index 69f5316d4d0b..666994e823a8 100644 --- a/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java +++ b/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java @@ -44,7 +44,8 @@ public class SettingsListenerIT extends ESIntegTestCase { public static class SettingsListenerPlugin extends Plugin { private final SettingsTestingService service = new SettingsTestingService(); - private static final Setting SETTING = Setting.intSetting("index.test.new.setting", 0, true, SettingsProperty.IndexScope); + private static final Setting SETTING = Setting.intSetting("index.test.new.setting", 0, + SettingsProperty.Dynamic, SettingsProperty.IndexScope); /** * The name of the plugin. 
*/ @@ -94,7 +95,8 @@ public class SettingsListenerIT extends ESIntegTestCase { public static class SettingsTestingService { public volatile int value; - public static Setting VALUE = Setting.intSetting("index.test.new.setting", -1, -1, true, SettingsProperty.IndexScope); + public static Setting VALUE = Setting.intSetting("index.test.new.setting", -1, -1, + SettingsProperty.Dynamic, SettingsProperty.IndexScope); public void setValue(int value) { this.value = value; diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java index 8dee6712833e..23c9fb43715a 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java @@ -644,11 +644,11 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { } private static final Setting INDEX_A = - new Setting<>("index.a", "", Function.identity(), true, SettingsProperty.IndexScope); + new Setting<>("index.a", "", Function.identity(), SettingsProperty.Dynamic, SettingsProperty.IndexScope); private static final Setting INDEX_C = - new Setting<>("index.c", "", Function.identity(), true, SettingsProperty.IndexScope); + new Setting<>("index.c", "", Function.identity(), SettingsProperty.Dynamic, SettingsProperty.IndexScope); private static final Setting INDEX_E = - new Setting<>("index.e", "", Function.identity(), false, SettingsProperty.IndexScope); + new Setting<>("index.e", "", Function.identity(), SettingsProperty.IndexScope); public void onModule(SettingsModule module) { diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index d1ad28101ff6..e025246a2f0a 100644 --- 
a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -199,9 +199,9 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase { public static class RandomExceptionDirectoryReaderWrapper extends MockEngineSupport.DirectoryReaderWrapper { public static final Setting EXCEPTION_TOP_LEVEL_RATIO_SETTING = - Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, SettingsProperty.IndexScope); + Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, SettingsProperty.IndexScope); public static final Setting EXCEPTION_LOW_LEVEL_RATIO_SETTING = - Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, SettingsProperty.IndexScope); + Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, SettingsProperty.IndexScope); public static class TestPlugin extends Plugin { @Override public String name() { diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java index ee260a51b042..f53ca39941a4 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java @@ -154,9 +154,9 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase { public static class TestPlugin extends Plugin { public static final Setting EXCEPTION_TOP_LEVEL_RATIO_SETTING = - Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, SettingsProperty.IndexScope); + Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, SettingsProperty.IndexScope); public static final Setting EXCEPTION_LOW_LEVEL_RATIO_SETTING = - Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, SettingsProperty.IndexScope); + 
Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, SettingsProperty.IndexScope); @Override public String name() { return "random-exception-reader-wrapper"; diff --git a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java index 02fd27a952d2..8669a38087ee 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java +++ b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java @@ -60,10 +60,9 @@ public class MockRepository extends FsRepository { public static class Plugin extends org.elasticsearch.plugins.Plugin { - public static final Setting USERNAME_SETTING = - Setting.simpleString("secret.mock.username", false, SettingsProperty.ClusterScope); + public static final Setting USERNAME_SETTING = Setting.simpleString("secret.mock.username", SettingsProperty.ClusterScope); public static final Setting PASSWORD_SETTING = - Setting.simpleString("secret.mock.password", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("secret.mock.password", SettingsProperty.ClusterScope, SettingsProperty.Filtered); @Override public String name() { diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java index 0a6a752908f1..a59510b8873c 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java @@ -30,29 +30,28 @@ public interface AzureComputeService { final class Management { public static final Setting SUBSCRIPTION_ID_SETTING = - Setting.simpleString("cloud.azure.management.subscription.id", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); + 
Setting.simpleString("cloud.azure.management.subscription.id", SettingsProperty.ClusterScope, SettingsProperty.Filtered); public static final Setting SERVICE_NAME_SETTING = - Setting.simpleString("cloud.azure.management.cloud.service.name", false, SettingsProperty.ClusterScope); + Setting.simpleString("cloud.azure.management.cloud.service.name", SettingsProperty.ClusterScope); // Keystore settings public static final Setting KEYSTORE_PATH_SETTING = - Setting.simpleString("cloud.azure.management.keystore.path", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("cloud.azure.management.keystore.path", SettingsProperty.ClusterScope, SettingsProperty.Filtered); public static final Setting KEYSTORE_PASSWORD_SETTING = - Setting.simpleString("cloud.azure.management.keystore.password", false, SettingsProperty.ClusterScope, + Setting.simpleString("cloud.azure.management.keystore.password", SettingsProperty.ClusterScope, SettingsProperty.Filtered); public static final Setting KEYSTORE_TYPE_SETTING = - new Setting<>("cloud.azure.management.keystore.type", KeyStoreType.pkcs12.name(), KeyStoreType::fromString, false, + new Setting<>("cloud.azure.management.keystore.type", KeyStoreType.pkcs12.name(), KeyStoreType::fromString, SettingsProperty.ClusterScope, SettingsProperty.Filtered); } final class Discovery { public static final Setting REFRESH_SETTING = - Setting.positiveTimeSetting("discovery.azure.refresh_interval", TimeValue.timeValueSeconds(0), false, - SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("discovery.azure.refresh_interval", TimeValue.timeValueSeconds(0), SettingsProperty.ClusterScope); public static final Setting HOST_TYPE_SETTING = new Setting<>("discovery.azure.host.type", AzureUnicastHostsProvider.HostType.PRIVATE_IP.name(), - AzureUnicastHostsProvider.HostType::fromString, false, SettingsProperty.ClusterScope); + AzureUnicastHostsProvider.HostType::fromString, SettingsProperty.ClusterScope); public static 
final String ENDPOINT_NAME = "discovery.azure.endpoint.name"; public static final String DEPLOYMENT_NAME = "discovery.azure.deployment.name"; diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java index b88704e18db6..2f2c423afb78 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java @@ -33,8 +33,7 @@ import java.util.Locale; import java.util.function.Function; public interface AwsEc2Service { - Setting AUTO_ATTRIBUTE_SETTING = - Setting.boolSetting("cloud.node.auto_attributes", false, false, SettingsProperty.ClusterScope); + Setting AUTO_ATTRIBUTE_SETTING = Setting.boolSetting("cloud.node.auto_attributes", false, SettingsProperty.ClusterScope); // Global AWS settings (shared between discovery-ec2 and repository-s3) // Each setting starting with `cloud.aws` also exists in repository-s3 project. Don't forget to update @@ -43,43 +42,43 @@ public interface AwsEc2Service { * cloud.aws.access_key: AWS Access key. Shared with repository-s3 plugin */ Setting KEY_SETTING = - Setting.simpleString("cloud.aws.access_key", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("cloud.aws.access_key", SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.secret_key: AWS Secret key. Shared with repository-s3 plugin */ Setting SECRET_SETTING = - Setting.simpleString("cloud.aws.secret_key", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("cloud.aws.secret_key", SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.protocol: Protocol for AWS API: http or https. Defaults to https. 
Shared with repository-s3 plugin */ Setting PROTOCOL_SETTING = new Setting<>("cloud.aws.protocol", "https", s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), - false, SettingsProperty.ClusterScope); + SettingsProperty.ClusterScope); /** * cloud.aws.proxy.host: In case of proxy, define its hostname/IP. Shared with repository-s3 plugin */ - Setting PROXY_HOST_SETTING = Setting.simpleString("cloud.aws.proxy.host", false, SettingsProperty.ClusterScope); + Setting PROXY_HOST_SETTING = Setting.simpleString("cloud.aws.proxy.host", SettingsProperty.ClusterScope); /** * cloud.aws.proxy.port: In case of proxy, define its port. Defaults to 80. Shared with repository-s3 plugin */ - Setting PROXY_PORT_SETTING = Setting.intSetting("cloud.aws.proxy.port", 80, 0, 1<<16, false, SettingsProperty.ClusterScope); + Setting PROXY_PORT_SETTING = Setting.intSetting("cloud.aws.proxy.port", 80, 0, 1<<16, SettingsProperty.ClusterScope); /** * cloud.aws.proxy.username: In case of proxy with auth, define the username. Shared with repository-s3 plugin */ - Setting PROXY_USERNAME_SETTING = Setting.simpleString("cloud.aws.proxy.username", false, SettingsProperty.ClusterScope); + Setting PROXY_USERNAME_SETTING = Setting.simpleString("cloud.aws.proxy.username", SettingsProperty.ClusterScope); /** * cloud.aws.proxy.password: In case of proxy with auth, define the password. Shared with repository-s3 plugin */ Setting PROXY_PASSWORD_SETTING = - Setting.simpleString("cloud.aws.proxy.password", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("cloud.aws.proxy.password", SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.signer: If you are using an old AWS API version, you can define a Signer. 
Shared with repository-s3 plugin */ - Setting SIGNER_SETTING = Setting.simpleString("cloud.aws.signer", false, SettingsProperty.ClusterScope); + Setting SIGNER_SETTING = Setting.simpleString("cloud.aws.signer", SettingsProperty.ClusterScope); /** * cloud.aws.region: Region. Shared with repository-s3 plugin */ Setting REGION_SETTING = - new Setting<>("cloud.aws.region", "", s -> s.toLowerCase(Locale.ROOT), false, SettingsProperty.ClusterScope); + new Setting<>("cloud.aws.region", "", s -> s.toLowerCase(Locale.ROOT), SettingsProperty.ClusterScope); /** * Defines specific ec2 settings starting with cloud.aws.ec2. @@ -89,63 +88,63 @@ public interface AwsEc2Service { * cloud.aws.ec2.access_key: AWS Access key specific for EC2 API calls. Defaults to cloud.aws.access_key. * @see AwsEc2Service#KEY_SETTING */ - Setting KEY_SETTING = new Setting<>("cloud.aws.ec2.access_key", AwsEc2Service.KEY_SETTING, Function.identity(), false, + Setting KEY_SETTING = new Setting<>("cloud.aws.ec2.access_key", AwsEc2Service.KEY_SETTING, Function.identity(), SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.ec2.secret_key: AWS Secret key specific for EC2 API calls. Defaults to cloud.aws.secret_key. * @see AwsEc2Service#SECRET_SETTING */ - Setting SECRET_SETTING = new Setting<>("cloud.aws.ec2.secret_key", AwsEc2Service.SECRET_SETTING, Function.identity(), false, + Setting SECRET_SETTING = new Setting<>("cloud.aws.ec2.secret_key", AwsEc2Service.SECRET_SETTING, Function.identity(), SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.ec2.protocol: Protocol for AWS API specific for EC2 API calls: http or https. Defaults to cloud.aws.protocol. 
* @see AwsEc2Service#PROTOCOL_SETTING */ Setting PROTOCOL_SETTING = new Setting<>("cloud.aws.ec2.protocol", AwsEc2Service.PROTOCOL_SETTING, - s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), false, SettingsProperty.ClusterScope); + s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), SettingsProperty.ClusterScope); /** * cloud.aws.ec2.proxy.host: In case of proxy, define its hostname/IP specific for EC2 API calls. Defaults to cloud.aws.proxy.host. * @see AwsEc2Service#PROXY_HOST_SETTING */ Setting PROXY_HOST_SETTING = new Setting<>("cloud.aws.ec2.proxy.host", AwsEc2Service.PROXY_HOST_SETTING, - Function.identity(), false, SettingsProperty.ClusterScope); + Function.identity(), SettingsProperty.ClusterScope); /** * cloud.aws.ec2.proxy.port: In case of proxy, define its port specific for EC2 API calls. Defaults to cloud.aws.proxy.port. * @see AwsEc2Service#PROXY_PORT_SETTING */ Setting PROXY_PORT_SETTING = new Setting<>("cloud.aws.ec2.proxy.port", AwsEc2Service.PROXY_PORT_SETTING, - s -> Setting.parseInt(s, 0, 1<<16, "cloud.aws.ec2.proxy.port"), false, SettingsProperty.ClusterScope); + s -> Setting.parseInt(s, 0, 1<<16, "cloud.aws.ec2.proxy.port"), SettingsProperty.ClusterScope); /** * cloud.aws.ec2.proxy.username: In case of proxy with auth, define the username specific for EC2 API calls. * Defaults to cloud.aws.proxy.username. * @see AwsEc2Service#PROXY_USERNAME_SETTING */ Setting PROXY_USERNAME_SETTING = new Setting<>("cloud.aws.ec2.proxy.username", AwsEc2Service.PROXY_USERNAME_SETTING, - Function.identity(), false, SettingsProperty.ClusterScope); + Function.identity(), SettingsProperty.ClusterScope); /** * cloud.aws.ec2.proxy.password: In case of proxy with auth, define the password specific for EC2 API calls. * Defaults to cloud.aws.proxy.password. 
* @see AwsEc2Service#PROXY_PASSWORD_SETTING */ Setting PROXY_PASSWORD_SETTING = new Setting<>("cloud.aws.ec2.proxy.password", AwsEc2Service.PROXY_PASSWORD_SETTING, - Function.identity(), false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Function.identity(), SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.ec2.signer: If you are using an old AWS API version, you can define a Signer. Specific for EC2 API calls. * Defaults to cloud.aws.signer. * @see AwsEc2Service#SIGNER_SETTING */ Setting SIGNER_SETTING = new Setting<>("cloud.aws.ec2.signer", AwsEc2Service.SIGNER_SETTING, Function.identity(), - false, SettingsProperty.ClusterScope); + SettingsProperty.ClusterScope); /** * cloud.aws.ec2.region: Region specific for EC2 API calls. Defaults to cloud.aws.region. * @see AwsEc2Service#REGION_SETTING */ Setting REGION_SETTING = new Setting<>("cloud.aws.ec2.region", AwsEc2Service.REGION_SETTING, - s -> s.toLowerCase(Locale.ROOT), false, SettingsProperty.ClusterScope); + s -> s.toLowerCase(Locale.ROOT), SettingsProperty.ClusterScope); /** * cloud.aws.ec2.endpoint: Endpoint. If not set, endpoint will be guessed based on region setting. */ - Setting ENDPOINT_SETTING = Setting.simpleString("cloud.aws.ec2.endpoint", false, SettingsProperty.ClusterScope); + Setting ENDPOINT_SETTING = Setting.simpleString("cloud.aws.ec2.endpoint", SettingsProperty.ClusterScope); } /** @@ -164,32 +163,32 @@ public interface AwsEc2Service { * Can be one of private_ip, public_ip, private_dns, public_dns. Defaults to private_ip. 
*/ Setting HOST_TYPE_SETTING = - new Setting<>("discovery.ec2.host_type", HostType.PRIVATE_IP.name(), s -> HostType.valueOf(s.toUpperCase(Locale.ROOT)), false, + new Setting<>("discovery.ec2.host_type", HostType.PRIVATE_IP.name(), s -> HostType.valueOf(s.toUpperCase(Locale.ROOT)), SettingsProperty.ClusterScope); /** * discovery.ec2.any_group: If set to false, will require all security groups to be present for the instance to be used for the * discovery. Defaults to true. */ Setting ANY_GROUP_SETTING = - Setting.boolSetting("discovery.ec2.any_group", true, false, SettingsProperty.ClusterScope); + Setting.boolSetting("discovery.ec2.any_group", true, SettingsProperty.ClusterScope); /** * discovery.ec2.groups: Either a comma separated list or array based list of (security) groups. Only instances with the provided * security groups will be used in the cluster discovery. (NOTE: You could provide either group NAME or group ID.) */ Setting> GROUPS_SETTING = - Setting.listSetting("discovery.ec2.groups", new ArrayList<>(), s -> s.toString(), false, SettingsProperty.ClusterScope); + Setting.listSetting("discovery.ec2.groups", new ArrayList<>(), s -> s.toString(), SettingsProperty.ClusterScope); /** * discovery.ec2.availability_zones: Either a comma separated list or array based list of availability zones. Only instances within * the provided availability zones will be used in the cluster discovery. */ Setting> AVAILABILITY_ZONES_SETTING = - Setting.listSetting("discovery.ec2.availability_zones", Collections.emptyList(), s -> s.toString(), false, + Setting.listSetting("discovery.ec2.availability_zones", Collections.emptyList(), s -> s.toString(), SettingsProperty.ClusterScope); /** * discovery.ec2.node_cache_time: How long the list of hosts is cached to prevent further requests to the AWS API. Defaults to 10s. 
*/ Setting NODE_CACHE_TIME_SETTING = - Setting.timeSetting("discovery.ec2.node_cache_time", TimeValue.timeValueSeconds(10), false, SettingsProperty.ClusterScope); + Setting.timeSetting("discovery.ec2.node_cache_time", TimeValue.timeValueSeconds(10), SettingsProperty.ClusterScope); /** * discovery.ec2.tag.*: The ec2 discovery can filter machines to include in the cluster based on tags (and not just groups). @@ -197,7 +196,7 @@ public interface AwsEc2Service { * instances with a tag key set to stage, and a value of dev. Several tags set will require all of those tags to be set for the * instance to be included. */ - Setting TAG_SETTING = Setting.groupSetting("discovery.ec2.tag.", false, SettingsProperty.ClusterScope); + Setting TAG_SETTING = Setting.groupSetting("discovery.ec2.tag.", SettingsProperty.ClusterScope); } AmazonEC2 client(); diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java index 53e0c10d058f..66d8365476b4 100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java @@ -73,11 +73,11 @@ public class AttachmentMapper extends FieldMapper { private static ESLogger logger = ESLoggerFactory.getLogger("mapper.attachment"); public static final Setting INDEX_ATTACHMENT_IGNORE_ERRORS_SETTING = - Setting.boolSetting("index.mapping.attachment.ignore_errors", true, false, SettingsProperty.IndexScope); + Setting.boolSetting("index.mapping.attachment.ignore_errors", true, SettingsProperty.IndexScope); public static final Setting INDEX_ATTACHMENT_DETECT_LANGUAGE_SETTING = - Setting.boolSetting("index.mapping.attachment.detect_language", false, false, SettingsProperty.IndexScope); + Setting.boolSetting("index.mapping.attachment.detect_language", false, 
SettingsProperty.IndexScope); public static final Setting INDEX_ATTACHMENT_INDEXED_CHARS_SETTING = - Setting.intSetting("index.mapping.attachment.indexed_chars", 100000, false, SettingsProperty.IndexScope); + Setting.intSetting("index.mapping.attachment.indexed_chars", 100000, SettingsProperty.IndexScope); public static final String CONTENT_TYPE = "attachment"; diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java index f16e9b6729cf..197bde69a1ce 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java @@ -42,19 +42,19 @@ public interface AzureStorageService { final class Storage { public static final String PREFIX = "cloud.azure.storage."; public static final Setting TIMEOUT_SETTING = - Setting.timeSetting("cloud.azure.storage.timeout", TimeValue.timeValueMinutes(5), false, SettingsProperty.ClusterScope); + Setting.timeSetting("cloud.azure.storage.timeout", TimeValue.timeValueMinutes(5), SettingsProperty.ClusterScope); public static final Setting ACCOUNT_SETTING = - Setting.simpleString("repositories.azure.account", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("repositories.azure.account", SettingsProperty.ClusterScope, SettingsProperty.Filtered); public static final Setting CONTAINER_SETTING = - Setting.simpleString("repositories.azure.container", false, SettingsProperty.ClusterScope); + Setting.simpleString("repositories.azure.container", SettingsProperty.ClusterScope); public static final Setting BASE_PATH_SETTING = - Setting.simpleString("repositories.azure.base_path", false, SettingsProperty.ClusterScope); + Setting.simpleString("repositories.azure.base_path", SettingsProperty.ClusterScope); 
public static final Setting LOCATION_MODE_SETTING = - Setting.simpleString("repositories.azure.location_mode", false, SettingsProperty.ClusterScope); + Setting.simpleString("repositories.azure.location_mode", SettingsProperty.ClusterScope); public static final Setting CHUNK_SIZE_SETTING = - Setting.byteSizeSetting("repositories.azure.chunk_size", new ByteSizeValue(-1), false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("repositories.azure.chunk_size", new ByteSizeValue(-1), SettingsProperty.ClusterScope); public static final Setting COMPRESS_SETTING = - Setting.boolSetting("repositories.azure.compress", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("repositories.azure.compress", false, SettingsProperty.ClusterScope); } boolean doesContainerExist(String account, LocationMode mode, String container); diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index 013007a84a7f..56b2d9fc2536 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -68,17 +68,14 @@ public class AzureRepository extends BlobStoreRepository { public final static String TYPE = "azure"; public static final class Repository { - public static final Setting ACCOUNT_SETTING = - Setting.simpleString("account", false, SettingsProperty.ClusterScope); + public static final Setting ACCOUNT_SETTING = Setting.simpleString("account", SettingsProperty.ClusterScope); public static final Setting CONTAINER_SETTING = - new Setting<>("container", "elasticsearch-snapshots", Function.identity(), false, SettingsProperty.ClusterScope); - public static final Setting BASE_PATH_SETTING = Setting.simpleString("base_path", false, SettingsProperty.ClusterScope); - public static 
final Setting LOCATION_MODE_SETTING = - Setting.simpleString("location_mode", false, SettingsProperty.ClusterScope); + new Setting<>("container", "elasticsearch-snapshots", Function.identity(), SettingsProperty.ClusterScope); + public static final Setting BASE_PATH_SETTING = Setting.simpleString("base_path", SettingsProperty.ClusterScope); + public static final Setting LOCATION_MODE_SETTING = Setting.simpleString("location_mode", SettingsProperty.ClusterScope); public static final Setting CHUNK_SIZE_SETTING = - Setting.byteSizeSetting("chunk_size", MAX_CHUNK_SIZE, false, SettingsProperty.ClusterScope); - public static final Setting COMPRESS_SETTING = - Setting.boolSetting("compress", false, false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("chunk_size", MAX_CHUNK_SIZE, SettingsProperty.ClusterScope); + public static final Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, SettingsProperty.ClusterScope); } private final AzureBlobStore blobStore; diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java index 22bc136523ba..6f18bd3e6fd3 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java @@ -40,43 +40,43 @@ public interface AwsS3Service extends LifecycleComponent { * cloud.aws.access_key: AWS Access key. Shared with discovery-ec2 plugin */ Setting KEY_SETTING = - Setting.simpleString("cloud.aws.access_key", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("cloud.aws.access_key", SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.secret_key: AWS Secret key. 
Shared with discovery-ec2 plugin */ Setting SECRET_SETTING = - Setting.simpleString("cloud.aws.secret_key", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("cloud.aws.secret_key", SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.protocol: Protocol for AWS API: http or https. Defaults to https. Shared with discovery-ec2 plugin */ Setting PROTOCOL_SETTING = new Setting<>("cloud.aws.protocol", "https", s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), - false, SettingsProperty.ClusterScope); + SettingsProperty.ClusterScope); /** * cloud.aws.proxy.host: In case of proxy, define its hostname/IP. Shared with discovery-ec2 plugin */ - Setting PROXY_HOST_SETTING = Setting.simpleString("cloud.aws.proxy.host", false, SettingsProperty.ClusterScope); + Setting PROXY_HOST_SETTING = Setting.simpleString("cloud.aws.proxy.host", SettingsProperty.ClusterScope); /** * cloud.aws.proxy.port: In case of proxy, define its port. Defaults to 80. Shared with discovery-ec2 plugin */ - Setting PROXY_PORT_SETTING = Setting.intSetting("cloud.aws.proxy.port", 80, 0, 1<<16, false, SettingsProperty.ClusterScope); + Setting PROXY_PORT_SETTING = Setting.intSetting("cloud.aws.proxy.port", 80, 0, 1<<16, SettingsProperty.ClusterScope); /** * cloud.aws.proxy.username: In case of proxy with auth, define the username. Shared with discovery-ec2 plugin */ - Setting PROXY_USERNAME_SETTING = Setting.simpleString("cloud.aws.proxy.username", false, SettingsProperty.ClusterScope); + Setting PROXY_USERNAME_SETTING = Setting.simpleString("cloud.aws.proxy.username", SettingsProperty.ClusterScope); /** * cloud.aws.proxy.password: In case of proxy with auth, define the password. 
Shared with discovery-ec2 plugin */ Setting PROXY_PASSWORD_SETTING = - Setting.simpleString("cloud.aws.proxy.password", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("cloud.aws.proxy.password", SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.signer: If you are using an old AWS API version, you can define a Signer. Shared with discovery-ec2 plugin */ - Setting SIGNER_SETTING = Setting.simpleString("cloud.aws.signer", false, SettingsProperty.ClusterScope); + Setting SIGNER_SETTING = Setting.simpleString("cloud.aws.signer", SettingsProperty.ClusterScope); /** * cloud.aws.region: Region. Shared with discovery-ec2 plugin */ Setting REGION_SETTING = - new Setting<>("cloud.aws.region", "", s -> s.toLowerCase(Locale.ROOT), false, SettingsProperty.ClusterScope); + new Setting<>("cloud.aws.region", "", s -> s.toLowerCase(Locale.ROOT), SettingsProperty.ClusterScope); /** * Defines specific s3 settings starting with cloud.aws.s3. @@ -87,28 +87,28 @@ public interface AwsS3Service extends LifecycleComponent { * @see AwsS3Service#KEY_SETTING */ Setting KEY_SETTING = - new Setting<>("cloud.aws.s3.access_key", AwsS3Service.KEY_SETTING, Function.identity(), false, + new Setting<>("cloud.aws.s3.access_key", AwsS3Service.KEY_SETTING, Function.identity(), SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.s3.secret_key: AWS Secret key specific for S3 API calls. Defaults to cloud.aws.secret_key. * @see AwsS3Service#SECRET_SETTING */ Setting SECRET_SETTING = - new Setting<>("cloud.aws.s3.secret_key", AwsS3Service.SECRET_SETTING, Function.identity(), false, + new Setting<>("cloud.aws.s3.secret_key", AwsS3Service.SECRET_SETTING, Function.identity(), SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.s3.protocol: Protocol for AWS API specific for S3 API calls: http or https. Defaults to cloud.aws.protocol. 
* @see AwsS3Service#PROTOCOL_SETTING */ Setting PROTOCOL_SETTING = - new Setting<>("cloud.aws.s3.protocol", AwsS3Service.PROTOCOL_SETTING, s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), false, + new Setting<>("cloud.aws.s3.protocol", AwsS3Service.PROTOCOL_SETTING, s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), SettingsProperty.ClusterScope); /** * cloud.aws.s3.proxy.host: In case of proxy, define its hostname/IP specific for S3 API calls. Defaults to cloud.aws.proxy.host. * @see AwsS3Service#PROXY_HOST_SETTING */ Setting PROXY_HOST_SETTING = - new Setting<>("cloud.aws.s3.proxy.host", AwsS3Service.PROXY_HOST_SETTING, Function.identity(), false, + new Setting<>("cloud.aws.s3.proxy.host", AwsS3Service.PROXY_HOST_SETTING, Function.identity(), SettingsProperty.ClusterScope); /** * cloud.aws.s3.proxy.port: In case of proxy, define its port specific for S3 API calls. Defaults to cloud.aws.proxy.port. @@ -116,14 +116,14 @@ public interface AwsS3Service extends LifecycleComponent { */ Setting PROXY_PORT_SETTING = new Setting<>("cloud.aws.s3.proxy.port", AwsS3Service.PROXY_PORT_SETTING, - s -> Setting.parseInt(s, 0, 1<<16, "cloud.aws.s3.proxy.port"), false, SettingsProperty.ClusterScope); + s -> Setting.parseInt(s, 0, 1<<16, "cloud.aws.s3.proxy.port"), SettingsProperty.ClusterScope); /** * cloud.aws.s3.proxy.username: In case of proxy with auth, define the username specific for S3 API calls. * Defaults to cloud.aws.proxy.username. * @see AwsS3Service#PROXY_USERNAME_SETTING */ Setting PROXY_USERNAME_SETTING = - new Setting<>("cloud.aws.s3.proxy.username", AwsS3Service.PROXY_USERNAME_SETTING, Function.identity(), false, + new Setting<>("cloud.aws.s3.proxy.username", AwsS3Service.PROXY_USERNAME_SETTING, Function.identity(), SettingsProperty.ClusterScope); /** * cloud.aws.s3.proxy.password: In case of proxy with auth, define the password specific for S3 API calls. 
@@ -131,7 +131,7 @@ public interface AwsS3Service extends LifecycleComponent { * @see AwsS3Service#PROXY_PASSWORD_SETTING */ Setting PROXY_PASSWORD_SETTING = - new Setting<>("cloud.aws.s3.proxy.password", AwsS3Service.PROXY_PASSWORD_SETTING, Function.identity(), false, + new Setting<>("cloud.aws.s3.proxy.password", AwsS3Service.PROXY_PASSWORD_SETTING, Function.identity(), SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * cloud.aws.s3.signer: If you are using an old AWS API version, you can define a Signer. Specific for S3 API calls. @@ -139,19 +139,18 @@ public interface AwsS3Service extends LifecycleComponent { * @see AwsS3Service#SIGNER_SETTING */ Setting SIGNER_SETTING = - new Setting<>("cloud.aws.s3.signer", AwsS3Service.SIGNER_SETTING, Function.identity(), false, SettingsProperty.ClusterScope); + new Setting<>("cloud.aws.s3.signer", AwsS3Service.SIGNER_SETTING, Function.identity(), SettingsProperty.ClusterScope); /** * cloud.aws.s3.region: Region specific for S3 API calls. Defaults to cloud.aws.region. * @see AwsS3Service#REGION_SETTING */ Setting REGION_SETTING = - new Setting<>("cloud.aws.s3.region", AwsS3Service.REGION_SETTING, s -> s.toLowerCase(Locale.ROOT), false, + new Setting<>("cloud.aws.s3.region", AwsS3Service.REGION_SETTING, s -> s.toLowerCase(Locale.ROOT), SettingsProperty.ClusterScope); /** * cloud.aws.s3.endpoint: Endpoint. If not set, endpoint will be guessed based on region setting. 
*/ - Setting ENDPOINT_SETTING = - Setting.simpleString("cloud.aws.s3.endpoint", false, SettingsProperty.ClusterScope); + Setting ENDPOINT_SETTING = Setting.simpleString("cloud.aws.s3.endpoint", SettingsProperty.ClusterScope); } AmazonS3 client(String endpoint, Protocol protocol, String region, String account, String key, Integer maxRetries); diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index 3fdc8a487aab..dc0915cd2769 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -67,41 +67,41 @@ public class S3Repository extends BlobStoreRepository { * @see CLOUD_S3#KEY_SETTING */ Setting KEY_SETTING = - new Setting<>("repositories.s3.access_key", CLOUD_S3.KEY_SETTING, Function.identity(), false, SettingsProperty.ClusterScope); + new Setting<>("repositories.s3.access_key", CLOUD_S3.KEY_SETTING, Function.identity(), SettingsProperty.ClusterScope); /** * repositories.s3.secret_key: AWS Secret key specific for all S3 Repositories API calls. Defaults to cloud.aws.s3.secret_key. * @see CLOUD_S3#SECRET_SETTING */ Setting SECRET_SETTING = - new Setting<>("repositories.s3.secret_key", CLOUD_S3.SECRET_SETTING, Function.identity(), false, SettingsProperty.ClusterScope); + new Setting<>("repositories.s3.secret_key", CLOUD_S3.SECRET_SETTING, Function.identity(), SettingsProperty.ClusterScope); /** * repositories.s3.region: Region specific for all S3 Repositories API calls. Defaults to cloud.aws.s3.region. 
* @see CLOUD_S3#REGION_SETTING */ Setting REGION_SETTING = - new Setting<>("repositories.s3.region", CLOUD_S3.REGION_SETTING, s -> s.toLowerCase(Locale.ROOT), false, SettingsProperty.ClusterScope); + new Setting<>("repositories.s3.region", CLOUD_S3.REGION_SETTING, s -> s.toLowerCase(Locale.ROOT), SettingsProperty.ClusterScope); /** * repositories.s3.endpoint: Endpoint specific for all S3 Repositories API calls. Defaults to cloud.aws.s3.endpoint. * @see CLOUD_S3#ENDPOINT_SETTING */ Setting ENDPOINT_SETTING = - new Setting<>("repositories.s3.endpoint", CLOUD_S3.ENDPOINT_SETTING, s -> s.toLowerCase(Locale.ROOT), false, SettingsProperty.ClusterScope); + new Setting<>("repositories.s3.endpoint", CLOUD_S3.ENDPOINT_SETTING, s -> s.toLowerCase(Locale.ROOT), SettingsProperty.ClusterScope); /** * repositories.s3.protocol: Protocol specific for all S3 Repositories API calls. Defaults to cloud.aws.s3.protocol. * @see CLOUD_S3#PROTOCOL_SETTING */ Setting PROTOCOL_SETTING = - new Setting<>("repositories.s3.protocol", CLOUD_S3.PROTOCOL_SETTING, s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), false, SettingsProperty.ClusterScope); + new Setting<>("repositories.s3.protocol", CLOUD_S3.PROTOCOL_SETTING, s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), SettingsProperty.ClusterScope); /** * repositories.s3.bucket: The name of the bucket to be used for snapshots. */ - Setting BUCKET_SETTING = Setting.simpleString("repositories.s3.bucket", false, SettingsProperty.ClusterScope); + Setting BUCKET_SETTING = Setting.simpleString("repositories.s3.bucket", SettingsProperty.ClusterScope); /** * repositories.s3.server_side_encryption: When set to true files are encrypted on server side using AES256 algorithm. * Defaults to false. 
*/ Setting SERVER_SIDE_ENCRYPTION_SETTING = - Setting.boolSetting("repositories.s3.server_side_encryption", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("repositories.s3.server_side_encryption", false, SettingsProperty.ClusterScope); /** * repositories.s3.buffer_size: Minimum threshold below which the chunk is uploaded using a single request. Beyond this threshold, * the S3 repository will use the AWS Multipart Upload API to split the chunk into several parts, each of buffer_size length, and @@ -109,35 +109,35 @@ public class S3Repository extends BlobStoreRepository { * use of the Multipart API and may result in upload errors. Defaults to 5mb. */ Setting BUFFER_SIZE_SETTING = - Setting.byteSizeSetting("repositories.s3.buffer_size", S3BlobStore.MIN_BUFFER_SIZE, false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("repositories.s3.buffer_size", S3BlobStore.MIN_BUFFER_SIZE, SettingsProperty.ClusterScope); /** * repositories.s3.max_retries: Number of retries in case of S3 errors. Defaults to 3. */ - Setting MAX_RETRIES_SETTING = Setting.intSetting("repositories.s3.max_retries", 3, false, SettingsProperty.ClusterScope); + Setting MAX_RETRIES_SETTING = Setting.intSetting("repositories.s3.max_retries", 3, SettingsProperty.ClusterScope); /** * repositories.s3.chunk_size: Big files can be broken down into chunks during snapshotting if needed. Defaults to 100m. */ Setting CHUNK_SIZE_SETTING = - Setting.byteSizeSetting("repositories.s3.chunk_size", new ByteSizeValue(100, ByteSizeUnit.MB), false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("repositories.s3.chunk_size", new ByteSizeValue(100, ByteSizeUnit.MB), SettingsProperty.ClusterScope); /** * repositories.s3.compress: When set to true metadata files are stored in compressed format. This setting doesn’t affect index * files that are already compressed by default. Defaults to false. 
*/ - Setting COMPRESS_SETTING = Setting.boolSetting("repositories.s3.compress", false, false, SettingsProperty.ClusterScope); + Setting COMPRESS_SETTING = Setting.boolSetting("repositories.s3.compress", false, SettingsProperty.ClusterScope); /** * repositories.s3.storage_class: Sets the S3 storage class type for the backup files. Values may be standard, reduced_redundancy, * standard_ia. Defaults to standard. */ - Setting STORAGE_CLASS_SETTING = Setting.simpleString("repositories.s3.storage_class", false, SettingsProperty.ClusterScope); + Setting STORAGE_CLASS_SETTING = Setting.simpleString("repositories.s3.storage_class", SettingsProperty.ClusterScope); /** * repositories.s3.canned_acl: The S3 repository supports all S3 canned ACLs : private, public-read, public-read-write, * authenticated-read, log-delivery-write, bucket-owner-read, bucket-owner-full-control. Defaults to private. */ - Setting CANNED_ACL_SETTING = Setting.simpleString("repositories.s3.canned_acl", false, SettingsProperty.ClusterScope); + Setting CANNED_ACL_SETTING = Setting.simpleString("repositories.s3.canned_acl", SettingsProperty.ClusterScope); /** * repositories.s3.base_path: Specifies the path within bucket to repository data. Defaults to root directory. 
*/ - Setting BASE_PATH_SETTING = Setting.simpleString("repositories.s3.base_path", false, SettingsProperty.ClusterScope); + Setting BASE_PATH_SETTING = Setting.simpleString("repositories.s3.base_path", SettingsProperty.ClusterScope); } /** @@ -149,77 +149,75 @@ public class S3Repository extends BlobStoreRepository { * access_key * @see Repositories#KEY_SETTING */ - Setting KEY_SETTING = - Setting.simpleString("access_key", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting KEY_SETTING = Setting.simpleString("access_key", SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * secret_key * @see Repositories#SECRET_SETTING */ - Setting SECRET_SETTING = - Setting.simpleString("secret_key", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting SECRET_SETTING = Setting.simpleString("secret_key", SettingsProperty.ClusterScope, SettingsProperty.Filtered); /** * bucket * @see Repositories#BUCKET_SETTING */ - Setting BUCKET_SETTING = Setting.simpleString("bucket", false, SettingsProperty.ClusterScope); + Setting BUCKET_SETTING = Setting.simpleString("bucket", SettingsProperty.ClusterScope); /** * endpoint * @see Repositories#ENDPOINT_SETTING */ - Setting ENDPOINT_SETTING = Setting.simpleString("endpoint", false, SettingsProperty.ClusterScope); + Setting ENDPOINT_SETTING = Setting.simpleString("endpoint", SettingsProperty.ClusterScope); /** * protocol * @see Repositories#PROTOCOL_SETTING */ Setting PROTOCOL_SETTING = - new Setting<>("protocol", "https", s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), false, SettingsProperty.ClusterScope); + new Setting<>("protocol", "https", s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), SettingsProperty.ClusterScope); /** * region * @see Repositories#REGION_SETTING */ - Setting REGION_SETTING = new Setting<>("region", "", s -> s.toLowerCase(Locale.ROOT), false, SettingsProperty.ClusterScope); + Setting REGION_SETTING = new Setting<>("region", "", s -> s.toLowerCase(Locale.ROOT), 
SettingsProperty.ClusterScope); /** * server_side_encryption * @see Repositories#SERVER_SIDE_ENCRYPTION_SETTING */ Setting SERVER_SIDE_ENCRYPTION_SETTING = - Setting.boolSetting("server_side_encryption", false, false, SettingsProperty.ClusterScope); + Setting.boolSetting("server_side_encryption", false, SettingsProperty.ClusterScope); /** * buffer_size * @see Repositories#BUFFER_SIZE_SETTING */ Setting BUFFER_SIZE_SETTING = - Setting.byteSizeSetting("buffer_size", S3BlobStore.MIN_BUFFER_SIZE, false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("buffer_size", S3BlobStore.MIN_BUFFER_SIZE, SettingsProperty.ClusterScope); /** * max_retries * @see Repositories#MAX_RETRIES_SETTING */ - Setting MAX_RETRIES_SETTING = Setting.intSetting("max_retries", 3, false, SettingsProperty.ClusterScope); + Setting MAX_RETRIES_SETTING = Setting.intSetting("max_retries", 3, SettingsProperty.ClusterScope); /** * chunk_size * @see Repositories#CHUNK_SIZE_SETTING */ - Setting CHUNK_SIZE_SETTING = Setting.byteSizeSetting("chunk_size", "-1", false, SettingsProperty.ClusterScope); + Setting CHUNK_SIZE_SETTING = Setting.byteSizeSetting("chunk_size", "-1", SettingsProperty.ClusterScope); /** * compress * @see Repositories#COMPRESS_SETTING */ - Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, false, SettingsProperty.ClusterScope); + Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, SettingsProperty.ClusterScope); /** * storage_class * @see Repositories#STORAGE_CLASS_SETTING */ - Setting STORAGE_CLASS_SETTING = Setting.simpleString("storage_class", false, SettingsProperty.ClusterScope); + Setting STORAGE_CLASS_SETTING = Setting.simpleString("storage_class", SettingsProperty.ClusterScope); /** * canned_acl * @see Repositories#CANNED_ACL_SETTING */ - Setting CANNED_ACL_SETTING = Setting.simpleString("canned_acl", false, SettingsProperty.ClusterScope); + Setting CANNED_ACL_SETTING = Setting.simpleString("canned_acl", SettingsProperty.ClusterScope); /** * 
base_path * @see Repositories#BASE_PATH_SETTING */ - Setting BASE_PATH_SETTING = Setting.simpleString("base_path", false, SettingsProperty.ClusterScope); + Setting BASE_PATH_SETTING = Setting.simpleString("base_path", SettingsProperty.ClusterScope); } private final S3BlobStore blobStore; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 4273c2027790..22a06957d387 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -270,7 +270,7 @@ public abstract class ESIntegTestCase extends ESTestCase { * It's set once per test via a generic index template. */ public static final Setting INDEX_TEST_SEED_SETTING = - Setting.longSetting("index.tests.seed", 0, Long.MIN_VALUE, false, SettingsProperty.IndexScope); + Setting.longSetting("index.tests.seed", 0, Long.MIN_VALUE, SettingsProperty.IndexScope); /** * A boolean value to enable or disable mock modules. 
This is useful to test the diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java index 4b4692be90c4..f10039391dbe 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java @@ -36,11 +36,11 @@ public final class InternalSettingsPlugin extends Plugin { } public static final Setting VERSION_CREATED = - Setting.intSetting("index.version.created", 0, false, SettingsProperty.IndexScope); + Setting.intSetting("index.version.created", 0, SettingsProperty.IndexScope); public static final Setting MERGE_ENABLED = - Setting.boolSetting("index.merge.enabled", true, false, SettingsProperty.IndexScope); + Setting.boolSetting("index.merge.enabled", true, SettingsProperty.IndexScope); public static final Setting INDEX_CREATION_DATE_SETTING = - Setting.longSetting(IndexMetaData.SETTING_CREATION_DATE, -1, -1, false, SettingsProperty.IndexScope); + Setting.longSetting(IndexMetaData.SETTING_CREATION_DATE, -1, -1, SettingsProperty.IndexScope); public void onModule(SettingsModule module) { module.registerSetting(VERSION_CREATED); diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java b/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java index b94e7c7e8548..d7bc9a7e0db5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java @@ -64,7 +64,7 @@ public final class MockIndexEventListener { /** * For tests to pass in to fail on listener invocation */ - public static final Setting INDEX_FAIL = Setting.boolSetting("index.fail", false, false, SettingsProperty.IndexScope); + public static final Setting INDEX_FAIL = Setting.boolSetting("index.fail", false, 
SettingsProperty.IndexScope); public void onModule(SettingsModule module) { module.registerSetting(INDEX_FAIL); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java index cde26a5b55f2..2fad1fc05e88 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java @@ -57,12 +57,12 @@ public final class MockEngineSupport { * slow if {@link org.apache.lucene.index.AssertingDirectoryReader} is used. */ public static final Setting WRAP_READER_RATIO = - Setting.doubleSetting("index.engine.mock.random.wrap_reader_ratio", 0.0d, 0.0d, false, SettingsProperty.IndexScope); + Setting.doubleSetting("index.engine.mock.random.wrap_reader_ratio", 0.0d, 0.0d, SettingsProperty.IndexScope); /** * Allows tests to prevent an engine from being flushed on close ie. to test translog recovery... 
*/ public static final Setting DISABLE_FLUSH_ON_CLOSE = - Setting.boolSetting("index.mock.disable_flush_on_close", false, false, SettingsProperty.IndexScope); + Setting.boolSetting("index.mock.disable_flush_on_close", false, SettingsProperty.IndexScope); private final AtomicBoolean closing = new AtomicBoolean(false); diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java index 6f0e6d51d10f..2cb5367e642e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java @@ -63,15 +63,15 @@ import java.util.Random; public class MockFSDirectoryService extends FsDirectoryService { public static final Setting RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING = - Setting.doubleSetting("index.store.mock.random.io_exception_rate_on_open", 0.0d, 0.0d, false, SettingsProperty.IndexScope); + Setting.doubleSetting("index.store.mock.random.io_exception_rate_on_open", 0.0d, 0.0d, SettingsProperty.IndexScope); public static final Setting RANDOM_IO_EXCEPTION_RATE_SETTING = - Setting.doubleSetting("index.store.mock.random.io_exception_rate", 0.0d, 0.0d, false, SettingsProperty.IndexScope); + Setting.doubleSetting("index.store.mock.random.io_exception_rate", 0.0d, 0.0d, SettingsProperty.IndexScope); public static final Setting RANDOM_PREVENT_DOUBLE_WRITE_SETTING = - Setting.boolSetting("index.store.mock.random.prevent_double_write", true, false, SettingsProperty.IndexScope);// true is default in MDW + Setting.boolSetting("index.store.mock.random.prevent_double_write", true, SettingsProperty.IndexScope);// true is default in MDW public static final Setting RANDOM_NO_DELETE_OPEN_FILE_SETTING = - Setting.boolSetting("index.store.mock.random.no_delete_open_file", true, false, SettingsProperty.IndexScope);// true is default in MDW + 
Setting.boolSetting("index.store.mock.random.no_delete_open_file", true, SettingsProperty.IndexScope);// true is default in MDW public static final Setting CRASH_INDEX_SETTING = - Setting.boolSetting("index.store.mock.random.crash_index", true, false, SettingsProperty.IndexScope);// true is default in MDW + Setting.boolSetting("index.store.mock.random.crash_index", true, SettingsProperty.IndexScope);// true is default in MDW private final FsDirectoryService delegateService; private final Random random; diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java index 8d1a2beed89a..3d535d677993 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java @@ -46,7 +46,7 @@ import java.util.Map; public class MockFSIndexStore extends IndexStore { public static final Setting INDEX_CHECK_INDEX_ON_CLOSE_SETTING = - Setting.boolSetting("index.store.mock.check_index_on_close", true, false, SettingsProperty.IndexScope); + Setting.boolSetting("index.store.mock.check_index_on_close", true, SettingsProperty.IndexScope); public static class TestPlugin extends Plugin { @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java index bdafa98b6acd..6009929e38e7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java +++ b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java @@ -35,7 +35,7 @@ import java.util.concurrent.CopyOnWriteArrayList; public class MockTaskManager extends TaskManager { public static final Setting USE_MOCK_TASK_MANAGER_SETTING = - Setting.boolSetting("tests.mock.taskmanager.enabled", false, false, SettingsProperty.ClusterScope); + 
Setting.boolSetting("tests.mock.taskmanager.enabled", false, SettingsProperty.ClusterScope); private final Collection listeners = new CopyOnWriteArrayList<>(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java index 49a89977e95d..88cdd325448d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java @@ -70,10 +70,10 @@ public class AssertingLocalTransport extends LocalTransport { public static final Setting ASSERTING_TRANSPORT_MIN_VERSION_KEY = new Setting<>("transport.asserting.version.min", Integer.toString(Version.CURRENT.minimumCompatibilityVersion().id), - (s) -> Version.fromId(Integer.parseInt(s)), false, SettingsProperty.ClusterScope); + (s) -> Version.fromId(Integer.parseInt(s)), SettingsProperty.ClusterScope); public static final Setting ASSERTING_TRANSPORT_MAX_VERSION_KEY = new Setting<>("transport.asserting.version.max", Integer.toString(Version.CURRENT.id), - (s) -> Version.fromId(Integer.parseInt(s)), false, SettingsProperty.ClusterScope); + (s) -> Version.fromId(Integer.parseInt(s)), SettingsProperty.ClusterScope); private final Random random; private final Version minVersion; private final Version maxVersion; From 7a7f112e890aa55a3b5e180a762ce47cfb7f14da Mon Sep 17 00:00:00 2001 From: David Pilato Date: Sun, 28 Feb 2016 11:21:20 +0100 Subject: [PATCH 033/320] Check mutually exclusive scopes We want to make sure that a developer does not put more than one scope on a given setting. 
--- .../common/settings/Setting.java | 12 +++++ .../common/settings/SettingTests.java | 52 +++++++++++++++++++ 2 files changed, 64 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index ce20c5219322..1469d4679cb0 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -125,6 +125,18 @@ public class Setting extends ToXContentToBytes { } else { this.properties = EnumSet.copyOf(Arrays.asList(properties)); } + // We validate scope settings. They are mutually exclusive + int numScopes = 0; + for (SettingsProperty property : properties) { + if (property == SettingsProperty.ClusterScope || + property == SettingsProperty.IndexScope || + property == SettingsProperty.NodeScope) { + numScopes++; + } + } + if (numScopes > 1) { + throw new IllegalArgumentException("More than one scope has been added to the setting [" + key + "]"); + } } /** diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index b916783b316b..f2c76931729e 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -30,6 +30,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; public class SettingTests extends ESTestCase { @@ -400,4 +401,55 @@ public class SettingTests extends ESTestCase { assertEquals(5, integerSetting.get(Settings.builder().put("foo.bar", 5).build()).intValue()); assertEquals(1, integerSetting.get(Settings.EMPTY).intValue()); } + + /** + * Only one single scope can be added to any setting + */ + public void testMutuallyExclusiveScopes() { + 
// Those should pass + Setting setting = Setting.simpleString("foo.bar", SettingsProperty.ClusterScope); + assertThat(setting.hasClusterScope(), is(true)); + assertThat(setting.hasNodeScope(), is(false)); + assertThat(setting.hasIndexScope(), is(false)); + setting = Setting.simpleString("foo.bar", SettingsProperty.NodeScope); + assertThat(setting.hasNodeScope(), is(true)); + assertThat(setting.hasIndexScope(), is(false)); + assertThat(setting.hasClusterScope(), is(false)); + setting = Setting.simpleString("foo.bar", SettingsProperty.IndexScope); + assertThat(setting.hasIndexScope(), is(true)); + assertThat(setting.hasNodeScope(), is(false)); + assertThat(setting.hasClusterScope(), is(false)); + + // We test the default scope + setting = Setting.simpleString("foo.bar"); + assertThat(setting.hasNodeScope(), is(true)); + assertThat(setting.hasIndexScope(), is(false)); + assertThat(setting.hasClusterScope(), is(false)); + + // Those should fail + try { + Setting.simpleString("foo.bar", SettingsProperty.IndexScope, SettingsProperty.ClusterScope); + fail("Multiple scopes should fail"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("More than one scope has been added to the setting")); + } + try { + Setting.simpleString("foo.bar", SettingsProperty.IndexScope, SettingsProperty.NodeScope); + fail("Multiple scopes should fail"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("More than one scope has been added to the setting")); + } + try { + Setting.simpleString("foo.bar", SettingsProperty.ClusterScope, SettingsProperty.NodeScope); + fail("Multiple scopes should fail"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("More than one scope has been added to the setting")); + } + try { + Setting.simpleString("foo.bar", SettingsProperty.IndexScope, SettingsProperty.ClusterScope, SettingsProperty.NodeScope); + fail("Multiple scopes should fail"); + } catch 
(IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("More than one scope has been added to the setting")); + } + } } From cadd8664bbf98f8fd00df098d2f405040e61f269 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Mon, 29 Feb 2016 10:00:05 +0100 Subject: [PATCH 034/320] Fix regression in test --- .../java/org/elasticsearch/common/settings/SettingTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index f2c76931729e..cd6496d8b2fb 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -40,7 +40,7 @@ public class SettingTests extends ESTestCase { Setting booleanSetting = Setting.boolSetting("foo.bar", false, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); assertFalse(booleanSetting.get(Settings.EMPTY)); assertFalse(booleanSetting.get(Settings.builder().put("foo.bar", false).build())); - assertTrue(booleanSetting.get(Settings.builder().put("foo.bar", SettingsProperty.Dynamic).build())); + assertTrue(booleanSetting.get(Settings.builder().put("foo.bar", true).build())); } public void testByteSize() { From 8cd919c6876f3b380137eb97bc7b37f0058e9c2f Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 29 Feb 2016 19:52:42 -0800 Subject: [PATCH 035/320] Added jopt simple option parser and switched plugin cli to use it --- core/build.gradle | 3 +- .../bootstrap/BootstrapCLIParser.java | 11 +- .../java/org/elasticsearch/cli/Command.java | 111 ++++++++++++++++++ .../java/org/elasticsearch/cli/ExitCodes.java | 42 +++++++ .../org/elasticsearch/cli/MultiCommand.java | 73 ++++++++++++ .../org/elasticsearch/cli/TestCommand.java | 41 +++++++ .../{common => }/cli/UserError.java | 10 +- .../org/elasticsearch/common/cli/CliTool.java | 6 +- .../elasticsearch/common/cli/Terminal.java | 
21 +++- .../plugins/InstallPluginCommand.java | 73 +++++++----- .../plugins/ListPluginsCommand.java | 17 ++- .../org/elasticsearch/plugins/PluginCli.java | 103 ++-------------- .../plugins/RemovePluginCommand.java | 51 +++++--- .../elasticsearch/plugins/PluginCliTests.java | 2 + .../bootstrap/BootstrapCliParserTests.java | 3 +- .../common/cli/CliToolTests.java | 4 +- .../plugins/InstallPluginCommandTests.java | 7 +- .../plugins/ListPluginsCommandTests.java | 6 +- .../plugins/RemovePluginCommandTests.java | 6 +- .../common/cli/CliToolTestCase.java | 6 + 20 files changed, 426 insertions(+), 170 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/cli/Command.java create mode 100644 core/src/main/java/org/elasticsearch/cli/ExitCodes.java create mode 100644 core/src/main/java/org/elasticsearch/cli/MultiCommand.java create mode 100644 core/src/main/java/org/elasticsearch/cli/TestCommand.java rename core/src/main/java/org/elasticsearch/{common => }/cli/UserError.java (79%) diff --git a/core/build.gradle b/core/build.gradle index e1511a9cdd1b..f79c2a7623b4 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -48,7 +48,8 @@ dependencies { compile 'org.elasticsearch:securesm:1.0' // utilities - compile 'commons-cli:commons-cli:1.3.1' + compile 'commons-cli:commons-cli:1.3.1' // nocommit: remove the old! 
+ compile 'net.sf.jopt-simple:jopt-simple:4.9' compile 'com.carrotsearch:hppc:0.7.1' // time handling, remove with java 8 time diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java index ca67fc911320..ec11a773cccc 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.cli.CliTool; import org.elasticsearch.common.cli.CliToolConfig; import org.elasticsearch.common.cli.Terminal; -import org.elasticsearch.common.cli.UserError; +import org.elasticsearch.cli.UserError; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.monitor.jvm.JvmInfo; @@ -37,7 +37,6 @@ import java.util.Iterator; import java.util.Locale; import java.util.Map; import java.util.Properties; -import java.util.Set; import static org.elasticsearch.common.cli.CliToolConfig.Builder.cmd; import static org.elasticsearch.common.cli.CliToolConfig.Builder.optionBuilder; @@ -138,11 +137,11 @@ final class BootstrapCLIParser extends CliTool { String arg = iterator.next(); if (!arg.startsWith("--")) { if (arg.startsWith("-D") || arg.startsWith("-d") || arg.startsWith("-p")) { - throw new UserError(ExitStatus.USAGE, + throw new UserError(ExitStatus.USAGE.status(), "Parameter [" + arg + "] starting with \"-D\", \"-d\" or \"-p\" must be before any parameters starting with --" ); } else { - throw new UserError(ExitStatus.USAGE, "Parameter [" + arg + "]does not start with --"); + throw new UserError(ExitStatus.USAGE.status(), "Parameter [" + arg + "]does not start with --"); } } // if there is no = sign, we have to get the next argu @@ -156,11 +155,11 @@ final class BootstrapCLIParser extends CliTool { if (iterator.hasNext()) { String value = 
iterator.next(); if (value.startsWith("--")) { - throw new UserError(ExitStatus.USAGE, "Parameter [" + arg + "] needs value"); + throw new UserError(ExitStatus.USAGE.status(), "Parameter [" + arg + "] needs value"); } properties.put("es." + arg, value); } else { - throw new UserError(ExitStatus.USAGE, "Parameter [" + arg + "] needs value"); + throw new UserError(ExitStatus.USAGE.status(), "Parameter [" + arg + "] needs value"); } } } diff --git a/core/src/main/java/org/elasticsearch/cli/Command.java b/core/src/main/java/org/elasticsearch/cli/Command.java new file mode 100644 index 000000000000..bc44a8eb6354 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cli/Command.java @@ -0,0 +1,111 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cli; + +import java.io.IOException; +import java.util.Arrays; + +import joptsimple.OptionException; +import joptsimple.OptionParser; +import joptsimple.OptionSet; +import joptsimple.OptionSpec; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.cli.Terminal; + +/** + * An action to execute within a cli. + */ +public abstract class Command { + + /** A description of the command, used in the help output. 
*/ + protected final String description; + + /** The option parser for this command. */ + protected final OptionParser parser = new OptionParser(); + + private final OptionSpec helpOption = parser.acceptsAll(Arrays.asList("h", "help"), "show help").forHelp(); + private final OptionSpec silentOption = parser.acceptsAll(Arrays.asList("s", "silent"), "show minimal output"); + private final OptionSpec verboseOption = parser.acceptsAll(Arrays.asList("v", "verbose"), "show verbose output"); + + public Command(String description) { + this.description = description; + } + + /** Parses options for this command from args and executes it. */ + public final int main(String[] args, Terminal terminal) throws Exception { + + final OptionSet options; + try { + options = parser.parse(args); + } catch (OptionException e) { + printHelp(terminal); + terminal.println(Terminal.Verbosity.SILENT, "ERROR: " + e.getMessage()); + return ExitCodes.USAGE; + } + + if (options.has(helpOption)) { + printHelp(terminal); + return ExitCodes.OK; + } + + if (options.has(silentOption)) { + if (options.has(verboseOption)) { + // mutually exclusive, we can remove this with jopt-simple 5.0, which natively supports it + printHelp(terminal); + terminal.println(Terminal.Verbosity.SILENT, "ERROR: Cannot specify -s and -v together"); + return ExitCodes.USAGE; + } + terminal.setVerbosity(Terminal.Verbosity.SILENT); + } else if (options.has(verboseOption)) { + terminal.setVerbosity(Terminal.Verbosity.VERBOSE); + } else { + terminal.setVerbosity(Terminal.Verbosity.NORMAL); + } + + try { + return execute(terminal, options); + } catch (UserError e) { + terminal.println(Terminal.Verbosity.SILENT, "ERROR: " + e.getMessage()); + return e.exitCode; + } + } + + /** Prints a help message for the command to the terminal. 
*/ + private void printHelp(Terminal terminal) throws IOException { + terminal.println(description); + terminal.println(""); + printAdditionalHelp(terminal); + parser.printHelpOn(terminal.getWriter()); + } + + /** Prints additional help information, specific to the command */ + protected void printAdditionalHelp(Terminal terminal) {} + + @SuppressForbidden(reason = "Allowed to exit explicitly from #main()") + protected static void exit(int status) { + System.exit(status); + } + + /** + * Executes this command. + * + * Any runtime user errors (like an input file that does not exist), should throw a {@link UserError}. */ + protected abstract int execute(Terminal terminal, OptionSet options) throws Exception; +} diff --git a/core/src/main/java/org/elasticsearch/cli/ExitCodes.java b/core/src/main/java/org/elasticsearch/cli/ExitCodes.java new file mode 100644 index 000000000000..d08deb8b1adf --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cli/ExitCodes.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cli; + +/** + * POSIX exit codes. 
+ */ +public class ExitCodes { + public static final int OK = 0; + public static final int USAGE = 64; /* command line usage error */ + public static final int DATA_ERROR = 65; /* data format error */ + public static final int NO_INPUT = 66; /* cannot open input */ + public static final int NO_USER = 67; /* addressee unknown */ + public static final int NO_HOST = 68; /* host name unknown */ + public static final int UNAVAILABLE = 69; /* service unavailable */ + public static final int CODE_ERROR = 70; /* internal software error */ + public static final int CANT_CREATE = 73; /* can't create (user) output file */ + public static final int IO_ERROR = 74; /* input/output error */ + public static final int TEMP_FAILURE = 75; /* temp failure; user is invited to retry */ + public static final int PROTOCOL = 76; /* remote error in protocol */ + public static final int NOPERM = 77; /* permission denied */ + public static final int CONFIG = 78; /* configuration error */ + + private ExitCodes() { /* no instance, just constants */ } +} diff --git a/core/src/main/java/org/elasticsearch/cli/MultiCommand.java b/core/src/main/java/org/elasticsearch/cli/MultiCommand.java new file mode 100644 index 000000000000..94c403d57d0a --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cli/MultiCommand.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cli; + +import java.io.IOException; +import java.util.Arrays; +import java.util.LinkedHashMap; +import java.util.Map; + +import joptsimple.NonOptionArgumentSpec; +import joptsimple.OptionSet; +import org.elasticsearch.common.cli.Terminal; + +/** + * A cli tool which is made up of multiple subcommands. + */ +public class MultiCommand extends Command { + + protected final Map subcommands = new LinkedHashMap<>(); + + private final NonOptionArgumentSpec arguments = parser.nonOptions("command"); + + public MultiCommand(String description) { + super(description); + parser.posixlyCorrect(true); + } + + @Override + protected void printAdditionalHelp(Terminal terminal) { + if (subcommands.isEmpty()) { + throw new IllegalStateException("No subcommands configured"); + } + terminal.println("Commands"); + terminal.println("--------"); + for (Map.Entry subcommand : subcommands.entrySet()) { + terminal.println(subcommand.getKey() + " - " + subcommand.getValue().description); + } + terminal.println(""); + } + + @Override + protected int execute(Terminal terminal, OptionSet options) throws Exception { + if (subcommands.isEmpty()) { + throw new IllegalStateException("No subcommands configured"); + } + String[] args = arguments.values(options).toArray(new String[0]); + if (args.length == 0) { + throw new UserError(ExitCodes.USAGE, "Missing command"); + } + Command subcommand = subcommands.get(args[0]); + if (subcommand == null) { + throw new UserError(ExitCodes.USAGE, "Unknown command [" + args[0] + "]"); + } + return 
subcommand.main(Arrays.copyOfRange(args, 1, args.length), terminal); + } +} diff --git a/core/src/main/java/org/elasticsearch/cli/TestCommand.java b/core/src/main/java/org/elasticsearch/cli/TestCommand.java new file mode 100644 index 000000000000..fe3fa5c6b8cf --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cli/TestCommand.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.cli; + +import joptsimple.OptionSet; +import org.elasticsearch.common.cli.Terminal; + +public class TestCommand extends Command { + + public static void main(String[] args) throws Exception { + exit(new TestCommand().main(args, Terminal.DEFAULT)); + } + + public TestCommand() { + super("some test cli"); + parser.accepts("foo", "some option"); + } + + @Override + protected int execute(Terminal terminal, OptionSet options) throws Exception { + terminal.println("running"); + return ExitCodes.OK; + } +} diff --git a/core/src/main/java/org/elasticsearch/common/cli/UserError.java b/core/src/main/java/org/elasticsearch/cli/UserError.java similarity index 79% rename from core/src/main/java/org/elasticsearch/common/cli/UserError.java rename to core/src/main/java/org/elasticsearch/cli/UserError.java index ad7098308854..2a4f2bf12338 100644 --- a/core/src/main/java/org/elasticsearch/common/cli/UserError.java +++ b/core/src/main/java/org/elasticsearch/cli/UserError.java @@ -17,19 +17,19 @@ * under the License. */ -package org.elasticsearch.common.cli; +package org.elasticsearch.cli; /** - * An exception representing a user fixable problem in {@link CliTool} usage. + * An exception representing a user fixable problem in {@link Command} usage. */ public class UserError extends Exception { /** The exist status the cli should use when catching this user error. */ - public final CliTool.ExitStatus exitStatus; + public final int exitCode; /** Constructs a UserError with an exit status and message to show the user. 
*/ - public UserError(CliTool.ExitStatus exitStatus, String msg) { + public UserError(int exitCode, String msg) { super(msg); - this.exitStatus = exitStatus; + this.exitCode = exitCode; } } diff --git a/core/src/main/java/org/elasticsearch/common/cli/CliTool.java b/core/src/main/java/org/elasticsearch/common/cli/CliTool.java index 2ea01f450686..ba2007813d54 100644 --- a/core/src/main/java/org/elasticsearch/common/cli/CliTool.java +++ b/core/src/main/java/org/elasticsearch/common/cli/CliTool.java @@ -26,6 +26,7 @@ import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.MissingArgumentException; import org.apache.commons.cli.MissingOptionException; import org.apache.commons.cli.UnrecognizedOptionException; +import org.elasticsearch.cli.UserError; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.node.internal.InternalSettingsPreparer; @@ -143,7 +144,8 @@ public abstract class CliTool { return parse(cmd, args).execute(settings, env); } catch (UserError error) { terminal.println(Terminal.Verbosity.SILENT, "ERROR: " + error.getMessage()); - return error.exitStatus; + return ExitStatus.USAGE; + //return error.exitCode; } } @@ -163,7 +165,7 @@ public abstract class CliTool { } catch (AlreadySelectedException|MissingArgumentException|MissingOptionException|UnrecognizedOptionException e) { // intentionally drop the stack trace here as these are really user errors, // the stack trace into cli parsing lib is not important - throw new UserError(ExitStatus.USAGE, e.toString()); + throw new UserError(ExitStatus.USAGE.status(), e.toString()); } if (cli.hasOption("v")) { diff --git a/core/src/main/java/org/elasticsearch/common/cli/Terminal.java b/core/src/main/java/org/elasticsearch/common/cli/Terminal.java index 8d4a8036bdf8..27dd2f7b87f0 100644 --- a/core/src/main/java/org/elasticsearch/common/cli/Terminal.java +++ b/core/src/main/java/org/elasticsearch/common/cli/Terminal.java @@ -23,12 
+23,14 @@ import java.io.BufferedReader; import java.io.Console; import java.io.IOException; import java.io.InputStreamReader; +import java.io.PrintWriter; +import java.io.Writer; import java.nio.charset.Charset; import org.elasticsearch.common.SuppressForbidden; /** - * A Terminal wraps access to reading input and writing output for a {@link CliTool}. + * A Terminal wraps access to reading input and writing output for a cli. * * The available methods are similar to those of {@link Console}, with the ability * to read either normal text or a password, and the ability to print a line @@ -53,7 +55,7 @@ public abstract class Terminal { private Verbosity verbosity = Verbosity.NORMAL; /** Sets the verbosity of the terminal. */ - void setVerbosity(Verbosity verbosity) { + public void setVerbosity(Verbosity verbosity) { this.verbosity = verbosity; } @@ -63,6 +65,9 @@ public abstract class Terminal { /** Reads password text from the terminal input. See {@link Console#readPassword()}}. */ public abstract char[] readSecret(String prompt); + /** Returns a Writer which can be used to write to the terminal directly. */ + public abstract PrintWriter getWriter(); + /** Print a message directly to the terminal. 
*/ protected abstract void doPrint(String msg); @@ -86,6 +91,11 @@ public abstract class Terminal { return console != null; } + @Override + public PrintWriter getWriter() { + return console.writer(); + } + @Override public void doPrint(String msg) { console.printf("%s", msg); @@ -105,6 +115,8 @@ public abstract class Terminal { private static class SystemTerminal extends Terminal { + private static final PrintWriter writer = new PrintWriter(System.out); + @Override @SuppressForbidden(reason = "System#out") public void doPrint(String msg) { @@ -112,6 +124,11 @@ public abstract class Terminal { System.out.flush(); } + @Override + public PrintWriter getWriter() { + return writer; + } + @Override public String readText(String text) { doPrint(text); diff --git a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index 767f6d421796..977d89a3418d 100644 --- a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -19,16 +19,19 @@ package org.elasticsearch.plugins; +import joptsimple.OptionSet; +import joptsimple.OptionSpec; import org.apache.lucene.util.IOUtils; import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.bootstrap.JarHell; +import org.elasticsearch.cli.Command; +import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.common.cli.CliTool; import org.elasticsearch.common.cli.Terminal; -import org.elasticsearch.common.cli.UserError; +import org.elasticsearch.cli.UserError; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.io.FileSystemUtils; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import java.io.BufferedReader; @@ -88,7 +91,7 @@ import static org.elasticsearch.common.util.set.Sets.newHashSet; * elasticsearch config directory, using the name of the 
plugin. If any files to be installed * already exist, they will be skipped. */ -class InstallPluginCommand extends CliTool.Command { +class InstallPluginCommand extends Command { private static final String PROPERTY_SUPPORT_STAGING_URLS = "es.plugins.staging"; @@ -119,17 +122,33 @@ class InstallPluginCommand extends CliTool.Command { "repository-s3", "store-smb")); - private final String pluginId; - private final boolean batch; + private final Environment env; + private final OptionSpec batchOption; + private final OptionSpec arguments; - InstallPluginCommand(Terminal terminal, String pluginId, boolean batch) { - super(terminal); - this.pluginId = pluginId; - this.batch = batch; + InstallPluginCommand(Environment env) { + super("Install a plugin"); + this.env = env; + this.batchOption = parser.acceptsAll(Arrays.asList("b", "batch"), + "Enable batch mode explicitly, automatic confirmation of security permission"); + this.arguments = parser.nonOptions("plugin id"); } @Override - public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception { + protected int execute(Terminal terminal, OptionSet options) throws Exception { + // TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args + List args = arguments.values(options); + if (args.size() != 1) { + throw new UserError(ExitCodes.USAGE, "Must supply a single plugin id argument"); + } + String pluginId = args.get(0); + boolean isBatch = options.has(batchOption) || System.console() == null; + execute(terminal, pluginId, isBatch); + return ExitCodes.OK; + } + + // pkg private for testing + void execute(Terminal terminal, String pluginId, boolean isBatch) throws Exception { // TODO: remove this leniency!! is it needed anymore? 
if (Files.exists(env.pluginsFile()) == false) { @@ -137,15 +156,13 @@ class InstallPluginCommand extends CliTool.Command { Files.createDirectory(env.pluginsFile()); } - Path pluginZip = download(pluginId, env.tmpFile()); + Path pluginZip = download(terminal, pluginId, env.tmpFile()); Path extractedZip = unzip(pluginZip, env.pluginsFile()); - install(extractedZip, env); - - return CliTool.ExitStatus.OK; + install(terminal, isBatch, extractedZip); } /** Downloads the plugin and returns the file it was downloaded to. */ - private Path download(String pluginId, Path tmpDir) throws Exception { + private Path download(Terminal terminal, String pluginId, Path tmpDir) throws Exception { if (OFFICIAL_PLUGINS.contains(pluginId)) { final String version = Version.CURRENT.toString(); final String url; @@ -195,14 +212,14 @@ class InstallPluginCommand extends CliTool.Command { BufferedReader checksumReader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8)); expectedChecksum = checksumReader.readLine(); if (checksumReader.readLine() != null) { - throw new UserError(CliTool.ExitStatus.IO_ERROR, "Invalid checksum file at " + checksumUrl); + throw new UserError(CliTool.ExitStatus.IO_ERROR.status(), "Invalid checksum file at " + checksumUrl); } } byte[] zipbytes = Files.readAllBytes(zip); String gotChecksum = MessageDigests.toHexString(MessageDigests.sha1().digest(zipbytes)); if (expectedChecksum.equals(gotChecksum) == false) { - throw new UserError(CliTool.ExitStatus.IO_ERROR, "SHA1 mismatch, expected " + expectedChecksum + " but got " + gotChecksum); + throw new UserError(CliTool.ExitStatus.IO_ERROR.status(), "SHA1 mismatch, expected " + expectedChecksum + " but got " + gotChecksum); } return zip; @@ -244,13 +261,13 @@ class InstallPluginCommand extends CliTool.Command { Files.delete(zip); if (hasEsDir == false) { IOUtils.rm(target); - throw new UserError(CliTool.ExitStatus.DATA_ERROR, "`elasticsearch` directory is missing in the plugin zip"); + throw new 
UserError(CliTool.ExitStatus.DATA_ERROR.status(), "`elasticsearch` directory is missing in the plugin zip"); } return target; } /** Load information about the plugin, and verify it can be installed with no errors. */ - private PluginInfo verify(Path pluginRoot, Environment env) throws Exception { + private PluginInfo verify(Terminal terminal, Path pluginRoot, boolean isBatch) throws Exception { // read and validate the plugin descriptor PluginInfo info = PluginInfo.readFromProperties(pluginRoot); terminal.println(VERBOSE, info.toString()); @@ -258,7 +275,7 @@ class InstallPluginCommand extends CliTool.Command { // don't let luser install plugin as a module... // they might be unavoidably in maven central and are packaged up the same way) if (MODULES.contains(info.getName())) { - throw new UserError(CliTool.ExitStatus.USAGE, "plugin '" + info.getName() + "' cannot be installed like this, it is a system module"); + throw new UserError(CliTool.ExitStatus.USAGE.status(), "plugin '" + info.getName() + "' cannot be installed like this, it is a system module"); } // check for jar hell before any copying @@ -268,7 +285,7 @@ class InstallPluginCommand extends CliTool.Command { // if it exists, confirm or warn the user Path policy = pluginRoot.resolve(PluginInfo.ES_PLUGIN_POLICY); if (Files.exists(policy)) { - PluginSecurity.readPolicy(policy, terminal, env, batch); + PluginSecurity.readPolicy(policy, terminal, env, isBatch); } return info; @@ -305,16 +322,16 @@ class InstallPluginCommand extends CliTool.Command { * Installs the plugin from {@code tmpRoot} into the plugins dir. * If the plugin has a bin dir and/or a config dir, those are copied. 
*/ - private void install(Path tmpRoot, Environment env) throws Exception { + private void install(Terminal terminal, boolean isBatch, Path tmpRoot) throws Exception { List deleteOnFailure = new ArrayList<>(); deleteOnFailure.add(tmpRoot); try { - PluginInfo info = verify(tmpRoot, env); + PluginInfo info = verify(terminal, tmpRoot, isBatch); final Path destination = env.pluginsFile().resolve(info.getName()); if (Files.exists(destination)) { - throw new UserError(CliTool.ExitStatus.USAGE, "plugin directory " + destination.toAbsolutePath() + " already exists. To update the plugin, uninstall it first using 'remove " + info.getName() + "' command"); + throw new UserError(CliTool.ExitStatus.USAGE.status(), "plugin directory " + destination.toAbsolutePath() + " already exists. To update the plugin, uninstall it first using 'remove " + info.getName() + "' command"); } Path tmpBinDir = tmpRoot.resolve("bin"); @@ -347,7 +364,7 @@ class InstallPluginCommand extends CliTool.Command { /** Copies the files from {@code tmpBinDir} into {@code destBinDir}, along with permissions from dest dirs parent. 
*/ private void installBin(PluginInfo info, Path tmpBinDir, Path destBinDir) throws Exception { if (Files.isDirectory(tmpBinDir) == false) { - throw new UserError(CliTool.ExitStatus.IO_ERROR, "bin in plugin " + info.getName() + " is not a directory"); + throw new UserError(CliTool.ExitStatus.IO_ERROR.status(), "bin in plugin " + info.getName() + " is not a directory"); } Files.createDirectory(destBinDir); @@ -365,7 +382,7 @@ class InstallPluginCommand extends CliTool.Command { try (DirectoryStream stream = Files.newDirectoryStream(tmpBinDir)) { for (Path srcFile : stream) { if (Files.isDirectory(srcFile)) { - throw new UserError(CliTool.ExitStatus.DATA_ERROR, "Directories not allowed in bin dir for plugin " + info.getName() + ", found " + srcFile.getFileName()); + throw new UserError(CliTool.ExitStatus.DATA_ERROR.status(), "Directories not allowed in bin dir for plugin " + info.getName() + ", found " + srcFile.getFileName()); } Path destFile = destBinDir.resolve(tmpBinDir.relativize(srcFile)); @@ -386,7 +403,7 @@ class InstallPluginCommand extends CliTool.Command { */ private void installConfig(PluginInfo info, Path tmpConfigDir, Path destConfigDir) throws Exception { if (Files.isDirectory(tmpConfigDir) == false) { - throw new UserError(CliTool.ExitStatus.IO_ERROR, "config in plugin " + info.getName() + " is not a directory"); + throw new UserError(CliTool.ExitStatus.IO_ERROR.status(), "config in plugin " + info.getName() + " is not a directory"); } // create the plugin's config dir "if necessary" @@ -395,7 +412,7 @@ class InstallPluginCommand extends CliTool.Command { try (DirectoryStream stream = Files.newDirectoryStream(tmpConfigDir)) { for (Path srcFile : stream) { if (Files.isDirectory(srcFile)) { - throw new UserError(CliTool.ExitStatus.DATA_ERROR, "Directories not allowed in config dir for plugin " + info.getName()); + throw new UserError(CliTool.ExitStatus.DATA_ERROR.status(), "Directories not allowed in config dir for plugin " + info.getName()); } Path 
destFile = destConfigDir.resolve(tmpConfigDir.relativize(srcFile)); diff --git a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java index 6abed4e6bc22..142a18cbde53 100644 --- a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java @@ -24,22 +24,27 @@ import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; +import joptsimple.OptionSet; +import org.elasticsearch.cli.Command; +import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.common.cli.CliTool; import org.elasticsearch.common.cli.Terminal; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; /** * A command for the plugin cli to list plugins installed in elasticsearch. */ -class ListPluginsCommand extends CliTool.Command { +class ListPluginsCommand extends Command { - ListPluginsCommand(Terminal terminal) { - super(terminal); + private final Environment env; + + ListPluginsCommand(Environment env) { + super("Lists installed elasticsearch plugins"); + this.env = env; } @Override - public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception { + public int execute(Terminal terminal, OptionSet options) throws Exception { if (Files.exists(env.pluginsFile()) == false) { throw new IOException("Plugins directory missing: " + env.pluginsFile()); } @@ -51,6 +56,6 @@ class ListPluginsCommand extends CliTool.Command { } } - return CliTool.ExitStatus.OK; + return ExitCodes.OK; } } diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginCli.java b/core/src/main/java/org/elasticsearch/plugins/PluginCli.java index 30a36501a61c..9f2e432a4386 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginCli.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginCli.java @@ -19,106 +19,29 @@ package 
org.elasticsearch.plugins; -import org.apache.commons.cli.CommandLine; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.cli.CliTool; -import org.elasticsearch.common.cli.CliToolConfig; +import org.apache.log4j.BasicConfigurator; +import org.apache.log4j.varia.NullAppender; +import org.elasticsearch.cli.MultiCommand; import org.elasticsearch.common.cli.Terminal; -import org.elasticsearch.common.logging.log4j.LogConfigurator; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.node.internal.InternalSettingsPreparer; -import java.util.Locale; - -import static org.elasticsearch.common.cli.CliToolConfig.Builder.cmd; -import static org.elasticsearch.common.cli.CliToolConfig.Builder.option; - /** * A cli tool for adding, removing and listing plugins for elasticsearch. */ -public class PluginCli extends CliTool { +public class PluginCli extends MultiCommand { - // commands - private static final String LIST_CMD_NAME = "list"; - private static final String INSTALL_CMD_NAME = "install"; - private static final String REMOVE_CMD_NAME = "remove"; - - // usage config - private static final CliToolConfig.Cmd LIST_CMD = cmd(LIST_CMD_NAME, ListPluginsCommand.class).build(); - private static final CliToolConfig.Cmd INSTALL_CMD = cmd(INSTALL_CMD_NAME, InstallPluginCommand.class) - .options(option("b", "batch").required(false)) - .build(); - private static final CliToolConfig.Cmd REMOVE_CMD = cmd(REMOVE_CMD_NAME, RemovePluginCommand.class).build(); - - static final CliToolConfig CONFIG = CliToolConfig.config("plugin", PluginCli.class) - .cmds(LIST_CMD, INSTALL_CMD, REMOVE_CMD) - .build(); + public PluginCli(Environment env) { + super("A tool for managing installed elasticsearch plugins"); + subcommands.put("list", new ListPluginsCommand(env)); + subcommands.put("install", new InstallPluginCommand(env)); + subcommands.put("remove", new RemovePluginCommand(env)); + } public static void 
main(String[] args) throws Exception { - // initialize default for es.logger.level because we will not read the logging.yml - String loggerLevel = System.getProperty("es.logger.level", "INFO"); - // Set the appender for all potential log files to terminal so that other components that use the logger print out the - // same terminal. - // The reason for this is that the plugin cli cannot be configured with a file appender because when the plugin command is - // executed there is no way of knowing where the logfiles should be placed. For example, if elasticsearch - // is run as service then the logs should be at /var/log/elasticsearch but when started from the tar they should be at es.home/logs. - // Therefore we print to Terminal. - Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.builder() - .put("appender.terminal.type", "terminal") - .put("rootLogger", "${es.logger.level}, terminal") - .put("es.logger.level", loggerLevel) - .build(), Terminal.DEFAULT); - // configure but do not read the logging conf file - LogConfigurator.configure(env.settings(), false); - int status = new PluginCli(Terminal.DEFAULT).execute(args).status(); - exit(status); - } - - @SuppressForbidden(reason = "Allowed to exit explicitly from #main()") - private static void exit(int status) { - System.exit(status); - } - - PluginCli(Terminal terminal) { - super(CONFIG, terminal); - } - - @Override - protected Command parse(String cmdName, CommandLine cli) throws Exception { - switch (cmdName.toLowerCase(Locale.ROOT)) { - case LIST_CMD_NAME: - return new ListPluginsCommand(terminal); - case INSTALL_CMD_NAME: - return parseInstallPluginCommand(cli); - case REMOVE_CMD_NAME: - return parseRemovePluginCommand(cli); - default: - assert false : "can't get here as cmd name is validated before this method is called"; - return exitCmd(ExitStatus.USAGE); - } - } - - private Command parseInstallPluginCommand(CommandLine cli) { - String[] args = cli.getArgs(); - if (args.length != 1) { - 
return exitCmd(ExitStatus.USAGE, terminal, "Must supply a single plugin id argument"); - } - - boolean batch = System.console() == null; - if (cli.hasOption("b")) { - batch = true; - } - - return new InstallPluginCommand(terminal, args[0], batch); - } - - private Command parseRemovePluginCommand(CommandLine cli) { - String[] args = cli.getArgs(); - if (args.length != 1) { - return exitCmd(ExitStatus.USAGE, terminal, "Must supply a single plugin name argument"); - } - - return new RemovePluginCommand(terminal, args[0]); + BasicConfigurator.configure(new NullAppender()); + Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, Terminal.DEFAULT); + exit(new PluginCli(env).main(args, Terminal.DEFAULT)); } } diff --git a/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java index 8ce1056bbfdc..10a73a0fc9a8 100644 --- a/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java @@ -19,40 +19,57 @@ package org.elasticsearch.plugins; -import org.apache.lucene.util.IOUtils; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.cli.CliTool; -import org.elasticsearch.common.cli.Terminal; -import org.elasticsearch.common.cli.UserError; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; - import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.List; +import joptsimple.OptionSet; +import joptsimple.OptionSpec; +import org.apache.lucene.util.IOUtils; +import org.elasticsearch.cli.Command; +import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.cli.UserError; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.cli.CliTool; +import org.elasticsearch.common.cli.Terminal; +import 
org.elasticsearch.env.Environment; + import static org.elasticsearch.common.cli.Terminal.Verbosity.VERBOSE; /** * A command for the plugin cli to remove a plugin from elasticsearch. */ -class RemovePluginCommand extends CliTool.Command { - private final String pluginName; +class RemovePluginCommand extends Command { - public RemovePluginCommand(Terminal terminal, String pluginName) { - super(terminal); - this.pluginName = pluginName; + private final Environment env; + private final OptionSpec arguments; + + RemovePluginCommand(Environment env) { + super("Removes a plugin from elasticsearch"); + this.env = env; + this.arguments = parser.nonOptions("plugin name"); } @Override - public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception { + public int execute(Terminal terminal, OptionSet options) throws Exception { + // TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args + List args = arguments.values(options); + if (args.size() != 1) { + throw new UserError(ExitCodes.USAGE, "Must supply a single plugin id argument"); + } + execute(terminal, args.get(0)); + return ExitCodes.OK; + } + + // pkg private for testing + void execute(Terminal terminal, String pluginName) throws Exception { terminal.println("-> Removing " + Strings.coalesceToEmpty(pluginName) + "..."); Path pluginDir = env.pluginsFile().resolve(pluginName); if (Files.exists(pluginDir) == false) { - throw new UserError(CliTool.ExitStatus.USAGE, "Plugin " + pluginName + " not found. Run 'plugin list' to get list of installed plugins."); + throw new UserError(CliTool.ExitStatus.USAGE.status(), "Plugin " + pluginName + " not found. 
Run 'plugin list' to get list of installed plugins."); } List pluginPaths = new ArrayList<>(); @@ -60,7 +77,7 @@ class RemovePluginCommand extends CliTool.Command { Path pluginBinDir = env.binFile().resolve(pluginName); if (Files.exists(pluginBinDir)) { if (Files.isDirectory(pluginBinDir) == false) { - throw new UserError(CliTool.ExitStatus.IO_ERROR, "Bin dir for " + pluginName + " is not a directory"); + throw new UserError(CliTool.ExitStatus.IO_ERROR.status(), "Bin dir for " + pluginName + " is not a directory"); } pluginPaths.add(pluginBinDir); terminal.println(VERBOSE, "Removing: " + pluginBinDir); @@ -72,7 +89,5 @@ class RemovePluginCommand extends CliTool.Command { pluginPaths.add(tmpPluginDir); IOUtils.rm(pluginPaths.toArray(new Path[pluginPaths.size()])); - - return CliTool.ExitStatus.OK; } } diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java index 3a1215900831..8973a9be3e25 100644 --- a/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java @@ -28,6 +28,7 @@ import static org.hamcrest.Matchers.is; public class PluginCliTests extends CliToolTestCase { public void testHelpWorks() throws Exception { + /* CliToolTestCase.CaptureOutputTerminal terminal = new CliToolTestCase.CaptureOutputTerminal(); assertThat(new PluginCli(terminal).execute(args("--help")), is(OK_AND_EXIT)); assertTerminalOutputContainsHelpFile(terminal, "/org/elasticsearch/plugins/plugin.help"); @@ -46,5 +47,6 @@ public class PluginCliTests extends CliToolTestCase { terminal.getTerminalOutput().clear(); assertThat(new PluginCli(terminal).execute(args("list -h")), is(OK_AND_EXIT)); assertTerminalOutputContainsHelpFile(terminal, "/org/elasticsearch/plugins/plugin-list.help"); + */ } } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java 
b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java index 012af99cef01..c1d894710a38 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.cli.CliTool.ExitStatus; import org.elasticsearch.common.cli.CliToolTestCase; -import org.elasticsearch.common.cli.UserError; +import org.elasticsearch.cli.UserError; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.monitor.jvm.JvmInfo; import org.hamcrest.Matcher; @@ -37,7 +37,6 @@ import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.Properties; import static org.elasticsearch.common.cli.CliTool.ExitStatus.OK; import static org.elasticsearch.common.cli.CliTool.ExitStatus.OK_AND_EXIT; diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CliToolTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CliToolTests.java index 5033914632ab..70d507853fa1 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CliToolTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CliToolTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.common.cli; import org.apache.commons.cli.CommandLine; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.cli.UserError; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Settings; @@ -70,7 +70,7 @@ public class CliToolTests extends CliToolTestCase { @Override public CliTool.ExitStatus execute(Settings settings, Environment env) throws UserError { executed.set(true); - throw new UserError(CliTool.ExitStatus.USAGE, "bad usage"); + throw new 
UserError(CliTool.ExitStatus.USAGE.status(), "bad usage"); } }; SingleCmdTool tool = new SingleCmdTool("tool", terminal, cmd); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index 66dfa67ccbdd..e51b13e969e4 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -36,6 +36,7 @@ import java.nio.file.attribute.BasicFileAttributes; import java.nio.file.attribute.PosixFileAttributeView; import java.nio.file.attribute.PosixFileAttributes; import java.nio.file.attribute.PosixFilePermission; +import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -44,10 +45,11 @@ import java.util.zip.ZipOutputStream; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.Version; +import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.common.cli.CliTool; import org.elasticsearch.common.cli.CliToolTestCase; import org.elasticsearch.common.cli.Terminal; -import org.elasticsearch.common.cli.UserError; +import org.elasticsearch.cli.UserError; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; @@ -118,8 +120,7 @@ public class InstallPluginCommandTests extends ESTestCase { static CliToolTestCase.CaptureOutputTerminal installPlugin(String pluginUrl, Environment env) throws Exception { CliToolTestCase.CaptureOutputTerminal terminal = new CliToolTestCase.CaptureOutputTerminal(Terminal.Verbosity.NORMAL); - CliTool.ExitStatus status = new InstallPluginCommand(terminal, pluginUrl, true).execute(env.settings(), env); - assertEquals(CliTool.ExitStatus.OK, status); + new InstallPluginCommand(env).execute(terminal, pluginUrl, true); return terminal; } diff --git 
a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java index c68e207c0c38..7ffdd8545df1 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java @@ -26,6 +26,7 @@ import java.util.Collections; import java.util.List; import org.apache.lucene.util.LuceneTestCase; +import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.common.cli.CliTool; import org.elasticsearch.common.cli.CliToolTestCase; import org.elasticsearch.common.cli.Terminal; @@ -47,8 +48,9 @@ public class ListPluginsCommandTests extends ESTestCase { static CliToolTestCase.CaptureOutputTerminal listPlugins(Environment env) throws Exception { CliToolTestCase.CaptureOutputTerminal terminal = new CliToolTestCase.CaptureOutputTerminal(Terminal.Verbosity.NORMAL); - CliTool.ExitStatus status = new ListPluginsCommand(terminal).execute(env.settings(), env); - assertEquals(CliTool.ExitStatus.OK, status); + String[] args = {}; + int status = new ListPluginsCommand(env).main(args, terminal); + assertEquals(ExitCodes.OK, status); return terminal; } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java index 10fbc3c26966..6ffe4168de1c 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java @@ -25,10 +25,11 @@ import java.nio.file.Files; import java.nio.file.Path; import org.apache.lucene.util.LuceneTestCase; +import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.common.cli.CliTool; import org.elasticsearch.common.cli.CliToolTestCase; import org.elasticsearch.common.cli.Terminal; -import 
org.elasticsearch.common.cli.UserError; +import org.elasticsearch.cli.UserError; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; @@ -50,8 +51,7 @@ public class RemovePluginCommandTests extends ESTestCase { static CliToolTestCase.CaptureOutputTerminal removePlugin(String name, Environment env) throws Exception { CliToolTestCase.CaptureOutputTerminal terminal = new CliToolTestCase.CaptureOutputTerminal(Terminal.Verbosity.VERBOSE); - CliTool.ExitStatus status = new RemovePluginCommand(terminal, name).execute(env.settings(), env); - assertEquals(CliTool.ExitStatus.OK, status); + new RemovePluginCommand(env).execute(terminal, name); return terminal; } diff --git a/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java index 6d6c176b27d9..21a5e0228f68 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java @@ -28,6 +28,7 @@ import org.junit.After; import org.junit.Before; import java.io.IOException; +import java.io.PrintWriter; import java.util.ArrayList; import java.util.List; @@ -73,6 +74,11 @@ public abstract class CliToolTestCase extends ESTestCase { public char[] readSecret(String prompt) { return new char[0]; } + + @Override + public PrintWriter getWriter() { + return null; + } } /** From cc743049cf41b189aca4ed66088d868f1e1f6688 Mon Sep 17 00:00:00 2001 From: Felipe Forbeck Date: Wed, 27 Jan 2016 23:39:36 -0200 Subject: [PATCH 036/320] Skipping hidden files compilation for script service --- .../elasticsearch/script/ScriptService.java | 5 +++ .../script/ScriptServiceTests.java | 33 +++++++++++++++++++ 2 files changed, 38 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java 
b/core/src/main/java/org/elasticsearch/script/ScriptService.java index 8e1ac1c8d77d..2fbfc126380b 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java @@ -42,6 +42,7 @@ import org.elasticsearch.common.cache.RemovalNotification; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -533,6 +534,10 @@ public class ScriptService extends AbstractComponent implements Closeable { if (logger.isTraceEnabled()) { logger.trace("Loading script file : [{}]", file); } + if (FileSystemUtils.isHidden(file)) { + logger.warn("--- Hidden file skipped : [{}]", file); + return; + } Tuple scriptNameExt = scriptNameExt(file); if (scriptNameExt != null) { ScriptEngineService engineService = getScriptEngineServiceForFileExt(scriptNameExt.v2()); diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index 0825da4d4dfa..f703b9d93648 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -151,6 +151,39 @@ public class ScriptServiceTests extends ESTestCase { } } + public void testHiddenFileSkipped() throws IOException { + ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); + buildScriptService(Settings.EMPTY); + + logger.info("--> setup one hidden test file"); + Path testFileHidden = scriptsFilePath.resolve(".hidden_file"); + Path testRegularFile = scriptsFilePath.resolve("test_file.tst"); + Streams.copy("test_hidden_file".getBytes("UTF-8"), Files.newOutputStream(testFileHidden)); 
+ Streams.copy("test_file".getBytes("UTF-8"), Files.newOutputStream(testRegularFile)); + resourceWatcherService.notifyNow(); + + try { + logger.info("--> verify if hidden_file was skipped"); + scriptService.compile(new Script("hidden_file", ScriptType.FILE, "test", null), + ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap()); + fail("the script hidden_file should not be processed"); + } catch (IllegalArgumentException ex) { + assertThat(ex.getMessage(), containsString("Unable to find on disk file script [hidden_file] using lang [test]")); + } + + logger.info("--> verify if test_file was correctly processed"); + CompiledScript compiledScript = scriptService.compile(new Script("test_file", ScriptType.FILE, "test", null), + ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap()); + assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file")); + + logger.info("--> delete hidden file"); + Files.delete(testFileHidden); + + logger.info("--> delete test file"); + Files.delete(testRegularFile); + resourceWatcherService.notifyNow(); + } + public void testInlineScriptCompiledOnceCache() throws IOException { buildScriptService(Settings.EMPTY); CompiledScript compiledScript1 = scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), From 5478c97a362a9f450b75af6ca6c8b2b0a4cf3f90 Mon Sep 17 00:00:00 2001 From: Felipe Forbeck Date: Tue, 2 Feb 2016 08:29:53 -0200 Subject: [PATCH 037/320] Minor fixes after review --- .../elasticsearch/script/ScriptService.java | 10 ++++---- .../script/ScriptServiceTests.java | 25 +++++++------------ 2 files changed, 14 insertions(+), 21 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java index 2fbfc126380b..ae03bde50327 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java @@ -56,7 +56,6 
@@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.TemplateQueryParser; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.watcher.FileChangesListener; import org.elasticsearch.watcher.FileWatcher; @@ -531,13 +530,14 @@ public class ScriptService extends AbstractComponent implements Closeable { @Override public void onFileInit(Path file) { + if (FileSystemUtils.isHidden(file)) { + logger.debug("Hidden script file skipped : [{}]", file); + return; + } if (logger.isTraceEnabled()) { logger.trace("Loading script file : [{}]", file); } - if (FileSystemUtils.isHidden(file)) { - logger.warn("--- Hidden file skipped : [{}]", file); - return; - } + Tuple scriptNameExt = scriptNameExt(file); if (scriptNameExt != null) { ScriptEngineService engineService = getScriptEngineServiceForFileExt(scriptNameExt.v2()); diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index f703b9d93648..e5965ed32974 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -151,36 +151,29 @@ public class ScriptServiceTests extends ESTestCase { } } - public void testHiddenFileSkipped() throws IOException { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); + public void testHiddenScriptFileSkipped() throws IOException { buildScriptService(Settings.EMPTY); - logger.info("--> setup one hidden test file"); - Path testFileHidden = scriptsFilePath.resolve(".hidden_file"); - Path testRegularFile = scriptsFilePath.resolve("test_file.tst"); - Streams.copy("test_hidden_file".getBytes("UTF-8"), Files.newOutputStream(testFileHidden)); - 
Streams.copy("test_file".getBytes("UTF-8"), Files.newOutputStream(testRegularFile)); + Path testHiddenFile = scriptsFilePath.resolve(".hidden_file"); + Path testFile = scriptsFilePath.resolve("test_file.tst"); + Streams.copy("test_hidden_file".getBytes("UTF-8"), Files.newOutputStream(testHiddenFile)); + Streams.copy("test_file".getBytes("UTF-8"), Files.newOutputStream(testFile)); resourceWatcherService.notifyNow(); try { - logger.info("--> verify if hidden_file was skipped"); scriptService.compile(new Script("hidden_file", ScriptType.FILE, "test", null), - ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap()); + ScriptContext.Standard.SEARCH, Collections.emptyMap()); fail("the script hidden_file should not be processed"); } catch (IllegalArgumentException ex) { assertThat(ex.getMessage(), containsString("Unable to find on disk file script [hidden_file] using lang [test]")); } - logger.info("--> verify if test_file was correctly processed"); CompiledScript compiledScript = scriptService.compile(new Script("test_file", ScriptType.FILE, "test", null), - ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap()); + ScriptContext.Standard.SEARCH, Collections.emptyMap()); assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file")); - logger.info("--> delete hidden file"); - Files.delete(testFileHidden); - - logger.info("--> delete test file"); - Files.delete(testRegularFile); + Files.delete(testHiddenFile); + Files.delete(testFile); resourceWatcherService.notifyNow(); } From f65f84e0ef8cd557b41f37f39a6982d7e9a4c6dc Mon Sep 17 00:00:00 2001 From: Felipe Forbeck Date: Tue, 2 Feb 2016 09:46:30 -0200 Subject: [PATCH 038/320] Ignoring hidden script files and files with invalid names --- .../elasticsearch/script/ScriptService.java | 77 ++++++++++--------- .../script/ScriptServiceTests.java | 51 ++++++++---- 2 files changed, 75 insertions(+), 53 deletions(-) diff --git 
a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java index ae03bde50327..4d1cfed95442 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java @@ -42,7 +42,6 @@ import org.elasticsearch.common.cache.RemovalNotification; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -225,6 +224,8 @@ public class ScriptService extends AbstractComponent implements Closeable { return scriptEngineService; } + + /** * Checks if a script can be executed and compiles it if needed, or returns the previously compiled and cached script. */ @@ -355,6 +356,22 @@ public class ScriptService extends AbstractComponent implements Closeable { + scriptLang + "/" + id + "]"); } + Tuple getScriptNameExt(Path file) { + Path scriptPath = scriptsDirectory.relativize(file); + int extIndex = scriptPath.toString().lastIndexOf('.'); + if (extIndex <= 0) { + return null; + } + + String ext = scriptPath.toString().substring(extIndex + 1); + if (ext.isEmpty()) { + return null; + } + + String scriptName = scriptPath.toString().substring(0, extIndex).replace(scriptPath.getFileSystem().getSeparator(), "_"); + return new Tuple<>(scriptName, ext); + } + private void validate(BytesReference scriptBytes, String scriptLang) { try { XContentParser parser = XContentFactory.xContent(scriptBytes).createParser(scriptBytes); @@ -516,51 +533,37 @@ public class ScriptService extends AbstractComponent implements Closeable { private class ScriptChangesListener extends FileChangesListener { - private Tuple scriptNameExt(Path file) { - Path scriptPath = 
scriptsDirectory.relativize(file); - int extIndex = scriptPath.toString().lastIndexOf('.'); - if (extIndex != -1) { - String ext = scriptPath.toString().substring(extIndex + 1); - String scriptName = scriptPath.toString().substring(0, extIndex).replace(scriptPath.getFileSystem().getSeparator(), "_"); - return new Tuple<>(scriptName, ext); - } else { - return null; - } - } - @Override public void onFileInit(Path file) { - if (FileSystemUtils.isHidden(file)) { - logger.debug("Hidden script file skipped : [{}]", file); + Tuple scriptNameExt = getScriptNameExt(file); + if (scriptNameExt == null) { + logger.debug("Skipped script with invalid extension : [{}]", file); return; } if (logger.isTraceEnabled()) { logger.trace("Loading script file : [{}]", file); } - Tuple scriptNameExt = scriptNameExt(file); - if (scriptNameExt != null) { - ScriptEngineService engineService = getScriptEngineServiceForFileExt(scriptNameExt.v2()); - if (engineService == null) { - logger.warn("no script engine found for [{}]", scriptNameExt.v2()); - } else { - try { - //we don't know yet what the script will be used for, but if all of the operations for this lang - // with file scripts are disabled, it makes no sense to even compile it and cache it. 
- if (isAnyScriptContextEnabled(engineService.getTypes().get(0), engineService, ScriptType.FILE)) { - logger.info("compiling script file [{}]", file.toAbsolutePath()); - try(InputStreamReader reader = new InputStreamReader(Files.newInputStream(file), StandardCharsets.UTF_8)) { - String script = Streams.copyToString(reader); - CacheKey cacheKey = new CacheKey(engineService, scriptNameExt.v1(), null, Collections.emptyMap()); - staticCache.put(cacheKey, new CompiledScript(ScriptType.FILE, scriptNameExt.v1(), engineService.getTypes().get(0), engineService.compile(script, Collections.emptyMap()))); - scriptMetrics.onCompilation(); - } - } else { - logger.warn("skipping compile of script file [{}] as all scripted operations are disabled for file scripts", file.toAbsolutePath()); + ScriptEngineService engineService = getScriptEngineServiceForFileExt(scriptNameExt.v2()); + if (engineService == null) { + logger.warn("No script engine found for [{}]", scriptNameExt.v2()); + } else { + try { + //we don't know yet what the script will be used for, but if all of the operations for this lang + // with file scripts are disabled, it makes no sense to even compile it and cache it. 
+ if (isAnyScriptContextEnabled(engineService.getTypes().get(0), engineService, ScriptType.FILE)) { + logger.info("compiling script file [{}]", file.toAbsolutePath()); + try(InputStreamReader reader = new InputStreamReader(Files.newInputStream(file), StandardCharsets.UTF_8)) { + String script = Streams.copyToString(reader); + CacheKey cacheKey = new CacheKey(engineService, scriptNameExt.v1(), null, Collections.emptyMap()); + staticCache.put(cacheKey, new CompiledScript(ScriptType.FILE, scriptNameExt.v1(), engineService.getTypes().get(0), engineService.compile(script, Collections.emptyMap()))); + scriptMetrics.onCompilation(); } - } catch (Throwable e) { - logger.warn("failed to load/compile script [{}]", e, scriptNameExt.v1()); + } else { + logger.warn("skipping compile of script file [{}] as all scripted operations are disabled for file scripts", file.toAbsolutePath()); } + } catch (Throwable e) { + logger.warn("failed to load/compile script [{}]", e, scriptNameExt.v1()); } } } @@ -572,7 +575,7 @@ public class ScriptService extends AbstractComponent implements Closeable { @Override public void onFileDeleted(Path file) { - Tuple scriptNameExt = scriptNameExt(file); + Tuple scriptNameExt = getScriptNameExt(file); if (scriptNameExt != null) { ScriptEngineService engineService = getScriptEngineServiceForFileExt(scriptNameExt.v2()); assert engineService != null; diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index e5965ed32974..60363b57b3bf 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.script; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; 
@@ -122,26 +123,21 @@ public class ScriptServiceTests extends ESTestCase { } public void testScriptsWithoutExtensions() throws IOException { - buildScriptService(Settings.EMPTY); - logger.info("--> setup two test files one with extension and another without"); Path testFileNoExt = scriptsFilePath.resolve("test_no_ext"); Path testFileWithExt = scriptsFilePath.resolve("test_script.tst"); Streams.copy("test_file_no_ext".getBytes("UTF-8"), Files.newOutputStream(testFileNoExt)); Streams.copy("test_file".getBytes("UTF-8"), Files.newOutputStream(testFileWithExt)); resourceWatcherService.notifyNow(); - logger.info("--> verify that file with extension was correctly processed"); CompiledScript compiledScript = scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null), ScriptContext.Standard.SEARCH, Collections.emptyMap()); assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file")); - logger.info("--> delete both files"); Files.delete(testFileNoExt); Files.delete(testFileWithExt); resourceWatcherService.notifyNow(); - logger.info("--> verify that file with extension was correctly removed"); try { scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null), ScriptContext.Standard.SEARCH, Collections.emptyMap()); @@ -151,29 +147,52 @@ public class ScriptServiceTests extends ESTestCase { } } - public void testHiddenScriptFileSkipped() throws IOException { + public void testInvalidScriptNames() throws IOException { + buildScriptService(Settings.EMPTY); + + Path testHiddenFile = scriptsFilePath.resolve(".hidden_file"); + assertThat(scriptService.getScriptNameExt(testHiddenFile), org.hamcrest.Matchers.nullValue()); + + Path testWithoutName = scriptsFilePath.resolve(""); + assertThat(scriptService.getScriptNameExt(testWithoutName), org.hamcrest.Matchers.nullValue()); + + Path testDotName = scriptsFilePath.resolve("."); + assertThat(scriptService.getScriptNameExt(testDotName), org.hamcrest.Matchers.nullValue()); + + Path 
testWithoutExtension = scriptsFilePath.resolve("test."); + assertThat(scriptService.getScriptNameExt(testWithoutExtension), org.hamcrest.Matchers.nullValue()); + + Path testNameOnly = scriptsFilePath.resolve("test"); + assertThat(scriptService.getScriptNameExt(testNameOnly), org.hamcrest.Matchers.nullValue()); + } + + public void testValidScriptName() throws IOException { + buildScriptService(Settings.EMPTY); + + Path testTestFile = scriptsFilePath.resolve("test.ext"); + Tuple scriptNameExt = scriptService.getScriptNameExt(testTestFile); + assertThat(scriptNameExt.v1(), org.hamcrest.Matchers.equalTo("test")); + assertThat(scriptNameExt.v2(), org.hamcrest.Matchers.equalTo("ext")); + } + + public void testScriptChangesListenerOnceHiddenFileDetected() throws IOException { buildScriptService(Settings.EMPTY); Path testHiddenFile = scriptsFilePath.resolve(".hidden_file"); - Path testFile = scriptsFilePath.resolve("test_file.tst"); Streams.copy("test_hidden_file".getBytes("UTF-8"), Files.newOutputStream(testHiddenFile)); - Streams.copy("test_file".getBytes("UTF-8"), Files.newOutputStream(testFile)); resourceWatcherService.notifyNow(); try { - scriptService.compile(new Script("hidden_file", ScriptType.FILE, "test", null), + String invalidScriptName = ""; + scriptService.compile(new Script(invalidScriptName, ScriptType.FILE, "test", null), ScriptContext.Standard.SEARCH, Collections.emptyMap()); - fail("the script hidden_file should not be processed"); + fail("the script .hidden_file should not be processed"); } catch (IllegalArgumentException ex) { - assertThat(ex.getMessage(), containsString("Unable to find on disk file script [hidden_file] using lang [test]")); + //script without name because it is a hidden file + assertThat(ex.getMessage(), containsString("Unable to find on disk file script [] using lang [test]")); } - CompiledScript compiledScript = scriptService.compile(new Script("test_file", ScriptType.FILE, "test", null), - ScriptContext.Standard.SEARCH, 
Collections.emptyMap()); - assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file")); - Files.delete(testHiddenFile); - Files.delete(testFile); resourceWatcherService.notifyNow(); } From 95a76a5921ff180999db68e2b4e775dc8fa92d44 Mon Sep 17 00:00:00 2001 From: Felipe Forbeck Date: Tue, 1 Mar 2016 08:37:40 -0300 Subject: [PATCH 039/320] testing script compiled once dot files detected --- .../elasticsearch/script/ScriptService.java | 34 +++++++------- .../script/ScriptServiceTests.java | 47 ++++--------------- 2 files changed, 25 insertions(+), 56 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java index 4d1cfed95442..cfc402dbb04b 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java @@ -356,22 +356,6 @@ public class ScriptService extends AbstractComponent implements Closeable { + scriptLang + "/" + id + "]"); } - Tuple getScriptNameExt(Path file) { - Path scriptPath = scriptsDirectory.relativize(file); - int extIndex = scriptPath.toString().lastIndexOf('.'); - if (extIndex <= 0) { - return null; - } - - String ext = scriptPath.toString().substring(extIndex + 1); - if (ext.isEmpty()) { - return null; - } - - String scriptName = scriptPath.toString().substring(0, extIndex).replace(scriptPath.getFileSystem().getSeparator(), "_"); - return new Tuple<>(scriptName, ext); - } - private void validate(BytesReference scriptBytes, String scriptLang) { try { XContentParser parser = XContentFactory.xContent(scriptBytes).createParser(scriptBytes); @@ -533,6 +517,22 @@ public class ScriptService extends AbstractComponent implements Closeable { private class ScriptChangesListener extends FileChangesListener { + private Tuple getScriptNameExt(Path file) { + Path scriptPath = scriptsDirectory.relativize(file); + int extIndex = scriptPath.toString().lastIndexOf('.'); + if 
(extIndex <= 0) { + return null; + } + + String ext = scriptPath.toString().substring(extIndex + 1); + if (ext.isEmpty()) { + return null; + } + + String scriptName = scriptPath.toString().substring(0, extIndex).replace(scriptPath.getFileSystem().getSeparator(), "_"); + return new Tuple<>(scriptName, ext); + } + @Override public void onFileInit(Path file) { Tuple scriptNameExt = getScriptNameExt(file); @@ -553,7 +553,7 @@ public class ScriptService extends AbstractComponent implements Closeable { // with file scripts are disabled, it makes no sense to even compile it and cache it. if (isAnyScriptContextEnabled(engineService.getTypes().get(0), engineService, ScriptType.FILE)) { logger.info("compiling script file [{}]", file.toAbsolutePath()); - try(InputStreamReader reader = new InputStreamReader(Files.newInputStream(file), StandardCharsets.UTF_8)) { + try (InputStreamReader reader = new InputStreamReader(Files.newInputStream(file), StandardCharsets.UTF_8)) { String script = Streams.copyToString(reader); CacheKey cacheKey = new CacheKey(engineService, scriptNameExt.v1(), null, Collections.emptyMap()); staticCache.put(cacheKey, new CompiledScript(ScriptType.FILE, scriptNameExt.v1(), engineService.getTypes().get(0), engineService.compile(script, Collections.emptyMap()))); diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index 60363b57b3bf..a369b44e2b16 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.script; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -147,52 +146,22 @@ public class ScriptServiceTests extends ESTestCase { } } - public 
void testInvalidScriptNames() throws IOException { - buildScriptService(Settings.EMPTY); - - Path testHiddenFile = scriptsFilePath.resolve(".hidden_file"); - assertThat(scriptService.getScriptNameExt(testHiddenFile), org.hamcrest.Matchers.nullValue()); - - Path testWithoutName = scriptsFilePath.resolve(""); - assertThat(scriptService.getScriptNameExt(testWithoutName), org.hamcrest.Matchers.nullValue()); - - Path testDotName = scriptsFilePath.resolve("."); - assertThat(scriptService.getScriptNameExt(testDotName), org.hamcrest.Matchers.nullValue()); - - Path testWithoutExtension = scriptsFilePath.resolve("test."); - assertThat(scriptService.getScriptNameExt(testWithoutExtension), org.hamcrest.Matchers.nullValue()); - - Path testNameOnly = scriptsFilePath.resolve("test"); - assertThat(scriptService.getScriptNameExt(testNameOnly), org.hamcrest.Matchers.nullValue()); - } - - public void testValidScriptName() throws IOException { - buildScriptService(Settings.EMPTY); - - Path testTestFile = scriptsFilePath.resolve("test.ext"); - Tuple scriptNameExt = scriptService.getScriptNameExt(testTestFile); - assertThat(scriptNameExt.v1(), org.hamcrest.Matchers.equalTo("test")); - assertThat(scriptNameExt.v2(), org.hamcrest.Matchers.equalTo("ext")); - } - - public void testScriptChangesListenerOnceHiddenFileDetected() throws IOException { + public void testScriptCompiledOnceHiddenFileDetected() throws IOException { buildScriptService(Settings.EMPTY); Path testHiddenFile = scriptsFilePath.resolve(".hidden_file"); Streams.copy("test_hidden_file".getBytes("UTF-8"), Files.newOutputStream(testHiddenFile)); + + Path testFileScript = scriptsFilePath.resolve("file_script.tst"); + Streams.copy("test_file_script".getBytes("UTF-8"), Files.newOutputStream(testFileScript)); resourceWatcherService.notifyNow(); - try { - String invalidScriptName = ""; - scriptService.compile(new Script(invalidScriptName, ScriptType.FILE, "test", null), - ScriptContext.Standard.SEARCH, Collections.emptyMap()); - 
fail("the script .hidden_file should not be processed"); - } catch (IllegalArgumentException ex) { - //script without name because it is a hidden file - assertThat(ex.getMessage(), containsString("Unable to find on disk file script [] using lang [test]")); - } + CompiledScript compiledScript = scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), + ScriptContext.Standard.SEARCH, Collections.emptyMap()); + assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file_script")); Files.delete(testHiddenFile); + Files.delete(testFileScript); resourceWatcherService.notifyNow(); } From be8ed737bc3af18d2ae4c9ad332e32900911a85a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 22 Feb 2016 11:46:47 -0800 Subject: [PATCH 040/320] Suggest: Move name of suggestion to SuggestBuilder Currently each suggestion keeps track of its own name. This has the disadvantage of having to pass down the parsed name property in the suggestions fromXContent() and the serialization methods as an argument, since we need it in the ctor. This change moves the naming of the suggestions to the surrounding SuggestBuilder and by this eliminates the need for passing down the names in the parsing and serialization methods. By making `name` a required argument in SuggestBuilder#addSuggestion() we also make sure it is always set and prevent using the same name twice, which wasn't possible before. 
--- .../action/search/SearchRequestBuilder.java | 4 +- .../action/suggest/SuggestRequestBuilder.java | 6 +- .../rest/action/search/RestSearchAction.java | 6 +- .../search/suggest/SuggestBuilder.java | 53 ++-- .../search/suggest/SuggestBuilders.java | 15 +- .../search/suggest/SuggestionBuilder.java | 36 +-- .../CompletionSuggestionBuilder.java | 12 +- .../phrase/PhraseSuggestionBuilder.java | 15 +- .../suggest/term/TermSuggestionBuilder.java | 26 +- .../index/suggest/stats/SuggestStatsIT.java | 4 +- .../indices/IndicesOptionsIntegrationIT.java | 2 +- .../builder/SearchSourceBuilderTests.java | 9 +- .../AbstractSuggestionBuilderTestCase.java | 6 +- .../suggest/CompletionSuggestSearchIT.java | 104 +++---- .../ContextCompletionSuggestSearchIT.java | 51 +-- .../suggest/CustomSuggesterSearchIT.java | 15 +- .../search/suggest/SuggestBuilderTests.java | 26 +- .../CompletionSuggesterBuilderTests.java | 5 +- .../phrase/PhraseSuggestionBuilderTests.java | 2 +- .../term/TermSuggestionBuilderTests.java | 6 +- .../messy/tests/SuggestSearchTests.java | 294 +++++++++--------- 21 files changed, 341 insertions(+), 356 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index ffca87de22cc..931df24a2560 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -393,9 +393,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder suggestion) { - suggest.addSuggestion(suggestion); + public SuggestRequestBuilder addSuggestion(String name, SuggestionBuilder suggestion) { + suggest.addSuggestion(name, suggestion); return this; } diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index bd014bccde24..76d1dbf9d487 100644 
--- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -48,6 +48,7 @@ import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.suggest.SuggestBuilder; +import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; import java.io.IOException; import java.util.Arrays; @@ -56,7 +57,6 @@ import static org.elasticsearch.common.unit.TimeValue.parseTimeValue; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion; -import static org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; /** * @@ -255,8 +255,8 @@ public class RestSearchAction extends BaseRestHandler { String suggestText = request.param("suggest_text", request.param("q")); int suggestSize = request.paramAsInt("suggest_size", 5); String suggestMode = request.param("suggest_mode"); - searchSourceBuilder.suggest(new SuggestBuilder().addSuggestion( - termSuggestion(suggestField).field(suggestField) + searchSourceBuilder.suggest(new SuggestBuilder().addSuggestion(suggestField, + termSuggestion().field(suggestField) .text(suggestText).size(suggestSize) .suggestMode(SuggestMode.resolve(suggestMode)))); } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java index 34880d51af82..f8a8ccdf685a 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java @@ -34,8 +34,9 @@ import org.elasticsearch.index.query.QueryShardContext; import 
org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; import java.util.Objects; /** @@ -51,10 +52,7 @@ public class SuggestBuilder extends ToXContentToBytes implements Writeable> suggestions = new ArrayList<>(); - - public SuggestBuilder() { - } + private final Map> suggestions = new HashMap<>(); /** * Sets the text to provide suggestions for. The suggest text is a required option that needs @@ -79,23 +77,23 @@ public class SuggestBuilder extends ToXContentToBytes implements WriteableSuggestions are added, is the same as in the response. + * @throws IllegalArgumentException if two suggestions added have the same name */ - public SuggestBuilder addSuggestion(SuggestionBuilder suggestion) { - suggestions.add(suggestion); + public SuggestBuilder addSuggestion(String name, SuggestionBuilder suggestion) { + Objects.requireNonNull(name, "every suggestion needs a name"); + if (suggestions.get(name) == null) { + suggestions.put(name, suggestion); + } else { + throw new IllegalArgumentException("already added another suggestion with name [" + name + "]"); + } return this; } /** - * Get the Suggestions that were added to the globat {@link SuggestBuilder} + * Get all the Suggestions that were added to the global {@link SuggestBuilder}, + * together with their names */ - public List> getSuggestions() { - return suggestions; - } - - /** - * Returns all suggestions with the defined names. 
- */ - public List> getSuggestion() { + public Map> getSuggestions() { return suggestions; } @@ -105,8 +103,10 @@ public class SuggestBuilder extends ToXContentToBytes implements Writeable suggestion : suggestions) { - builder = suggestion.toXContent(builder, params); + for (Entry> suggestion : suggestions.entrySet()) { + builder.startObject(suggestion.getKey()); + suggestion.getValue().toXContent(builder, params); + builder.endObject(); } builder.endObject(); return builder; @@ -133,7 +133,7 @@ public class SuggestBuilder extends ToXContentToBytes implements Writeable suggestionBuilder : suggestions) { - SuggestionContext suggestionContext = suggestionBuilder.build(context); + for (Entry> suggestion : suggestions.entrySet()) { + SuggestionContext suggestionContext = suggestion.getValue().build(context); if (suggestionContext.getText() == null) { if (globalText == null) { throw new IllegalArgumentException("The required text option is missing"); } suggestionContext.setText(BytesRefs.toBytesRef(globalText)); } - suggestionSearchContext.addSuggestion(suggestionBuilder.name(), suggestionContext); + suggestionSearchContext.addSuggestion(suggestion.getKey(), suggestionContext); } return suggestionSearchContext; } @@ -162,7 +162,7 @@ public class SuggestBuilder extends ToXContentToBytes implements Writeable> suggestion : suggestions.entrySet()) { + out.writeString(suggestion.getKey()); + out.writeSuggestion(suggestion.getValue()); } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilders.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilders.java index 66b917394ffc..33b32744f52f 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilders.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilders.java @@ -31,33 +31,30 @@ public abstract class SuggestBuilders { /** * Creates a term suggestion lookup query with the provided name * - * @param name The suggestion name * @return a {@link 
org.elasticsearch.search.suggest.term.TermSuggestionBuilder} * instance */ - public static TermSuggestionBuilder termSuggestion(String name) { - return new TermSuggestionBuilder(name); + public static TermSuggestionBuilder termSuggestion() { + return new TermSuggestionBuilder(); } /** * Creates a phrase suggestion lookup query with the provided name * - * @param name The suggestion name * @return a {@link org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder} * instance */ - public static PhraseSuggestionBuilder phraseSuggestion(String name) { - return new PhraseSuggestionBuilder(name); + public static PhraseSuggestionBuilder phraseSuggestion() { + return new PhraseSuggestionBuilder(); } /** * Creates a completion suggestion lookup query with the provided name * - * @param name The suggestion name * @return a {@link org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder} * instance */ - public static CompletionSuggestionBuilder completionSuggestion(String name) { - return new CompletionSuggestionBuilder(name); + public static CompletionSuggestionBuilder completionSuggestion() { + return new CompletionSuggestionBuilder(); } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java index 628ab3f7c4f8..5291be775a66 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java @@ -43,7 +43,6 @@ import java.util.Objects; */ public abstract class SuggestionBuilder> extends ToXContentToBytes implements NamedWriteable { - protected final String name; // TODO this seems mandatory and should be constructor arg protected String fieldname; protected String text; @@ -61,18 +60,6 @@ public abstract class SuggestionBuilder> extends protected static final ParseField SIZE_FIELD = new ParseField("size"); protected static final ParseField 
SHARDSIZE_FIELD = new ParseField("shard_size"); - public SuggestionBuilder(String name) { - Objects.requireNonNull(name, "Suggester 'name' cannot be null"); - this.name = name; - } - - /** - * get the name for this suggestion - */ - public String name() { - return this.name; - } - /** * Same as in {@link SuggestBuilder#setGlobalText(String)}, but in the suggestion scope. */ @@ -117,7 +104,6 @@ public abstract class SuggestionBuilder> extends @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(name); if (text != null) { builder.field(TEXT_FIELD.getPreferredName(), text); } @@ -143,13 +129,12 @@ public abstract class SuggestionBuilder> extends builder = innerToXContent(builder, params); builder.endObject(); - builder.endObject(); return builder; } protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException; - public static SuggestionBuilder fromXContent(QueryParseContext parseContext, String suggestionName, Suggesters suggesters) + public static SuggestionBuilder fromXContent(QueryParseContext parseContext, Suggesters suggesters) throws IOException { XContentParser parser = parseContext.parser(); ParseFieldMatcher parsefieldMatcher = parseContext.parseFieldMatcher(); @@ -174,14 +159,11 @@ public abstract class SuggestionBuilder> extends throw new IllegalArgumentException("[suggestion] does not support [" + fieldName + "]"); } } else if (token == XContentParser.Token.START_OBJECT) { - if (suggestionName == null) { - throw new IllegalArgumentException("Suggestion must have name"); - } SuggestionBuilder suggestParser = suggesters.getSuggestionPrototype(fieldName); if (suggestParser == null) { throw new IllegalArgumentException("Suggester[" + fieldName + "] not supported"); } - suggestionBuilder = suggestParser.innerFromXContent(parseContext, suggestionName); + suggestionBuilder = suggestParser.innerFromXContent(parseContext); } } if (suggestText 
!= null) { @@ -196,7 +178,7 @@ public abstract class SuggestionBuilder> extends return suggestionBuilder; } - protected abstract SuggestionBuilder innerFromXContent(QueryParseContext parseContext, String name) throws IOException; + protected abstract SuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException; public SuggestionContext build(QueryShardContext context) throws IOException { SuggestionContext suggestionContext = innerBuild(context); @@ -357,8 +339,7 @@ public abstract class SuggestionBuilder> extends @Override public final T readFrom(StreamInput in) throws IOException { - String name = in.readString(); - T suggestionBuilder = doReadFrom(in, name); + T suggestionBuilder = doReadFrom(in); suggestionBuilder.fieldname = in.readOptionalString(); suggestionBuilder.text = in.readOptionalString(); suggestionBuilder.prefix = in.readOptionalString(); @@ -372,13 +353,11 @@ public abstract class SuggestionBuilder> extends /** * Subclass should return a new instance, reading itself from the input string * @param in the input string to read from - * @param name the name of the suggestion (read from stream by {@link SuggestionBuilder} */ - protected abstract T doReadFrom(StreamInput in, String name) throws IOException; + protected abstract T doReadFrom(StreamInput in) throws IOException; @Override public final void writeTo(StreamOutput out) throws IOException { - out.writeString(name); doWriteTo(out); out.writeOptionalString(fieldname); out.writeOptionalString(text); @@ -401,8 +380,7 @@ public abstract class SuggestionBuilder> extends } @SuppressWarnings("unchecked") T other = (T) obj; - return Objects.equals(name, other.name()) && - Objects.equals(text, other.text()) && + return Objects.equals(text, other.text()) && Objects.equals(prefix, other.prefix()) && Objects.equals(regex, other.regex()) && Objects.equals(fieldname, other.field()) && @@ -419,7 +397,7 @@ public abstract class SuggestionBuilder> extends @Override public final int 
hashCode() { - return Objects.hash(name, text, prefix, regex, fieldname, analyzer, size, shardSize, doHashCode()); + return Objects.hash(text, prefix, regex, fieldname, analyzer, size, shardSize, doHashCode()); } /** diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 3c7bf0850204..d8c7ededb954 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -50,7 +50,7 @@ import java.util.Set; */ public class CompletionSuggestionBuilder extends SuggestionBuilder { - public static final CompletionSuggestionBuilder PROTOTYPE = new CompletionSuggestionBuilder("_na_"); // name doesn't matter + public static final CompletionSuggestionBuilder PROTOTYPE = new CompletionSuggestionBuilder(); static final String SUGGESTION_NAME = "completion"; static final ParseField PAYLOAD_FIELD = new ParseField("payload"); static final ParseField CONTEXTS_FIELD = new ParseField("contexts", "context"); @@ -60,10 +60,6 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder> queryContexts = new HashMap<>(); private final Set payloadFields = new HashSet<>(); - public CompletionSuggestionBuilder(String name) { - super(name); - } - /** * Sets the prefix to provide completions for. * The prefix gets analyzed by the suggest analyzer. @@ -179,7 +175,7 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder> generators = new HashMap<>(); - public PhraseSuggestionBuilder(String name) { - super(name); - } - /** * Sets the gram size for the n-gram model used for this suggester. The * default value is 1 corresponding to unigrams. 
Use @@ -398,9 +393,9 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder { - public static final TermSuggestionBuilder PROTOTYPE = new TermSuggestionBuilder("_na_"); // name doesn't matter + public static final TermSuggestionBuilder PROTOTYPE = new TermSuggestionBuilder(); private static final String SUGGESTION_NAME = "term"; private SuggestMode suggestMode = SuggestMode.MISSING; @@ -81,14 +81,6 @@ public class TermSuggestionBuilder extends SuggestionBuilder { /** Only suggest terms in the suggest text that aren't in the index. This is the default. */ MISSING { + @Override public org.apache.lucene.search.spell.SuggestMode toLucene() { return org.apache.lucene.search.spell.SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX; } }, /** Only suggest terms that occur in more docs then the original suggest text term. */ POPULAR { + @Override public org.apache.lucene.search.spell.SuggestMode toLucene() { return org.apache.lucene.search.spell.SuggestMode.SUGGEST_MORE_POPULAR; } }, /** Suggest any matching suggest terms based on tokens in the suggest text. */ ALWAYS { + @Override public org.apache.lucene.search.spell.SuggestMode toLucene() { return org.apache.lucene.search.spell.SuggestMode.SUGGEST_ALWAYS; } @@ -516,30 +511,35 @@ public class TermSuggestionBuilder extends SuggestionBuilderdamerau_levenshtein, but highly optimized * for comparing string distance for terms inside the index. */ INTERNAL { + @Override public StringDistance toLucene() { return DirectSpellChecker.INTERNAL_LEVENSHTEIN; } }, /** String distance algorithm based on Damerau-Levenshtein algorithm. */ DAMERAU_LEVENSHTEIN { + @Override public StringDistance toLucene() { return new LuceneLevenshteinDistance(); } }, /** String distance algorithm based on Levenstein edit distance algorithm. */ LEVENSTEIN { + @Override public StringDistance toLucene() { return new LevensteinDistance(); } }, /** String distance algorithm based on Jaro-Winkler algorithm. 
*/ JAROWINKLER { + @Override public StringDistance toLucene() { return new JaroWinklerDistance(); } }, /** String distance algorithm based on character n-grams. */ NGRAM { + @Override public StringDistance toLucene() { return new NGramDistance(); } diff --git a/core/src/test/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java b/core/src/test/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java index 23c9e223972c..da10fb22b8ad 100644 --- a/core/src/test/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java +++ b/core/src/test/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java @@ -139,9 +139,9 @@ public class SuggestStatsIT extends ESIntegTestCase { private SuggestRequestBuilder addSuggestions(SuggestRequestBuilder request, int i) { for (int s = 0; s < randomIntBetween(2, 10); s++) { if (randomBoolean()) { - request.addSuggestion(new PhraseSuggestionBuilder("s" + s).field("f").text("test" + i + " test" + (i - 1))); + request.addSuggestion("s" + s, new PhraseSuggestionBuilder().field("f").text("test" + i + " test" + (i - 1))); } else { - request.addSuggestion(new TermSuggestionBuilder("s" + s).field("f").text("test" + i)); + request.addSuggestion("s" + s, new TermSuggestionBuilder().field("f").text("test" + i)); } } return request; diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java index 1d7d833c5000..cd254e8eb57b 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java @@ -749,7 +749,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { } private static SuggestRequestBuilder suggest(String... 
indices) { - return client().prepareSuggest(indices).addSuggestion(SuggestBuilders.termSuggestion("name").field("a")); + return client().prepareSuggest(indices).addSuggestion("name", SuggestBuilders.termSuggestion().field("a")); } private static GetAliasesRequestBuilder getAliases(String... indices) { diff --git a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index b18aa8e2012b..b1767e5d0d81 100644 --- a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -49,15 +49,9 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.EnvironmentModule; import org.elasticsearch.index.Index; import org.elasticsearch.index.query.AbstractQueryTestCase; -import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.EmptyQueryBuilder; -import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryRewriteContext; -import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.index.query.WrapperQueryBuilder; -import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -417,7 +411,8 @@ public class SearchSourceBuilderTests extends ESTestCase { if (randomBoolean()) { // NORELEASE need a random suggest builder method builder.suggest(new SuggestBuilder().setGlobalText(randomAsciiOfLengthBetween(1, 5)).addSuggestion( - SuggestBuilders.termSuggestion(randomAsciiOfLengthBetween(1, 5)))); + randomAsciiOfLengthBetween(1, 5), + 
SuggestBuilders.termSuggestion())); } if (randomBoolean()) { // NORELEASE need a random inner hits builder method diff --git a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index 1b76c2d895ef..45d8131199fe 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -204,10 +204,8 @@ public abstract class AbstractSuggestionBuilderTestCase secondSuggestionBuilder = SuggestionBuilder.fromXContent(context, suggestionBuilder.name(), suggesters); + SuggestionBuilder secondSuggestionBuilder = SuggestionBuilder.fromXContent(context, suggesters); assertNotSame(suggestionBuilder, secondSuggestionBuilder); assertEquals(suggestionBuilder, secondSuggestionBuilder); assertEquals(suggestionBuilder.hashCode(), secondSuggestionBuilder.hashCode()); @@ -253,7 +251,7 @@ public abstract class AbstractSuggestionBuilderTestCase options = completionSuggestion.getEntries().get(0).getOptions(); @@ -334,9 +334,9 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { payloadFields.add("test_field" + i); } - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg") + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg") .size(suggestionSize).payload(payloadFields); - SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(prefix).execute().actionGet(); + SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", prefix).execute().actionGet(); assertNoFailures(suggestResponse); CompletionSuggestion completionSuggestion = suggestResponse.getSuggest().getSuggestion("foo"); CompletionSuggestion.Entry options = completionSuggestion.getEntries().get(0); @@ 
-434,8 +434,8 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { refresh(); - SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - new CompletionSuggestionBuilder("testSuggestions").field(FIELD).text("test").size(10) + SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("testSuggestions", + new CompletionSuggestionBuilder().field(FIELD).text("test").size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, "testSuggestions", "testing"); @@ -635,16 +635,16 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { .get(); assertThat(putMappingResponse.isAcknowledged(), is(true)); - SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion("suggs").field(FIELD + ".suggest").text("f").size(10) + SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("suggs", + SuggestBuilders.completionSuggestion().field(FIELD + ".suggest").text("f").size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, "suggs"); client().prepareIndex(INDEX, TYPE, "1").setRefresh(true).setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject()).get(); ensureGreen(INDEX); - SuggestResponse afterReindexingResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion("suggs").field(FIELD + ".suggest").text("f").size(10) + SuggestResponse afterReindexingResponse = client().prepareSuggest(INDEX).addSuggestion("suggs", + SuggestBuilders.completionSuggestion().field(FIELD + ".suggest").text("f").size(10) ).execute().actionGet(); assertSuggestions(afterReindexingResponse, "suggs", "Foo Fighters"); } @@ -660,13 +660,13 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { refresh(); - SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nirv").size(10) + 
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", + SuggestBuilders.completionSuggestion().field(FIELD).prefix("Nirv").size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "Nirvana"); - suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nirw", Fuzziness.ONE).size(10) + suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", + SuggestBuilders.completionSuggestion().field(FIELD).prefix("Nirw", Fuzziness.ONE).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "Nirvana"); } @@ -683,14 +683,14 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { refresh(); // edit distance 1 - SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Norw", Fuzziness.ONE).size(10) + SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", + SuggestBuilders.completionSuggestion().field(FIELD).prefix("Norw", Fuzziness.ONE).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo"); // edit distance 2 - suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Norw", Fuzziness.TWO).size(10) + suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", + SuggestBuilders.completionSuggestion().field(FIELD).prefix("Norw", Fuzziness.TWO).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "Nirvana"); } @@ -706,13 +706,13 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { refresh(); - SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nriv", FuzzyOptions.builder().setTranspositions(false).build()).size(10) + SuggestResponse 
suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", + SuggestBuilders.completionSuggestion().field(FIELD).prefix("Nriv", FuzzyOptions.builder().setTranspositions(false).build()).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo"); - suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nriv", Fuzziness.ONE).size(10) + suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", + SuggestBuilders.completionSuggestion().field(FIELD).prefix("Nriv", Fuzziness.ONE).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "Nirvana"); } @@ -728,13 +728,13 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { refresh(); - SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nriva", FuzzyOptions.builder().setFuzzyMinLength(6).build()).size(10) + SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", + SuggestBuilders.completionSuggestion().field(FIELD).prefix("Nriva", FuzzyOptions.builder().setFuzzyMinLength(6).build()).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo"); - suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nrivan", FuzzyOptions.builder().setFuzzyMinLength(6).build()).size(10) + suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", + SuggestBuilders.completionSuggestion().field(FIELD).prefix("Nrivan", FuzzyOptions.builder().setFuzzyMinLength(6).build()).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "Nirvana"); } @@ -750,13 +750,13 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { refresh(); - SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - 
SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nirw", FuzzyOptions.builder().setFuzzyPrefixLength(4).build()).size(10) + SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", + SuggestBuilders.completionSuggestion().field(FIELD).prefix("Nirw", FuzzyOptions.builder().setFuzzyPrefixLength(4).build()).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo"); - suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nirvo", FuzzyOptions.builder().setFuzzyPrefixLength(4).build()).size(10) + suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", + SuggestBuilders.completionSuggestion().field(FIELD).prefix("Nirvo", FuzzyOptions.builder().setFuzzyPrefixLength(4).build()).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "Nirvana"); } @@ -774,19 +774,19 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { // suggestion with a character, which needs unicode awareness org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder completionSuggestionBuilder = - SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("öööи", FuzzyOptions.builder().setUnicodeAware(true).build()).size(10); + SuggestBuilders.completionSuggestion().field(FIELD).prefix("öööи", FuzzyOptions.builder().setUnicodeAware(true).build()).size(10); - SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(completionSuggestionBuilder).execute().actionGet(); + SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", completionSuggestionBuilder).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "ööööö"); // removing unicode awareness leads to no result - completionSuggestionBuilder = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("öööи", 
FuzzyOptions.builder().setUnicodeAware(false).build()).size(10); - suggestResponse = client().prepareSuggest(INDEX).addSuggestion(completionSuggestionBuilder).execute().actionGet(); + completionSuggestionBuilder = SuggestBuilders.completionSuggestion().field(FIELD).prefix("öööи", FuzzyOptions.builder().setUnicodeAware(false).build()).size(10); + suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", completionSuggestionBuilder).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo"); // increasing edit distance instead of unicode awareness works again, as this is only a single character - completionSuggestionBuilder = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("öööи", FuzzyOptions.builder().setUnicodeAware(false).setFuzziness(Fuzziness.TWO).build()).size(10); - suggestResponse = client().prepareSuggest(INDEX).addSuggestion(completionSuggestionBuilder).execute().actionGet(); + completionSuggestionBuilder = SuggestBuilders.completionSuggestion().field(FIELD).prefix("öööи", FuzzyOptions.builder().setUnicodeAware(false).setFuzziness(Fuzziness.TWO).build()).size(10); + suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", completionSuggestionBuilder).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "ööööö"); } @@ -815,8 +815,8 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { refresh(); ensureGreen(); // load the fst index into ram - client().prepareSuggest(INDEX).addSuggestion(SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("f")).get(); - client().prepareSuggest(INDEX).addSuggestion(SuggestBuilders.completionSuggestion("foo").field(otherField).prefix("f")).get(); + client().prepareSuggest(INDEX).addSuggestion("foo", SuggestBuilders.completionSuggestion().field(FIELD).prefix("f")).get(); + client().prepareSuggest(INDEX).addSuggestion("foo", SuggestBuilders.completionSuggestion().field(otherField).prefix("f")).get(); // Get all stats 
IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats(INDEX).setIndices(INDEX).setCompletion(true).get(); @@ -914,21 +914,21 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { public void assertSuggestions(String suggestionName, SuggestionBuilder suggestBuilder, String... suggestions) { - SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(suggestBuilder + SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(suggestionName, suggestBuilder ).execute().actionGet(); assertSuggestions(suggestResponse, suggestionName, suggestions); } public void assertSuggestions(String suggestion, String... suggestions) { String suggestionName = RandomStrings.randomAsciiOfLength(random(), 10); - CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion(suggestionName).field(FIELD).text(suggestion).size(10); + CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion().field(FIELD).text(suggestion).size(10); assertSuggestions(suggestionName, suggestionBuilder, suggestions); } public void assertSuggestionsNotInOrder(String suggestString, String... 
suggestions) { String suggestionName = RandomStrings.randomAsciiOfLength(random(), 10); - SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion( - SuggestBuilders.completionSuggestion(suggestionName).field(FIELD).text(suggestString).size(10) + SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(suggestionName, + SuggestBuilders.completionSuggestion().field(FIELD).text(suggestString).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, suggestionName, suggestions); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java index ee98973c5a27..bffba38dd15f 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.suggest; import com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.apache.lucene.spatial.util.GeoHashUtils; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -89,7 +90,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); } @@ -121,7 +122,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = 
SuggestBuilders.completionSuggestion("foo").field(FIELD).regex("sugg.*es"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).regex("sugg.*es"); assertSuggestions("foo", prefix, "sugg9estion", "sugg8estion", "sugg7estion", "sugg6estion", "sugg5estion"); } @@ -153,7 +154,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg", Fuzziness.ONE); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg", Fuzziness.ONE); assertSuggestions("foo", prefix, "sugxgestion9", "sugxgestion8", "sugxgestion7", "sugxgestion6", "sugxgestion5"); } @@ -178,7 +179,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg") + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg") .categoryContexts("cat", CategoryQueryContext.builder().setCategory("cat0").build()); assertSuggestions("foo", prefix, "suggestion8", "suggestion6", "suggestion4", "suggestion2", "suggestion0"); @@ -205,7 +206,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg") + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg") .categoryContexts("cat", CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(), CategoryQueryContext.builder().setCategory("cat1").build() @@ -235,7 +236,7 @@ public class ContextCompletionSuggestSearchIT extends 
ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); } @@ -265,17 +266,17 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { ensureYellow(INDEX); // filter only on context cat - CompletionSuggestionBuilder catFilterSuggest = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder catFilterSuggest = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); catFilterSuggest.categoryContexts("cat", CategoryQueryContext.builder().setCategory("cat0").build()); assertSuggestions("foo", catFilterSuggest, "suggestion8", "suggestion6", "suggestion4", "suggestion2", "suggestion0"); // filter only on context type - CompletionSuggestionBuilder typeFilterSuggest = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder typeFilterSuggest = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); typeFilterSuggest.categoryContexts("type", CategoryQueryContext.builder().setCategory("type2").build(), CategoryQueryContext.builder().setCategory("type1").build()); assertSuggestions("foo", typeFilterSuggest, "suggestion9", "suggestion6", "suggestion5", "suggestion2", "suggestion1"); - CompletionSuggestionBuilder multiContextFilterSuggest = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder multiContextFilterSuggest = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); // query context order should never matter if (randomBoolean()) { multiContextFilterSuggest.categoryContexts("type", 
CategoryQueryContext.builder().setCategory("type2").build()); @@ -313,21 +314,21 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { ensureYellow(INDEX); // boost only on context cat - CompletionSuggestionBuilder catBoostSuggest = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder catBoostSuggest = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); catBoostSuggest.categoryContexts("cat", CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(), CategoryQueryContext.builder().setCategory("cat1").build()); assertSuggestions("foo", catBoostSuggest, "suggestion8", "suggestion6", "suggestion4", "suggestion9", "suggestion2"); // boost only on context type - CompletionSuggestionBuilder typeBoostSuggest = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder typeBoostSuggest = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); typeBoostSuggest.categoryContexts("type", CategoryQueryContext.builder().setCategory("type2").setBoost(2).build(), CategoryQueryContext.builder().setCategory("type1").setBoost(4).build()); assertSuggestions("foo", typeBoostSuggest, "suggestion9", "suggestion5", "suggestion6", "suggestion1", "suggestion2"); // boost on both contexts - CompletionSuggestionBuilder multiContextBoostSuggest = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder multiContextBoostSuggest = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); // query context order should never matter if (randomBoolean()) { multiContextBoostSuggest.categoryContexts("type", @@ -374,7 +375,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg"); + 
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); } @@ -405,7 +406,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); assertSuggestions("foo", prefix, "suggestion0", "suggestion1", "suggestion2", "suggestion3", "suggestion4"); } @@ -431,7 +432,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); } @@ -458,10 +459,10 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); - CompletionSuggestionBuilder geoFilteringPrefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg") + CompletionSuggestionBuilder geoFilteringPrefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg") .geoContexts("geo", GeoQueryContext.builder().setGeoPoint(new 
GeoPoint(geoPoints[0])).build()); assertSuggestions("foo", geoFilteringPrefix, "suggestion8", "suggestion6", "suggestion4", "suggestion2", "suggestion0"); @@ -490,12 +491,12 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); GeoQueryContext context1 = GeoQueryContext.builder().setGeoPoint(geoPoints[0]).setBoost(2).build(); GeoQueryContext context2 = GeoQueryContext.builder().setGeoPoint(geoPoints[1]).build(); - CompletionSuggestionBuilder geoBoostingPrefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg") + CompletionSuggestionBuilder geoBoostingPrefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg") .geoContexts("geo", context1, context2); assertSuggestions("foo", geoBoostingPrefix, "suggestion8", "suggestion6", "suggestion4", "suggestion9", "suggestion7"); @@ -526,7 +527,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg") + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg") .geoContexts("geo", GeoQueryContext.builder().setGeoPoint(new GeoPoint(52.2263, 4.543)).build()); assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); } @@ -564,10 +565,10 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder 
prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); - CompletionSuggestionBuilder geoNeighbourPrefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg") + CompletionSuggestionBuilder geoNeighbourPrefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg") .geoContexts("geo", GeoQueryContext.builder().setGeoPoint(GeoPoint.fromGeohash(geohash)).build()); assertSuggestions("foo", geoNeighbourPrefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); @@ -624,16 +625,16 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { refresh(); String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(suggestionName).field(FIELD).text("h").size(10) + CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion().field(FIELD).text("h").size(10) .geoContexts("st", GeoQueryContext.builder().setGeoPoint(new GeoPoint(52.52, 13.4)).build()); - SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(context).get(); + SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(suggestionName, context).get(); assertEquals(suggestResponse.getSuggest().size(), 1); assertEquals("Hotel Amsterdam in Berlin", suggestResponse.getSuggest().getSuggestion(suggestionName).iterator().next().getOptions().iterator().next().getText().string()); } public void assertSuggestions(String suggestionName, SuggestionBuilder suggestBuilder, String... 
suggestions) { - SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(suggestBuilder + SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(suggestionName, suggestBuilder ).execute().actionGet(); CompletionSuggestSearchIT.assertSuggestions(suggestResponse, suggestionName, suggestions); } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java index c07d47dcf60b..ca8d4fa862e1 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java @@ -67,7 +67,7 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase { String randomField = randomAsciiOfLength(10); String randomSuffix = randomAsciiOfLength(10); SuggestBuilder suggestBuilder = new SuggestBuilder(); - suggestBuilder.addSuggestion(new CustomSuggestionBuilder("someName", randomField, randomSuffix).text(randomText)); + suggestBuilder.addSuggestion("someName", new CustomSuggestionBuilder(randomField, randomSuffix).text(randomText)); SearchRequestBuilder searchRequestBuilder = client().prepareSearch("test").setTypes("test").setFrom(0).setSize(1) .suggest(suggestBuilder); @@ -84,13 +84,12 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase { static class CustomSuggestionBuilder extends SuggestionBuilder { - public final static CustomSuggestionBuilder PROTOTYPE = new CustomSuggestionBuilder("_na_", "_na_", "_na_"); + public final static CustomSuggestionBuilder PROTOTYPE = new CustomSuggestionBuilder("_na_", "_na_"); private String randomField; private String randomSuffix; - public CustomSuggestionBuilder(String name, String randomField, String randomSuffix) { - super(name); + public CustomSuggestionBuilder(String randomField, String randomSuffix) { this.randomField = randomField; this.randomSuffix = randomSuffix; 
} @@ -114,8 +113,8 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase { } @Override - public CustomSuggestionBuilder doReadFrom(StreamInput in, String name) throws IOException { - return new CustomSuggestionBuilder(in.readString(), in.readString(), in.readString()); + public CustomSuggestionBuilder doReadFrom(StreamInput in) throws IOException { + return new CustomSuggestionBuilder(in.readString(), in.readString()); } @Override @@ -130,10 +129,10 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase { } @Override - protected CustomSuggestionBuilder innerFromXContent(QueryParseContext parseContext, String name) + protected CustomSuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException { // TODO some parsing - return new CustomSuggestionBuilder(name, randomField, randomSuffix); + return new CustomSuggestionBuilder(randomField, randomSuffix); } @Override diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java index 5d327bb3e4c1..33745ae942da 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java @@ -42,6 +42,7 @@ import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import java.io.IOException; import java.util.Collections; +import java.util.Map.Entry; public class SuggestBuilderTests extends WritableTestCase { @@ -91,7 +92,7 @@ public class SuggestBuilderTests extends WritableTestCase { } int numberOfSuggestions = randomIntBetween(0, 5); for (int i = 0; i < numberOfSuggestions; i++) { - suggestBuilder.addSuggestion(PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder()); + suggestBuilder.addSuggestion(randomAsciiOfLength(10), PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder()); } return suggestBuilder; } @@ -99,17 +100,34 @@ public class SuggestBuilderTests 
extends WritableTestCase { @Override protected SuggestBuilder createMutation(SuggestBuilder original) throws IOException { SuggestBuilder mutation = new SuggestBuilder().setGlobalText(original.getGlobalText()); - for (SuggestionBuilder suggestionBuilder : original.getSuggestions()) { - mutation.addSuggestion(suggestionBuilder); + for (Entry> suggestionBuilder : original.getSuggestions().entrySet()) { + mutation.addSuggestion(suggestionBuilder.getKey(), suggestionBuilder.getValue()); } if (randomBoolean()) { mutation.setGlobalText(randomAsciiOfLengthBetween(5, 60)); } else { - mutation.addSuggestion(PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder()); + mutation.addSuggestion(randomAsciiOfLength(10), PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder()); } return mutation; } + public void testIllegalSuggestionName() { + try { + new SuggestBuilder().addSuggestion(null, PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder()); + fail("exception expected"); + } catch (NullPointerException e) { + assertEquals("every suggestion needs a name", e.getMessage()); + } + + try { + new SuggestBuilder().addSuggestion("my-suggest", PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder()) + .addSuggestion("my-suggest", PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder()); + fail("exception expected"); + } catch (IllegalArgumentException e) { + assertEquals("already added another suggestion with name [my-suggest]", e.getMessage()); + } + } + @Override protected SuggestBuilder readFrom(StreamInput in) throws IOException { return SuggestBuilder.PROTOTYPE.readFrom(in); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java index 9fc5988c07ce..58e719dcaa2e 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java +++ 
b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.suggest.completion; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase; -import org.elasticsearch.search.suggest.SuggestionSearchContext; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext; import org.elasticsearch.search.suggest.completion.context.GeoQueryContext; @@ -30,7 +29,6 @@ import org.junit.BeforeClass; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -44,7 +42,7 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe @Override protected CompletionSuggestionBuilder randomSuggestionBuilder() { - CompletionSuggestionBuilder testBuilder = new CompletionSuggestionBuilder(randomAsciiOfLength(10)); + CompletionSuggestionBuilder testBuilder = new CompletionSuggestionBuilder(); switch (randomIntBetween(0, 3)) { case 0: testBuilder.prefix(randomAsciiOfLength(10)); @@ -96,6 +94,7 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe // skip for now } + @Override protected void mutateSpecificParameters(CompletionSuggestionBuilder builder) throws IOException { switch (randomIntBetween(0, 5)) { case 0: diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java index 2a553ef8cb9f..cedbb91fc615 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java @@ -47,7 +47,7 @@ public class PhraseSuggestionBuilderTests 
extends AbstractSuggestionBuilderTestC } public static PhraseSuggestionBuilder randomPhraseSuggestionBuilder() { - PhraseSuggestionBuilder testBuilder = new PhraseSuggestionBuilder(randomAsciiOfLength(10)); + PhraseSuggestionBuilder testBuilder = new PhraseSuggestionBuilder(); maybeSet(testBuilder::maxErrors, randomFloat()); maybeSet(testBuilder::separator, randomAsciiOfLengthBetween(1, 10)); maybeSet(testBuilder::realWordErrorLikelihood, randomFloat()); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java index dfa0ab4bf609..ee0e92279d73 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java @@ -40,7 +40,7 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas */ @Override protected TermSuggestionBuilder randomSuggestionBuilder() { - TermSuggestionBuilder testBuilder = new TermSuggestionBuilder(randomAsciiOfLength(10)); + TermSuggestionBuilder testBuilder = new TermSuggestionBuilder(); maybeSet(testBuilder::suggestMode, randomSuggestMode()); maybeSet(testBuilder::accuracy, randomFloat()); maybeSet(testBuilder::sort, randomSort()); @@ -124,7 +124,7 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas } public void testInvalidParameters() throws IOException { - TermSuggestionBuilder builder = new TermSuggestionBuilder(randomAsciiOfLength(10)); + TermSuggestionBuilder builder = new TermSuggestionBuilder(); // test invalid accuracy values try { builder.accuracy(-0.5f); @@ -237,7 +237,7 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas } public void testDefaultValuesSet() { - TermSuggestionBuilder builder = new TermSuggestionBuilder(randomAsciiOfLength(10)); + TermSuggestionBuilder builder = new 
TermSuggestionBuilder(); assertThat(builder.accuracy(), notNullValue()); assertThat(builder.maxEdits(), notNullValue()); assertThat(builder.maxInspections(), notNullValue()); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java index f52678f72f0c..fa50601cca78 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java @@ -20,6 +20,38 @@ package org.elasticsearch.messy.tests; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; +import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.elasticsearch.index.query.QueryBuilders.matchQuery; +import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion; +import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestion; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionPhraseCollateMatchExists; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionSize; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.nullValue; + +import java.io.IOException; +import 
java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.concurrent.ExecutionException; + import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -49,37 +81,6 @@ import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; -import java.io.IOException; -import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ExecutionException; - -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; -import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion; -import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestion; -import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionPhraseCollateMatchExists; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionSize; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; -import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.endsWith; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.nullValue; - /** * Integration tests for term and phrase suggestions. Many of these tests many requests that vary only slightly from one another. Where * possible these tests should declare for the first request, make the request, modify the configuration for the next request, make that @@ -103,12 +104,12 @@ public class SuggestSearchTests extends ESIntegTestCase { index("test", "type1", "4", "text", "abcc"); refresh(); - TermSuggestionBuilder termSuggest = termSuggestion("test") + TermSuggestionBuilder termSuggest = termSuggestion() .suggestMode(TermSuggestionBuilder.SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. .text("abcd") .field("text"); logger.info("--> run suggestions with one index"); - searchSuggest( termSuggest); + searchSuggest("test", termSuggest); createIndex("test_1"); ensureGreen(); @@ -117,13 +118,13 @@ public class SuggestSearchTests extends ESIntegTestCase { index("test_1", "type1", "3", "text", "ab bd"); index("test_1", "type1", "4", "text", "ab cc"); refresh(); - termSuggest = termSuggestion("test") + termSuggest = termSuggestion() .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. 
.text("ab cd") .minWordLength(1) .field("text"); logger.info("--> run suggestions with two indices"); - searchSuggest( termSuggest); + searchSuggest("test", termSuggest); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") @@ -144,14 +145,14 @@ public class SuggestSearchTests extends ESIntegTestCase { index("test_2", "type1", "4", "text", "abcc"); refresh(); - termSuggest = termSuggestion("test") + termSuggest = termSuggestion() .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. .text("ab cd") .minWordLength(1) .field("text"); logger.info("--> run suggestions with three indices"); try { - searchSuggest( termSuggest); + searchSuggest("test", termSuggest); fail(" can not suggest across multiple indices with different analysis chains"); } catch (ReduceSearchPhaseException ex) { assertThat(ex.getCause(), instanceOf(IllegalStateException.class)); @@ -164,14 +165,14 @@ public class SuggestSearchTests extends ESIntegTestCase { } - termSuggest = termSuggestion("test") + termSuggest = termSuggestion() .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. 
.text("ABCD") .minWordLength(1) .field("text"); logger.info("--> run suggestions with four indices"); try { - searchSuggest( termSuggest); + searchSuggest("test", termSuggest); fail(" can not suggest across multiple indices with different analysis chains"); } catch (ReduceSearchPhaseException ex) { assertThat(ex.getCause(), instanceOf(IllegalStateException.class)); @@ -218,14 +219,14 @@ public class SuggestSearchTests extends ESIntegTestCase { refresh(); DirectCandidateGeneratorBuilder generator = candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2); - PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("did_you_mean").field("name.shingled") + PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion().field("name.shingled") .addCandidateGenerator(generator) .gramSize(3); - Suggest searchSuggest = searchSuggest( "ice tea", phraseSuggestion); + Suggest searchSuggest = searchSuggest("ice tea", "did_you_mean", phraseSuggestion); assertSuggestion(searchSuggest, 0, "did_you_mean", "iced tea"); generator.suggestMode(null); - searchSuggest = searchSuggest( "ice tea", phraseSuggestion); + searchSuggest = searchSuggest( "ice tea", "did_you_mean", phraseSuggestion); assertSuggestionSize(searchSuggest, 0, 0, "did_you_mean"); } @@ -254,16 +255,16 @@ public class SuggestSearchTests extends ESIntegTestCase { SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "spellchecker")).get(); assertThat("didn't ask for suggestions but got some", search.getSuggest(), nullValue()); - TermSuggestionBuilder termSuggestion = termSuggestion("test") + TermSuggestionBuilder termSuggestion = termSuggestion() .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. 
.text("abcd") .field("text") .size(10); - Suggest suggest = searchSuggest( termSuggestion); + Suggest suggest = searchSuggest("test", termSuggestion); assertSuggestion(suggest, 0, "test", 10, "abc0"); termSuggestion.text("abcd").shardSize(5); - suggest = searchSuggest( termSuggestion); + suggest = searchSuggest("test", termSuggestion); assertSuggestion(suggest, 0, "test", 5, "abc0"); } @@ -297,21 +298,21 @@ public class SuggestSearchTests extends ESIntegTestCase { client().prepareIndex("test", "type1").setSource("name", "I like ice cream.")); refresh(); - PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("did_you_mean").field("name.shingled") + PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion().field("name.shingled") .addCandidateGenerator(candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2)) .gramSize(3); - Suggest searchSuggest = searchSuggest( "ice tea", phraseSuggestion); + Suggest searchSuggest = searchSuggest("ice tea", "did_you_mean", phraseSuggestion); assertSuggestion(searchSuggest, 0, 0, "did_you_mean", "iced tea"); phraseSuggestion.field("nosuchField"); { SearchRequestBuilder searchBuilder = client().prepareSearch().setSize(0); - searchBuilder.suggest(new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion(phraseSuggestion)); + searchBuilder.suggest(new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean", phraseSuggestion)); assertThrows(searchBuilder, SearchPhaseExecutionException.class); } { SearchRequestBuilder searchBuilder = client().prepareSearch().setSize(0); - searchBuilder.suggest(new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion(phraseSuggestion)); + searchBuilder.suggest(new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean", phraseSuggestion)); assertThrows(searchBuilder, SearchPhaseExecutionException.class); } } @@ -329,15 +330,15 @@ public class SuggestSearchTests extends 
ESIntegTestCase { SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "spellcecker")).get(); assertThat("didn't ask for suggestions but got some", search.getSuggest(), nullValue()); - TermSuggestionBuilder termSuggest = termSuggestion("test") + TermSuggestionBuilder termSuggest = termSuggestion() .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. .text("abcd") .field("text"); - Suggest suggest = searchSuggest( termSuggest); + Suggest suggest = searchSuggest("test", termSuggest); assertSuggestion(suggest, 0, "test", "aacd", "abbd", "abcc"); assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd")); - suggest = searchSuggest( termSuggest); + suggest = searchSuggest("test", termSuggest); assertSuggestion(suggest, 0, "test", "aacd","abbd", "abcc"); assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd")); } @@ -349,15 +350,15 @@ public class SuggestSearchTests extends ESIntegTestCase { index("test", "type1", "1", "foo", "bar"); refresh(); - TermSuggestionBuilder termSuggest = termSuggestion("test") + TermSuggestionBuilder termSuggest = termSuggestion() .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. 
.text("abcd") .field("text"); - Suggest suggest = searchSuggest( termSuggest); + Suggest suggest = searchSuggest("test", termSuggest); assertSuggestionSize(suggest, 0, 0, "test"); assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd")); - suggest = searchSuggest( termSuggest); + suggest = searchSuggest("test", termSuggest); assertSuggestionSize(suggest, 0, 0, "test"); assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd")); } @@ -372,16 +373,17 @@ public class SuggestSearchTests extends ESIntegTestCase { index("test", "typ1", "4", "field1", "prefix_abcc", "field2", "prefix_eggg"); refresh(); - Suggest suggest = searchSuggest( - termSuggestion("size1") - .size(1).text("prefix_abcd").maxTermFreq(10).prefixLength(1).minDocFreq(0) - .field("field1").suggestMode(SuggestMode.ALWAYS), - termSuggestion("field2") - .field("field2").text("prefix_eeeh prefix_efgh") - .maxTermFreq(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS), - termSuggestion("accuracy") - .field("field2").text("prefix_efgh").accuracy(1f) - .maxTermFreq(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS)); + Map> suggestions = new HashMap<>(); + suggestions.put("size1", termSuggestion() + .size(1).text("prefix_abcd").maxTermFreq(10).prefixLength(1).minDocFreq(0) + .field("field1").suggestMode(SuggestMode.ALWAYS)); + suggestions.put("field2", termSuggestion() + .field("field2").text("prefix_eeeh prefix_efgh") + .maxTermFreq(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS)); + suggestions.put("accuracy", termSuggestion() + .field("field2").text("prefix_efgh").accuracy(1f) + .maxTermFreq(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS)); + Suggest suggest = searchSuggest(null, 0, suggestions); assertSuggestion(suggest, 0, "size1", "prefix_aacd"); assertThat(suggest.getSuggestion("field2").getEntries().get(0).getText().string(), equalTo("prefix_eeeh")); assertSuggestion(suggest, 0, "field2", "prefix_efgh"); @@ -415,17 +417,18 
@@ public class SuggestSearchTests extends ESIntegTestCase { } refresh(); - Suggest suggest = searchSuggest( "prefix_abcd", - termSuggestion("size3SortScoreFirst") - .size(3).minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS), - termSuggestion("size10SortScoreFirst") - .size(10).minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS).shardSize(50), - termSuggestion("size3SortScoreFirstMaxEdits1") - .maxEdits(1) - .size(10).minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS), - termSuggestion("size10SortFrequencyFirst") - .size(10).sort(SortBy.FREQUENCY).shardSize(1000) - .minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS)); + Map> suggestions = new HashMap<>(); + suggestions.put("size3SortScoreFirst", termSuggestion() + .size(3).minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS)); + suggestions.put("size10SortScoreFirst", termSuggestion() + .size(10).minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS).shardSize(50)); + suggestions.put("size3SortScoreFirstMaxEdits1", termSuggestion() + .maxEdits(1) + .size(10).minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS)); + suggestions.put("size10SortFrequencyFirst", termSuggestion() + .size(10).sort(SortBy.FREQUENCY).shardSize(1000) + .minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS)); + Suggest suggest = searchSuggest("prefix_abcd", 0, suggestions); // The commented out assertions fail sometimes because suggestions are based off of shard frequencies instead of index frequencies. 
assertSuggestion(suggest, 0, "size3SortScoreFirst", "prefix_aacd", "prefix_abcc", "prefix_accd"); @@ -449,8 +452,8 @@ public class SuggestSearchTests extends ESIntegTestCase { index("test", "typ1", "1", "body", "this is a test"); refresh(); - Suggest searchSuggest = searchSuggest( "a an the", - phraseSuggestion("simple_phrase").field("body").gramSize(1) + Suggest searchSuggest = searchSuggest( "a an the", "simple_phrase", + phraseSuggestion().field("body").gramSize(1) .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always")) .size(1)); assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase"); @@ -485,14 +488,14 @@ public class SuggestSearchTests extends ESIntegTestCase { index("test", "type1", "3", "body", "hello words"); refresh(); - Suggest searchSuggest = searchSuggest( "hello word", - phraseSuggestion("simple_phrase").field("body") + Suggest searchSuggest = searchSuggest( "hello word", "simple_phrase", + phraseSuggestion().field("body") .addCandidateGenerator(candidateGenerator("body").prefixLength(4).minWordLength(1).suggestMode("always")) .size(1).confidence(1.0f)); assertSuggestion(searchSuggest, 0, "simple_phrase", "hello words"); - searchSuggest = searchSuggest( "hello word", - phraseSuggestion("simple_phrase").field("body") + searchSuggest = searchSuggest( "hello word", "simple_phrase", + phraseSuggestion().field("body") .addCandidateGenerator(candidateGenerator("body").prefixLength(2).minWordLength(1).suggestMode("always")) .size(1).confidence(1.0f)); assertSuggestion(searchSuggest, 0, "simple_phrase", "hello world"); @@ -540,88 +543,88 @@ public class SuggestSearchTests extends ESIntegTestCase { } refresh(); - PhraseSuggestionBuilder phraseSuggest = phraseSuggestion("simple_phrase") + PhraseSuggestionBuilder phraseSuggest = phraseSuggestion() .field("bigram").gramSize(2).analyzer("body") .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always")) .size(1); - Suggest searchSuggest = 
searchSuggest( "american ame", phraseSuggest); + Suggest searchSuggest = searchSuggest( "american ame", "simple_phrase", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "american ace"); assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(), equalTo("american ame")); phraseSuggest.realWordErrorLikelihood(0.95f); - searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); + searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); // Check the "text" field this one time. assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(), equalTo("Xor the Got-Jewel")); // Ask for highlighting phraseSuggest.highlight("", ""); - searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); + searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getOptions().get(0).getHighlighted().string(), equalTo("xorr the god jewel")); // pass in a correct phrase phraseSuggest.highlight(null, null).confidence(0f).size(1).maxErrors(0.5f); - searchSuggest = searchSuggest( "Xorr the God-Jewel", phraseSuggest); + searchSuggest = searchSuggest( "Xorr the God-Jewel", "simple_phrase", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); // pass in a correct phrase - set confidence to 2 phraseSuggest.confidence(2f); - searchSuggest = searchSuggest( "Xorr the God-Jewel", phraseSuggest); + searchSuggest = searchSuggest( "Xorr the God-Jewel", "simple_phrase", phraseSuggest); assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase"); // pass in a correct phrase - set confidence to 0.99 phraseSuggest.confidence(0.99f); - searchSuggest = searchSuggest( "Xorr the God-Jewel", 
phraseSuggest); + searchSuggest = searchSuggest( "Xorr the God-Jewel", "simple_phrase", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); //test reverse suggestions with pre & post filter phraseSuggest .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always")) .addCandidateGenerator(candidateGenerator("body_reverse").minWordLength(1).suggestMode("always").preFilter("reverse").postFilter("reverse")); - searchSuggest = searchSuggest( "xor the yod-Jewel", phraseSuggest); + searchSuggest = searchSuggest( "xor the yod-Jewel", "simple_phrase", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); // set all mass to trigrams (not indexed) phraseSuggest.clearCandidateGenerators() .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always")) .smoothingModel(new LinearInterpolation(1,0,0)); - searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); + searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest); assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase"); // set all mass to bigrams phraseSuggest.smoothingModel(new LinearInterpolation(0,1,0)); - searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); + searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); // distribute mass phraseSuggest.smoothingModel(new LinearInterpolation(0.4,0.4,0.2)); - searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); + searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); - searchSuggest = searchSuggest( "american ame", phraseSuggest); + searchSuggest = searchSuggest( "american ame", "simple_phrase", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "american 
ace"); // try all smoothing methods phraseSuggest.smoothingModel(new LinearInterpolation(0.4,0.4,0.2)); - searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); + searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); phraseSuggest.smoothingModel(new Laplace(0.2)); - searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); + searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); phraseSuggest.smoothingModel(new StupidBackoff(0.1)); - searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); + searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase",phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); // check tokenLimit phraseSuggest.smoothingModel(null).tokenLimit(4); - searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); + searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest); assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase"); phraseSuggest.tokenLimit(15).smoothingModel(new StupidBackoff(0.1)); - searchSuggest = searchSuggest( "Xor the Got-Jewel Xor the Got-Jewel Xor the Got-Jewel", phraseSuggest); + searchSuggest = searchSuggest( "Xor the Got-Jewel Xor the Got-Jewel Xor the Got-Jewel", "simple_phrase", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel xorr the god jewel xorr the god jewel"); // Check the name this time because we're repeating it which is funky assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(), equalTo("Xor the Got-Jewel Xor the Got-Jewel Xor the Got-Jewel")); @@ -677,7 +680,7 @@ public class SuggestSearchTests extends ESIntegTestCase { index("test", "type1", "2", "body", line, "body_reverse", line, "bigram", line); 
refresh(); - PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("simple_phrase") + PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion() .realWordErrorLikelihood(0.95f) .field("bigram") .gramSize(2) @@ -686,13 +689,13 @@ public class SuggestSearchTests extends ESIntegTestCase { .smoothingModel(new StupidBackoff(0.1)) .maxErrors(1.0f) .size(5); - Suggest searchSuggest = searchSuggest( "Xorr the Gut-Jewel", phraseSuggestion); + Suggest searchSuggest = searchSuggest("Xorr the Gut-Jewel", "simple_phrase", phraseSuggestion); assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase"); // we allow a size of 2 now on the shard generator level so "god" will be found since it's LD2 phraseSuggestion.clearCandidateGenerators() .addCandidateGenerator(candidateGenerator("body").minWordLength(1).prefixLength(1).suggestMode("always").size(2).accuracy(0.1f)); - searchSuggest = searchSuggest( "Xorr the Gut-Jewel", phraseSuggestion); + searchSuggest = searchSuggest( "Xorr the Gut-Jewel", "simple_phrase", phraseSuggestion); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); } @@ -737,51 +740,52 @@ public class SuggestSearchTests extends ESIntegTestCase { NumShards numShards = getNumShards("test"); // Lets make sure some things throw exceptions - PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("simple_phrase") + PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion() .field("bigram") .analyzer("body") .addCandidateGenerator(candidateGenerator("does_not_exist").minWordLength(1).suggestMode("always")) .realWordErrorLikelihood(0.95f) .maxErrors(0.5f) .size(1); + Map> suggestion = new HashMap<>(); + suggestion.put("simple_phrase", phraseSuggestion); try { - searchSuggest( "Xor the Got-Jewel", numShards.numPrimaries, phraseSuggestion); + searchSuggest("Xor the Got-Jewel", numShards.numPrimaries, suggestion); fail("field does not exists"); } catch (SearchPhaseExecutionException e) {} 
phraseSuggestion.clearCandidateGenerators().analyzer(null); try { - searchSuggest( "Xor the Got-Jewel", numShards.numPrimaries, phraseSuggestion); + searchSuggest("Xor the Got-Jewel", numShards.numPrimaries, suggestion); fail("analyzer does only produce ngrams"); } catch (SearchPhaseExecutionException e) { } phraseSuggestion.analyzer("bigram"); try { - searchSuggest( "Xor the Got-Jewel", numShards.numPrimaries, phraseSuggestion); + searchSuggest("Xor the Got-Jewel", numShards.numPrimaries, suggestion); fail("analyzer does only produce ngrams"); } catch (SearchPhaseExecutionException e) { } // Now we'll make sure some things don't phraseSuggestion.forceUnigrams(false); - searchSuggest( "Xor the Got-Jewel", phraseSuggestion); + searchSuggest( "Xor the Got-Jewel", 0, suggestion); // Field doesn't produce unigrams but the analyzer does phraseSuggestion.forceUnigrams(true).field("bigram").analyzer("ngram"); - searchSuggest( "Xor the Got-Jewel", - phraseSuggestion); + searchSuggest( "Xor the Got-Jewel", 0, suggestion); phraseSuggestion.field("ngram").analyzer("myDefAnalyzer") .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always")); - Suggest suggest = searchSuggest( "Xor the Got-Jewel", phraseSuggestion); + Suggest suggest = searchSuggest( "Xor the Got-Jewel", 0, suggestion); // "xorr the god jewel" and and "xorn the god jewel" have identical scores (we are only using unigrams to score), so we tie break by // earlier term (xorn): assertSuggestion(suggest, 0, "simple_phrase", "xorn the god jewel"); phraseSuggestion.analyzer(null); - suggest = searchSuggest( "Xor the Got-Jewel", phraseSuggestion); + suggest = searchSuggest( "Xor the Got-Jewel", 0, suggestion); // In this case xorr has a better score than xorn because we set the field back to the default (my_shingle2) analyzer, so the // probability that the term is not in the dictionary but is NOT a misspelling is relatively high in this case compared to the @@ -796,8 +800,8 @@ public 
class SuggestSearchTests extends ESIntegTestCase { client().prepareIndex("test", "type1", "2").setSource("field1", "foobar2").setRouting("2"), client().prepareIndex("test", "type1", "3").setSource("field1", "foobar3").setRouting("3")); - Suggest suggest = searchSuggest( "foobar", - termSuggestion("simple") + Suggest suggest = searchSuggest( "foobar", "simple", + termSuggestion() .size(10).minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS)); ElasticsearchAssertions.assertSuggestionSize(suggest, 0, 3, "simple"); } @@ -837,15 +841,15 @@ public class SuggestSearchTests extends ESIntegTestCase { // When searching on a shard with a non existing mapping, we should fail SearchRequestBuilder request = client().prepareSearch().setSize(0) .suggest( - new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion( - phraseSuggestion("did_you_mean").field("fielddoesnotexist").maxErrors(5.0f))); + new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean", + phraseSuggestion().field("fielddoesnotexist").maxErrors(5.0f))); assertThrows(request, SearchPhaseExecutionException.class); // When searching on a shard which does not hold yet any document of an existing type, we should not fail SearchResponse searchResponse = client().prepareSearch().setSize(0) .suggest( - new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion( - phraseSuggestion("did_you_mean").field("name").maxErrors(5.0f))) + new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean", + phraseSuggestion().field("name").maxErrors(5.0f))) .get(); ElasticsearchAssertions.assertNoFailures(searchResponse); ElasticsearchAssertions.assertSuggestion(searchResponse.getSuggest(), 0, 0, "did_you_mean", "testing suggestions"); @@ -883,8 +887,8 @@ public class SuggestSearchTests extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch() .setSize(0) .suggest( - new SuggestBuilder().setGlobalText("tetsting 
sugestion").addSuggestion( - phraseSuggestion("did_you_mean").field("name").maxErrors(5.0f))) + new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean", + phraseSuggestion().field("name").maxErrors(5.0f))) .get(); assertNoFailures(searchResponse); @@ -941,7 +945,7 @@ public class SuggestSearchTests extends ESIntegTestCase { } refresh(); - Suggest searchSuggest = searchSuggest("nobel prize", phraseSuggestion("simple_phrase") + Suggest searchSuggest = searchSuggest("nobel prize", "simple_phrase", phraseSuggestion() .field("body") .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f)) .confidence(2f) @@ -949,7 +953,7 @@ public class SuggestSearchTests extends ESIntegTestCase { .size(1)); assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase"); - searchSuggest = searchSuggest("noble prize", phraseSuggestion("simple_phrase") + searchSuggest = searchSuggest("noble prize", "simple_phrase", phraseSuggestion() .field("body") .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f)) .confidence(2f) @@ -1080,7 +1084,7 @@ public class SuggestSearchTests extends ESIntegTestCase { } indexRandom(true, builders); - PhraseSuggestionBuilder suggest = phraseSuggestion("title") + PhraseSuggestionBuilder suggest = phraseSuggestion() .field("title") .addCandidateGenerator(candidateGenerator("title") .suggestMode("always") @@ -1092,13 +1096,13 @@ public class SuggestSearchTests extends ESIntegTestCase { .maxErrors(2f) .shardSize(30000) .size(30000); - Suggest searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", suggest); + Suggest searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", suggest); assertSuggestion(searchSuggest, 0, 0, "title", "united states house of representatives elections in washington 2006"); assertSuggestionSize(searchSuggest, 0, 
25480, "title"); // Just to prove that we've run through a ton of options suggest.size(1); long start = System.currentTimeMillis(); - searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", suggest); + searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", suggest); long total = System.currentTimeMillis() - start; assertSuggestion(searchSuggest, 0, 0, "title", "united states house of representatives elections in washington 2006"); // assertThat(total, lessThan(1000L)); // Takes many seconds without fix - just for debugging @@ -1145,7 +1149,7 @@ public class SuggestSearchTests extends ESIntegTestCase { indexRandom(true, builders); // suggest without collate - PhraseSuggestionBuilder suggest = phraseSuggestion("title") + PhraseSuggestionBuilder suggest = phraseSuggestion() .field("title") .addCandidateGenerator(candidateGenerator("title") .suggestMode("always") @@ -1157,7 +1161,7 @@ public class SuggestSearchTests extends ESIntegTestCase { .maxErrors(2f) .shardSize(30000) .size(10); - Suggest searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", suggest); + Suggest searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", suggest); assertSuggestionSize(searchSuggest, 0, 10, "title"); // suggest with collate @@ -1170,11 +1174,11 @@ public class SuggestSearchTests extends ESIntegTestCase { .string(); PhraseSuggestionBuilder filteredQuerySuggest = suggest.collateQuery(filterString); filteredQuerySuggest.collateParams(Collections.singletonMap("field", "title")); - searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", filteredQuerySuggest); + searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", filteredQuerySuggest); assertSuggestionSize(searchSuggest, 0, 2, "title"); // 
collate suggest with no result (boundary case) - searchSuggest = searchSuggest("Elections of Representatives Parliament", filteredQuerySuggest); + searchSuggest = searchSuggest("Elections of Representatives Parliament", "title", filteredQuerySuggest); assertSuggestionSize(searchSuggest, 0, 0, "title"); NumShards numShards = getNumShards("test"); @@ -1188,8 +1192,10 @@ public class SuggestSearchTests extends ESIntegTestCase { .endObject() .string(); PhraseSuggestionBuilder incorrectFilteredSuggest = suggest.collateQuery(incorrectFilterString); + Map> namedSuggestion = new HashMap<>(); + namedSuggestion.put("my_title_suggestion", incorrectFilteredSuggest); try { - searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, incorrectFilteredSuggest); + searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion); fail("Post query error has been swallowed"); } catch(ElasticsearchException e) { // expected @@ -1205,7 +1211,7 @@ public class SuggestSearchTests extends ESIntegTestCase { .string(); PhraseSuggestionBuilder filteredFilterSuggest = suggest.collateQuery(filterStringAsFilter); - searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", filteredFilterSuggest); + searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", filteredFilterSuggest); assertSuggestionSize(searchSuggest, 0, 2, "title"); // collate suggest with bad query @@ -1219,7 +1225,7 @@ public class SuggestSearchTests extends ESIntegTestCase { PhraseSuggestionBuilder in = suggest.collateQuery(filterStr); try { - searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, in); + searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion); fail("Post filter error has been swallowed"); 
} catch(ElasticsearchException e) { //expected @@ -1237,7 +1243,7 @@ public class SuggestSearchTests extends ESIntegTestCase { PhraseSuggestionBuilder phraseSuggestWithNoParams = suggest.collateQuery(collateWithParams); try { - searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, phraseSuggestWithNoParams); + searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion); fail("Malformed query (lack of additional params) should fail"); } catch (ElasticsearchException e) { // expected @@ -1249,33 +1255,35 @@ public class SuggestSearchTests extends ESIntegTestCase { params.put("query_field", "title"); PhraseSuggestionBuilder phraseSuggestWithParams = suggest.collateQuery(collateWithParams).collateParams(params); - searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", phraseSuggestWithParams); + searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", phraseSuggestWithParams); assertSuggestionSize(searchSuggest, 0, 2, "title"); // collate query request with prune set to true PhraseSuggestionBuilder phraseSuggestWithParamsAndReturn = suggest.collateQuery(collateWithParams).collateParams(params).collatePrune(true); - searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", phraseSuggestWithParamsAndReturn); + searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", phraseSuggestWithParamsAndReturn); assertSuggestionSize(searchSuggest, 0, 10, "title"); assertSuggestionPhraseCollateMatchExists(searchSuggest, "title", 2); } - protected Suggest searchSuggest(SuggestionBuilder... 
suggestion) { - return searchSuggest(null, suggestion); + protected Suggest searchSuggest(String name, SuggestionBuilder suggestion) { + return searchSuggest(null, name, suggestion); } - protected Suggest searchSuggest(String suggestText, SuggestionBuilder... suggestions) { - return searchSuggest(suggestText, 0, suggestions); + protected Suggest searchSuggest(String suggestText, String name, SuggestionBuilder suggestion) { + Map> map = new HashMap<>(); + map.put(name, suggestion); + return searchSuggest(suggestText, 0, map); } - protected Suggest searchSuggest(String suggestText, int expectShardsFailed, SuggestionBuilder... suggestions) { + protected Suggest searchSuggest(String suggestText, int expectShardsFailed, Map> suggestions) { if (randomBoolean()) { SearchRequestBuilder builder = client().prepareSearch().setSize(0); SuggestBuilder suggestBuilder = new SuggestBuilder(); if (suggestText != null) { suggestBuilder.setGlobalText(suggestText); } - for (SuggestionBuilder suggestion : suggestions) { - suggestBuilder.addSuggestion(suggestion); + for (Entry> suggestion : suggestions.entrySet()) { + suggestBuilder.addSuggestion(suggestion.getKey(), suggestion.getValue()); } builder.suggest(suggestBuilder); SearchResponse actionGet = builder.execute().actionGet(); @@ -1286,8 +1294,8 @@ public class SuggestSearchTests extends ESIntegTestCase { if (suggestText != null) { builder.setSuggestText(suggestText); } - for (SuggestionBuilder suggestion : suggestions) { - builder.addSuggestion(suggestion); + for (Entry> suggestion : suggestions.entrySet()) { + builder.addSuggestion(suggestion.getKey(), suggestion.getValue()); } SuggestResponse actionGet = builder.execute().actionGet(); From 354ede717b870f7413b538ed893ab1bb80128cc8 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 1 Mar 2016 11:48:52 -0800 Subject: [PATCH 041/320] Removed old help files and improved plugin cli tests --- .../plugins/InstallPluginCommand.java | 14 +- .../elasticsearch/plugins/plugin-install.help 
| 59 --- .../elasticsearch/plugins/plugin-list.help | 12 - .../elasticsearch/plugins/plugin-remove.help | 12 - .../org/elasticsearch/plugins/plugin.help | 24 -- .../common/cli/CliToolTests.java | 365 ------------------ .../plugins/RemovePluginCommandTests.java | 4 +- .../common/cli/CliToolTestCase.java | 11 +- 8 files changed, 18 insertions(+), 483 deletions(-) delete mode 100644 core/src/main/resources/org/elasticsearch/plugins/plugin-install.help delete mode 100644 core/src/main/resources/org/elasticsearch/plugins/plugin-list.help delete mode 100644 core/src/main/resources/org/elasticsearch/plugins/plugin-remove.help delete mode 100644 core/src/main/resources/org/elasticsearch/plugins/plugin.help delete mode 100644 qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CliToolTests.java diff --git a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index 977d89a3418d..bbe00fddd8ca 100644 --- a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -51,6 +51,7 @@ import java.nio.file.attribute.PosixFilePermission; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Set; @@ -101,7 +102,7 @@ class InstallPluginCommand extends Command { "lang-groovy")); // TODO: make this a resource file generated by gradle - static final Set OFFICIAL_PLUGINS = unmodifiableSet(newHashSet( + static final Set OFFICIAL_PLUGINS = unmodifiableSet(new LinkedHashSet<>(Arrays.asList( "analysis-icu", "analysis-kuromoji", "analysis-phonetic", @@ -120,7 +121,7 @@ class InstallPluginCommand extends Command { "repository-azure", "repository-hdfs", "repository-s3", - "store-smb")); + "store-smb"))); private final Environment env; private final OptionSpec batchOption; @@ -134,6 +135,15 
@@ class InstallPluginCommand extends Command { this.arguments = parser.nonOptions("plugin id"); } + @Override + protected void printAdditionalHelp(Terminal terminal) { + terminal.println("The following official plugins may be installed by name:"); + for (String plugin : OFFICIAL_PLUGINS) { + terminal.println(" " + plugin); + } + terminal.println(""); + } + @Override protected int execute(Terminal terminal, OptionSet options) throws Exception { // TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args diff --git a/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help b/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help deleted file mode 100644 index 7037974ede3d..000000000000 --- a/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help +++ /dev/null @@ -1,59 +0,0 @@ -NAME - - install - Install a plugin - -SYNOPSIS - - plugin install - -DESCRIPTION - - This command installs an elasticsearch plugin. It can be used as follows: - - Officially supported or commercial plugins require just the plugin name: - - plugin install analysis-icu - plugin install x-pack - - Plugins from Maven Central require 'groupId:artifactId:version': - - plugin install org.elasticsearch:mapper-attachments:3.0.0 - - Plugins can be installed from a custom URL or file location as follows: - - plugin install http://some.domain.name//my-plugin-1.0.0.zip - plugin install file:/path/to/my-plugin-1.0.0.zip - -OFFICIAL PLUGINS - - The following plugins are officially supported and can be installed by just referring to their name - - - analysis-icu - - analysis-kuromoji - - analysis-phonetic - - analysis-smartcn - - analysis-stempel - - delete-by-query - - discovery-azure - - discovery-ec2 - - discovery-gce - - ingest-geoip - - lang-javascript - - lang-painless - - lang-python - - mapper-attachments - - mapper-murmur3 - - mapper-size - - repository-azure - - repository-hdfs - - repository-s3 - - store-smb - - -OPTIONS - - 
-v,--verbose Verbose output - - -h,--help Shows this message - - -b,--batch Enable batch mode explicitly, automatic confirmation of security permissions diff --git a/core/src/main/resources/org/elasticsearch/plugins/plugin-list.help b/core/src/main/resources/org/elasticsearch/plugins/plugin-list.help deleted file mode 100644 index c13949e8cb6c..000000000000 --- a/core/src/main/resources/org/elasticsearch/plugins/plugin-list.help +++ /dev/null @@ -1,12 +0,0 @@ -NAME - - list - List all plugins - -SYNOPSIS - - plugin list - -DESCRIPTION - - This command lists all installed elasticsearch plugins - diff --git a/core/src/main/resources/org/elasticsearch/plugins/plugin-remove.help b/core/src/main/resources/org/elasticsearch/plugins/plugin-remove.help deleted file mode 100644 index b708adf1f69c..000000000000 --- a/core/src/main/resources/org/elasticsearch/plugins/plugin-remove.help +++ /dev/null @@ -1,12 +0,0 @@ -NAME - - remove - Remove a plugin - -SYNOPSIS - - plugin remove - -DESCRIPTION - - This command removes an elasticsearch plugin - diff --git a/core/src/main/resources/org/elasticsearch/plugins/plugin.help b/core/src/main/resources/org/elasticsearch/plugins/plugin.help deleted file mode 100644 index 5cba544627a3..000000000000 --- a/core/src/main/resources/org/elasticsearch/plugins/plugin.help +++ /dev/null @@ -1,24 +0,0 @@ -NAME - - plugin - Manages plugins - -SYNOPSIS - - plugin - -DESCRIPTION - - Manage plugins - -COMMANDS - - install Install a plugin - - remove Remove a plugin - - list List installed plugins - -NOTES - - [*] For usage help on specific commands please type "plugin -h" - diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CliToolTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CliToolTests.java deleted file mode 100644 index 70d507853fa1..000000000000 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CliToolTests.java +++ /dev/null @@ -1,365 +0,0 @@ -/* - * Licensed to Elasticsearch under one or 
more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.cli; - -import org.apache.commons.cli.CommandLine; -import org.elasticsearch.cli.UserError; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.node.internal.InternalSettingsPreparer; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; - -import static java.util.Collections.unmodifiableMap; -import static org.elasticsearch.common.cli.CliTool.ExitStatus.OK; -import static org.elasticsearch.common.cli.CliTool.ExitStatus.USAGE; -import static org.elasticsearch.common.cli.CliToolConfig.Builder.cmd; -import static org.hamcrest.Matchers.arrayContaining; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.hasItem; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is; - -@SuppressForbidden(reason = "modifies system properties intentionally") -public class CliToolTests extends 
CliToolTestCase { - public void testOK() throws Exception { - Terminal terminal = new MockTerminal(); - final AtomicReference executed = new AtomicReference<>(false); - final NamedCommand cmd = new NamedCommand("cmd", terminal) { - @Override - public CliTool.ExitStatus execute(Settings settings, Environment env) { - executed.set(true); - return OK; - } - }; - SingleCmdTool tool = new SingleCmdTool("tool", terminal, cmd); - CliTool.ExitStatus status = tool.execute(); - assertStatus(status, OK); - assertCommandHasBeenExecuted(executed); - } - - public void testUsageError() throws Exception { - Terminal terminal = new MockTerminal(); - final AtomicReference executed = new AtomicReference<>(false); - final NamedCommand cmd = new NamedCommand("cmd", terminal) { - @Override - public CliTool.ExitStatus execute(Settings settings, Environment env) throws UserError { - executed.set(true); - throw new UserError(CliTool.ExitStatus.USAGE.status(), "bad usage"); - } - }; - SingleCmdTool tool = new SingleCmdTool("tool", terminal, cmd); - CliTool.ExitStatus status = tool.execute(); - assertStatus(status, CliTool.ExitStatus.USAGE); - assertCommandHasBeenExecuted(executed); - } - - public void testMultiCommand() throws Exception { - Terminal terminal = new MockTerminal(); - int count = randomIntBetween(2, 7); - List> executed = new ArrayList<>(count); - for (int i = 0; i < count; i++) { - executed.add(new AtomicReference<>(false)); - } - NamedCommand[] cmds = new NamedCommand[count]; - for (int i = 0; i < count; i++) { - final int index = i; - cmds[i] = new NamedCommand("cmd" + index, terminal) { - @Override - public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception { - executed.get(index).set(true); - return OK; - } - }; - } - MultiCmdTool tool = new MultiCmdTool("tool", terminal, cmds); - int cmdIndex = randomIntBetween(0, count-1); - CliTool.ExitStatus status = tool.execute("cmd" + cmdIndex); - assertThat(status, is(OK)); - for (int i = 0; i < count; 
i++) { - assertThat(executed.get(i).get(), is(i == cmdIndex)); - } - } - - public void testMultiCommandUnknownCommand() throws Exception { - Terminal terminal = new MockTerminal(); - int count = randomIntBetween(2, 7); - List> executed = new ArrayList<>(count); - for (int i = 0; i < count; i++) { - executed.add(new AtomicReference<>(false)); - } - NamedCommand[] cmds = new NamedCommand[count]; - for (int i = 0; i < count; i++) { - final int index = i; - cmds[i] = new NamedCommand("cmd" + index, terminal) { - @Override - public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception { - executed.get(index).set(true); - return OK; - } - }; - } - MultiCmdTool tool = new MultiCmdTool("tool", terminal, cmds); - CliTool.ExitStatus status = tool.execute("cmd" + count); // "cmd" + count doesn't exist - assertThat(status, is(CliTool.ExitStatus.USAGE)); - for (int i = 0; i < count; i++) { - assertThat(executed.get(i).get(), is(false)); - } - } - - public void testSingleCommandToolHelp() throws Exception { - CaptureOutputTerminal terminal = new CaptureOutputTerminal(); - final AtomicReference executed = new AtomicReference<>(false); - final NamedCommand cmd = new NamedCommand("cmd1", terminal) { - @Override - public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception { - executed.set(true); - throw new IOException("io error"); - } - }; - SingleCmdTool tool = new SingleCmdTool("tool", terminal, cmd); - CliTool.ExitStatus status = tool.execute(args("-h")); - assertStatus(status, CliTool.ExitStatus.OK_AND_EXIT); - assertThat(terminal.getTerminalOutput(), hasSize(3)); - assertThat(terminal.getTerminalOutput(), hasItem(containsString("cmd1 help"))); - } - - public void testMultiCommandToolHelp() throws Exception { - CaptureOutputTerminal terminal = new CaptureOutputTerminal(); - NamedCommand[] cmds = new NamedCommand[2]; - cmds[0] = new NamedCommand("cmd0", terminal) { - @Override - public CliTool.ExitStatus execute(Settings 
settings, Environment env) throws Exception { - return OK; - } - }; - cmds[1] = new NamedCommand("cmd1", terminal) { - @Override - public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception { - return OK; - } - }; - MultiCmdTool tool = new MultiCmdTool("tool", terminal, cmds); - CliTool.ExitStatus status = tool.execute(args("-h")); - assertStatus(status, CliTool.ExitStatus.OK_AND_EXIT); - assertThat(terminal.getTerminalOutput(), hasSize(3)); - assertThat(terminal.getTerminalOutput(), hasItem(containsString("tool help"))); - } - - public void testMultiCommandCmdHelp() throws Exception { - CaptureOutputTerminal terminal = new CaptureOutputTerminal(); - NamedCommand[] cmds = new NamedCommand[2]; - cmds[0] = new NamedCommand("cmd0", terminal) { - @Override - public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception { - return OK; - } - }; - cmds[1] = new NamedCommand("cmd1", terminal) { - @Override - public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception { - return OK; - } - }; - MultiCmdTool tool = new MultiCmdTool("tool", terminal, cmds); - CliTool.ExitStatus status = tool.execute(args("cmd1 -h")); - assertStatus(status, CliTool.ExitStatus.OK_AND_EXIT); - assertThat(terminal.getTerminalOutput(), hasSize(3)); - assertThat(terminal.getTerminalOutput(), hasItem(containsString("cmd1 help"))); - } - - public void testNonUserErrorPropagates() throws Exception { - CaptureOutputTerminal terminal = new CaptureOutputTerminal(); - NamedCommand cmd = new NamedCommand("cmd", terminal) { - @Override - public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception { - throw new IOException("error message"); - } - }; - SingleCmdTool tool = new SingleCmdTool("tool", terminal, cmd); - IOException e = expectThrows(IOException.class, () -> { - tool.execute(); - }); - assertEquals("error message", e.getMessage()); - } - - public void testMultipleLaunch() throws Exception { - 
Terminal terminal = new MockTerminal(); - final AtomicReference executed = new AtomicReference<>(false); - final NamedCommand cmd = new NamedCommand("cmd", terminal) { - @Override - public CliTool.ExitStatus execute(Settings settings, Environment env) { - executed.set(true); - return OK; - } - }; - SingleCmdTool tool = new SingleCmdTool("tool", terminal, cmd); - tool.parse("cmd", Strings.splitStringByCommaToArray("--verbose")); - tool.parse("cmd", Strings.splitStringByCommaToArray("--silent")); - tool.parse("cmd", Strings.splitStringByCommaToArray("--help")); - } - - public void testPromptForSetting() throws Exception { - final AtomicInteger counter = new AtomicInteger(); - final AtomicReference promptedSecretValue = new AtomicReference<>(null); - final AtomicReference promptedTextValue = new AtomicReference<>(null); - final Terminal terminal = new MockTerminal() { - @Override - public char[] readSecret(String text) { - counter.incrementAndGet(); - return "changeit".toCharArray(); - } - - @Override - public String readText(String text) { - counter.incrementAndGet(); - return "replaced"; - } - }; - final NamedCommand cmd = new NamedCommand("noop", terminal) { - @Override - public CliTool.ExitStatus execute(Settings settings, Environment env) { - promptedSecretValue.set(settings.get("foo.password")); - promptedTextValue.set(settings.get("replace")); - return OK; - } - }; - - System.setProperty("es.foo.password", InternalSettingsPreparer.SECRET_PROMPT_VALUE); - System.setProperty("es.replace", InternalSettingsPreparer.TEXT_PROMPT_VALUE); - try { - new SingleCmdTool("tool", terminal, cmd).execute(); - } finally { - System.clearProperty("es.foo.password"); - System.clearProperty("es.replace"); - } - - assertThat(counter.intValue(), is(2)); - assertThat(promptedSecretValue.get(), is("changeit")); - assertThat(promptedTextValue.get(), is("replaced")); - } - - public void testStopAtNonOptionParsing() throws Exception { - final CliToolConfig.Cmd lenientCommand = 
cmd("lenient", CliTool.Command.Exit.class).stopAtNonOption(true).build(); - final CliToolConfig.Cmd strictCommand = cmd("strict", CliTool.Command.Exit.class).stopAtNonOption(false).build(); - final CliToolConfig config = CliToolConfig.config("elasticsearch", CliTool.class).cmds(lenientCommand, strictCommand).build(); - - final CaptureOutputTerminal terminal = new CaptureOutputTerminal(); - final CliTool cliTool = new CliTool(config, terminal) { - @Override - protected Command parse(String cmdName, CommandLine cli) throws Exception { - return new NamedCommand(cmdName, terminal) { - @Override - public ExitStatus execute(Settings settings, Environment env) throws Exception { - return OK; - } - }; - } - }; - - // known parameters, no error - assertStatus(cliTool.execute(args("lenient --verbose")), OK); - assertStatus(cliTool.execute(args("lenient -v")), OK); - - // unknown parameters, no error - assertStatus(cliTool.execute(args("lenient --unknown")), OK); - assertStatus(cliTool.execute(args("lenient -u")), OK); - - // unknown parameters, error - assertStatus(cliTool.execute(args("strict --unknown")), USAGE); - assertThat(terminal.getTerminalOutput(), hasItem(containsString("Unrecognized option: --unknown"))); - - terminal.getTerminalOutput().clear(); - assertStatus(cliTool.execute(args("strict -u")), USAGE); - assertThat(terminal.getTerminalOutput(), hasItem(containsString("Unrecognized option: -u"))); - } - - private void assertStatus(CliTool.ExitStatus status, CliTool.ExitStatus expectedStatus) { - assertThat(status, is(expectedStatus)); - } - - private void assertCommandHasBeenExecuted(AtomicReference executed) { - assertThat("Expected command atomic reference counter to be set to true", executed.get(), is(Boolean.TRUE)); - } - - private static class SingleCmdTool extends CliTool { - - private final Command command; - - private SingleCmdTool(String name, Terminal terminal, NamedCommand command) { - super(CliToolConfig.config(name, SingleCmdTool.class) - 
.cmds(cmd(command.name, command.getClass())) - .build(), terminal); - this.command = command; - } - - @Override - protected Command parse(String cmdName, CommandLine cli) throws Exception { - return command; - } - } - - private static class MultiCmdTool extends CliTool { - - private final Map commands; - - private MultiCmdTool(String name, Terminal terminal, NamedCommand... commands) { - super(CliToolConfig.config(name, MultiCmdTool.class) - .cmds(cmds(commands)) - .build(), terminal); - Map commandByName = new HashMap<>(); - for (int i = 0; i < commands.length; i++) { - commandByName.put(commands[i].name, commands[i]); - } - this.commands = unmodifiableMap(commandByName); - } - - @Override - protected Command parse(String cmdName, CommandLine cli) throws Exception { - return commands.get(cmdName); - } - - private static CliToolConfig.Cmd[] cmds(NamedCommand... commands) { - CliToolConfig.Cmd[] cmds = new CliToolConfig.Cmd[commands.length]; - for (int i = 0; i < commands.length; i++) { - cmds[i] = cmd(commands[i].name, commands[i].getClass()).build(); - } - return cmds; - } - } - - private static abstract class NamedCommand extends CliTool.Command { - - private final String name; - - private NamedCommand(String name, Terminal terminal) { - super(terminal); - this.name = name; - } - } -} diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java index 6ffe4168de1c..cedb4f5d878b 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java @@ -25,11 +25,9 @@ import java.nio.file.Files; import java.nio.file.Path; import org.apache.lucene.util.LuceneTestCase; -import org.elasticsearch.cli.ExitCodes; -import org.elasticsearch.common.cli.CliTool; +import org.elasticsearch.cli.UserError; import 
org.elasticsearch.common.cli.CliToolTestCase; import org.elasticsearch.common.cli.Terminal; -import org.elasticsearch.cli.UserError; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; diff --git a/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java index 21a5e0228f68..d96f6bd4e79b 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java @@ -19,7 +19,11 @@ package org.elasticsearch.common.cli; -import org.elasticsearch.ExceptionsHelper; +import java.io.IOException; +import java.io.PrintWriter; +import java.util.ArrayList; +import java.util.List; + import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.test.ESTestCase; @@ -27,11 +31,6 @@ import org.elasticsearch.test.StreamsUtils; import org.junit.After; import org.junit.Before; -import java.io.IOException; -import java.io.PrintWriter; -import java.util.ArrayList; -import java.util.List; - import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; From 6ca7666646e543ba14df9d946888a56de01aef97 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 1 Mar 2016 14:42:28 -0800 Subject: [PATCH 042/320] Tribe: Passthrough environment and network settings to tribe client nodes In 2.2, the client nodes created internally by a tribe node were changed to be explicit about which settings the client nodes use, no longer loading all settings from elasticsearch.yml. However, some settings were missed, notably network bind settings. This change adds those settings to be passed through, as well as adds unit tests for building the tribe client node settings. 
--- .../org/elasticsearch/tribe/TribeService.java | 54 +++++++++++---- .../tribe/TribeServiceTests.java | 65 +++++++++++++++++++ 2 files changed, 107 insertions(+), 12 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index bf66cce1b9e8..ff12ddba00c6 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -53,6 +54,7 @@ import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.node.Node; import org.elasticsearch.rest.RestStatus; +import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; @@ -154,6 +156,12 @@ public class TribeService extends AbstractLifecycleComponent { public static final Set TRIBE_SETTING_KEYS = Sets.newHashSet(TRIBE_NAME_SETTING.getKey(), ON_CONFLICT_SETTING.getKey(), BLOCKS_METADATA_INDICES_SETTING.getKey(), BLOCKS_METADATA_SETTING.getKey(), BLOCKS_READ_INDICES_SETTING.getKey(), BLOCKS_WRITE_INDICES_SETTING.getKey(), BLOCKS_WRITE_SETTING.getKey()); + // these settings should be passed through to each tribe client, if they are not set explicitly + private static final List PASS_THROUGH_SETTINGS = Arrays.asList( + NetworkService.GLOBAL_NETWORK_HOST_SETTING.getKey(), + NetworkService.GLOBAL_NETWORK_BINDHOST_SETTING.getKey(), + NetworkService.GLOBAL_NETWORK_PUBLISHHOST_SETTING.getKey() + ); private final 
String onConflict; private final Set droppedIndices = ConcurrentCollections.newConcurrentSet(); @@ -167,18 +175,8 @@ public class TribeService extends AbstractLifecycleComponent { nodesSettings.remove("blocks"); // remove prefix settings that don't indicate a client nodesSettings.remove("on_conflict"); // remove prefix settings that don't indicate a client for (Map.Entry entry : nodesSettings.entrySet()) { - Settings.Builder sb = Settings.builder().put(entry.getValue()); - sb.put("node.name", settings.get("node.name") + "/" + entry.getKey()); - sb.put(Environment.PATH_HOME_SETTING.getKey(), Environment.PATH_HOME_SETTING.get(settings)); // pass through ES home dir - if (Environment.PATH_CONF_SETTING.exists(settings)) { - sb.put(Environment.PATH_CONF_SETTING.getKey(), Environment.PATH_CONF_SETTING.get(settings)); - } - sb.put(TRIBE_NAME_SETTING.getKey(), entry.getKey()); - if (sb.get("http.enabled") == null) { - sb.put("http.enabled", false); - } - sb.put(Node.NODE_CLIENT_SETTING.getKey(), true); - nodes.add(new TribeClientNode(sb.build())); + Settings clientSettings = buildClientSettings(entry.getKey(), settings, entry.getValue()); + nodes.add(new TribeClientNode(clientSettings)); } this.blockIndicesMetadata = BLOCKS_METADATA_INDICES_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY); @@ -197,6 +195,38 @@ public class TribeService extends AbstractLifecycleComponent { this.onConflict = ON_CONFLICT_SETTING.get(settings); } + // pkg private for testing + /** + * Builds node settings for a tribe client node from the tribe node's global settings, + * combined with tribe specific settings. 
+ */ + static Settings buildClientSettings(String tribeName, Settings globalSettings, Settings tribeSettings) { + Settings.Builder sb = Settings.builder().put(tribeSettings); + sb.put("node.name", globalSettings.get("node.name") + "/" + tribeName); + sb.put(Environment.PATH_HOME_SETTING.getKey(), Environment.PATH_HOME_SETTING.get(globalSettings)); // pass through ES home dir + if (Environment.PATH_CONF_SETTING.exists(globalSettings)) { + sb.put(Environment.PATH_CONF_SETTING.getKey(), Environment.PATH_CONF_SETTING.get(globalSettings)); + } + if (Environment.PATH_PLUGINS_SETTING.exists(globalSettings)) { + sb.put(Environment.PATH_PLUGINS_SETTING.getKey(), Environment.PATH_PLUGINS_SETTING.get(globalSettings)); + } + if (Environment.PATH_LOGS_SETTING.exists(globalSettings)) { + sb.put(Environment.PATH_LOGS_SETTING.getKey(), Environment.PATH_LOGS_SETTING.get(globalSettings)); + } + for (String passthrough : PASS_THROUGH_SETTINGS) { + if (sb.get(passthrough) == null && globalSettings.get(passthrough) != null) { + sb.put(passthrough, globalSettings.get(passthrough)); + } + } + sb.put(TRIBE_NAME_SETTING.getKey(), tribeName); + if (sb.get("http.enabled") == null) { + sb.put("http.enabled", false); + } + sb.put(Node.NODE_CLIENT_SETTING.getKey(), true); + return sb.build(); + } + + @Override protected void doStart() { if (nodes.isEmpty() == false) { diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java b/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java new file mode 100644 index 000000000000..08dd81c1b9bd --- /dev/null +++ b/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java @@ -0,0 +1,65 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.tribe; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +public class TribeServiceTests extends ESTestCase { + public void testMinimalSettings() { + Settings globalSettings = Settings.builder() + .put("node.name", "nodename") + .put("path.home", "some/path").build(); + Settings clientSettings = TribeService.buildClientSettings("tribe1", globalSettings, Settings.EMPTY); + assertEquals("some/path", clientSettings.get("path.home")); + assertEquals("nodename/tribe1", clientSettings.get("node.name")); + assertEquals("tribe1", clientSettings.get("tribe.name")); + assertEquals("false", clientSettings.get("http.enabled")); + assertEquals("true", clientSettings.get("node.client")); + assertEquals(5, clientSettings.getAsMap().size()); + } + + public void testEnvironmentSettings() { + Settings globalSettings = Settings.builder() + .put("node.name", "nodename") + .put("path.home", "some/path") + .put("path.conf", "conf/path") + .put("path.plugins", "plugins/path") + .put("path.logs", "logs/path").build(); + Settings clientSettings = TribeService.buildClientSettings("tribe1", globalSettings, Settings.EMPTY); + assertEquals("some/path", clientSettings.get("path.home")); + assertEquals("conf/path", clientSettings.get("path.conf")); + assertEquals("plugins/path", clientSettings.get("path.plugins")); + 
assertEquals("logs/path", clientSettings.get("path.logs")); + } + + public void testPassthroughSettings() { + Settings globalSettings = Settings.builder() + .put("node.name", "nodename") + .put("path.home", "some/path") + .put("network.host", "0.0.0.0") + .put("network.bind_host", "1.1.1.1") + .put("network.publish_host", "2.2.2.2").build(); + Settings clientSettings = TribeService.buildClientSettings("tribe1", globalSettings, Settings.EMPTY); + assertEquals("0.0.0.0", clientSettings.get("network.host")); + assertEquals("1.1.1.1", clientSettings.get("network.bind_host")); + assertEquals("2.2.2.2", clientSettings.get("network.publish_host")); + } +} From 6566327159abe1ab2b0da008b71e58cd66ee7196 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 1 Mar 2016 14:49:27 -0800 Subject: [PATCH 043/320] Add test for per tribe client overrides --- .../org/elasticsearch/tribe/TribeServiceTests.java | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java b/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java index 08dd81c1b9bd..1c6c72145f6a 100644 --- a/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java +++ b/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java @@ -61,5 +61,15 @@ public class TribeServiceTests extends ESTestCase { assertEquals("0.0.0.0", clientSettings.get("network.host")); assertEquals("1.1.1.1", clientSettings.get("network.bind_host")); assertEquals("2.2.2.2", clientSettings.get("network.publish_host")); + + // per tribe client overrides still work + Settings tribeSettings = Settings.builder() + .put("network.host", "3.3.3.3") + .put("network.bind_host", "4.4.4.4") + .put("network.publish_host", "5.5.5.5").build(); + clientSettings = TribeService.buildClientSettings("tribe1", globalSettings, tribeSettings); + assertEquals("3.3.3.3", clientSettings.get("network.host")); + assertEquals("4.4.4.4", clientSettings.get("network.bind_host")); + 
assertEquals("5.5.5.5", clientSettings.get("network.publish_host")); } } From 3bd33f839fd293611810ee8d79be4358213dbc52 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 1 Mar 2016 15:07:59 -0800 Subject: [PATCH 044/320] Add test for ignoring path settings in tribe client --- .../java/org/elasticsearch/tribe/TribeServiceTests.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java b/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java index 1c6c72145f6a..9916b6112131 100644 --- a/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java +++ b/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java @@ -48,6 +48,12 @@ public class TribeServiceTests extends ESTestCase { assertEquals("conf/path", clientSettings.get("path.conf")); assertEquals("plugins/path", clientSettings.get("path.plugins")); assertEquals("logs/path", clientSettings.get("path.logs")); + + // TODO: this should be an error, not just ignored! 
+ Settings tribeSettings = Settings.builder() + .put("path.home", "alternate/path").build(); + clientSettings = TribeService.buildClientSettings("tribe1", globalSettings, tribeSettings); + assertEquals("some/path", clientSettings.get("path.home")); } public void testPassthroughSettings() { From e4d9e46508e46dba21268b5a22dc84cd50dfdb31 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Wed, 2 Mar 2016 09:49:52 +0100 Subject: [PATCH 045/320] Fix merge with master --- .../cluster/service/InternalClusterService.java | 2 +- .../common/settings/ScopedSettingsTests.java | 6 +++--- .../elasticsearch/cloud/gce/GceComputeService.java | 11 ++++++----- .../cloud/gce/GceComputeServiceImpl.java | 7 ++++--- .../discovery/gce/GceUnicastHostsProvider.java | 4 +++- 5 files changed, 17 insertions(+), 13 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index 29b7f6ad54ea..af5c6a2f44bc 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -106,7 +106,7 @@ public class InternalClusterService extends AbstractLifecycleComponent NODE_ID_SEED_SETTING = // don't use node.id.seed so it won't be seen as an attribute - Setting.longSetting("node_id.seed", 0L, Long.MIN_VALUE, false, Setting.Scope.CLUSTER); + Setting.longSetting("node_id.seed", 0L, Long.MIN_VALUE, SettingsProperty.ClusterScope); private final ThreadPool threadPool; private BiConsumer clusterStatePublisher; diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index d626fe961d5a..84adee21b320 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ 
b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -310,9 +310,9 @@ public class ScopedSettingsTests extends ESTestCase { public void testOverlappingComplexMatchSettings() { Set> settings = new LinkedHashSet<>(2); final boolean groupFirst = randomBoolean(); - final Setting groupSetting = Setting.groupSetting("foo.", false, Setting.Scope.CLUSTER); - final Setting listSetting = Setting.listSetting("foo.bar", Collections.emptyList(), Function.identity(), false, - Setting.Scope.CLUSTER); + final Setting groupSetting = Setting.groupSetting("foo.", SettingsProperty.ClusterScope); + final Setting listSetting = + Setting.listSetting("foo.bar", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); settings.add(groupFirst ? groupSetting : listSetting); settings.add(groupFirst ? listSetting : groupSetting); diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java index ce5154b3436e..60af05ad6a10 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java @@ -22,6 +22,7 @@ package org.elasticsearch.cloud.gce; import com.google.api.services.compute.model.Instance; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.unit.TimeValue; import java.io.IOException; @@ -41,25 +42,25 @@ public interface GceComputeService extends LifecycleComponent /** * cloud.gce.project_id: Google project id */ - Setting PROJECT_SETTING = Setting.simpleString("cloud.gce.project_id", false, Setting.Scope.CLUSTER); + Setting PROJECT_SETTING = Setting.simpleString("cloud.gce.project_id", SettingsProperty.ClusterScope); /** * 
cloud.gce.zone: Google Compute Engine zones */ Setting> ZONE_SETTING = - Setting.listSetting("cloud.gce.zone", Collections.emptyList(), s -> s, false, Setting.Scope.CLUSTER); + Setting.listSetting("cloud.gce.zone", Collections.emptyList(), s -> s, SettingsProperty.ClusterScope); /** * cloud.gce.refresh_interval: How long the list of hosts is cached to prevent further requests to the AWS API. 0 disables caching. * A negative value will cause infinite caching. Defaults to 0s. */ Setting REFRESH_SETTING = - Setting.timeSetting("cloud.gce.refresh_interval", TimeValue.timeValueSeconds(0), false, Setting.Scope.CLUSTER); + Setting.timeSetting("cloud.gce.refresh_interval", TimeValue.timeValueSeconds(0), SettingsProperty.ClusterScope); /** * cloud.gce.retry: Should we retry calling GCE API in case of error? Defaults to true. */ - Setting RETRY_SETTING = Setting.boolSetting("cloud.gce.retry", true, false, Setting.Scope.CLUSTER); + Setting RETRY_SETTING = Setting.boolSetting("cloud.gce.retry", true, SettingsProperty.ClusterScope); /** * cloud.gce.max_wait: How long exponential backoff should retry before definitely failing. @@ -67,7 +68,7 @@ public interface GceComputeService extends LifecycleComponent * A negative value will retry indefinitely. Defaults to `-1s` (retry indefinitely). 
*/ Setting MAX_WAIT_SETTING = - Setting.timeSetting("cloud.gce.max_wait", TimeValue.timeValueSeconds(-1), false, Setting.Scope.CLUSTER); + Setting.timeSetting("cloud.gce.max_wait", TimeValue.timeValueSeconds(-1), SettingsProperty.ClusterScope); /** * Return a collection of running instances within the same GCE project diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java index d9033b602d29..b0cfeb16c516 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.gce.RetryHttpInitializerWrapper; @@ -61,11 +62,11 @@ public class GceComputeServiceImpl extends AbstractLifecycleComponent GCE_VALIDATE_CERTIFICATES = - Setting.boolSetting("cloud.gce.validate_certificates", true, false, Setting.Scope.CLUSTER); + Setting.boolSetting("cloud.gce.validate_certificates", true, SettingsProperty.ClusterScope); public static final Setting GCE_HOST = - new Setting<>("cloud.gce.host", "http://metadata.google.internal", Function.identity(), false, Setting.Scope.CLUSTER); + new Setting<>("cloud.gce.host", "http://metadata.google.internal", Function.identity(), SettingsProperty.ClusterScope); public static final Setting GCE_ROOT_URL = - new Setting<>("cloud.gce.root_url", "https://www.googleapis.com", Function.identity(), false, Setting.Scope.CLUSTER); + new 
Setting<>("cloud.gce.root_url", "https://www.googleapis.com", Function.identity(), SettingsProperty.ClusterScope); private final String project; private final List zones; diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java index e840d3439d65..738befe9d161 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; @@ -43,6 +44,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.function.Function; /** * @@ -53,7 +55,7 @@ public class GceUnicastHostsProvider extends AbstractComponent implements Unicas * discovery.gce.tags: The gce discovery can filter machines to include in the cluster based on tags. 
*/ public static final Setting> TAGS_SETTING = - Setting.listSetting("discovery.gce.tags", Collections.emptyList(), s -> s, false, Setting.Scope.CLUSTER); + Setting.listSetting("discovery.gce.tags", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); static final class Status { private static final String TERMINATED = "TERMINATED"; From c103e40e726c69fa093bb22e2574499dbce0282c Mon Sep 17 00:00:00 2001 From: David Pilato Date: Wed, 2 Mar 2016 10:06:53 +0100 Subject: [PATCH 046/320] Add support for deprecated settings This is a backport of #16845 in this branch. We now also support marking settings with `SettingsProperty.Deprecated`. If the setting is still used, it will print a `warn` to the user. --- .../common/settings/Setting.java | 22 ++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 1469d4679cb0..af2d53743e49 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.MemorySizeValue; @@ -81,6 +82,11 @@ public class Setting extends ToXContentToBytes { */ Dynamic, + /** + * mark this setting as deprecated + */ + Deprecated, + /** * Cluster scope. 
* @See IndexScope @@ -103,6 +109,7 @@ public class Setting extends ToXContentToBytes { IndexScope; } + private static final ESLogger logger = Loggers.getLogger(Setting.class); private final String key; protected final Function defaultValue; private final Function parser; @@ -215,6 +222,13 @@ public class Setting extends ToXContentToBytes { return properties.contains(SettingsProperty.NodeScope); } + /** + * Returns true if this setting is deprecated, otherwise false + */ + public boolean isDeprecated() { + return properties.contains(SettingsProperty.Deprecated); + } + /** * Returns true iff this setting is a group setting. Group settings represent a set of settings * rather than a single value. The key, see {@link #getKey()}, in contrast to non-group settings is a prefix like cluster.store. @@ -275,6 +289,12 @@ public class Setting extends ToXContentToBytes { * instead. This is useful if the value can't be parsed due to an invalid value to access the actual value. */ public String getRaw(Settings settings) { + // They're using the setting, so we need to tell them to stop + if (this.isDeprecated() && this.exists(settings)) { + // It would be convenient to show its replacement key, but replacement is often not so simple + logger.warn("[{}] setting was deprecated in Elasticsearch and it will be removed in a future release! " + + "See the breaking changes lists in the documentation for details", getKey()); + } return settings.get(key, defaultValue.apply(settings)); } @@ -678,7 +698,7 @@ public class Setting extends ToXContentToBytes { /** * This setting type allows to validate settings that have the same type and a common prefix. For instance feature.${type}=[true|false] - * can easily be added with this setting. Yet, dynamic key settings don't support updaters our of the box unless {@link #getConcreteSetting(String)} + * can easily be added with this setting. 
Yet, dynamic key settings don't support updaters out of the box unless {@link #getConcreteSetting(String)} * is used to pull the updater. */ public static Setting dynamicKeySetting(String key, String defaultValue, Function parser, From 3f71c1d6a5520be787fdf5cbdb2c6400346df319 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Wed, 2 Mar 2016 10:12:40 +0100 Subject: [PATCH 047/320] Replace `s -> s` by `Function.identity()` --- .../org/elasticsearch/common/network/NetworkService.java | 7 ++++--- .../org/elasticsearch/http/HttpTransportSettings.java | 7 ++++--- .../org/elasticsearch/transport/TransportSettings.java | 9 +++++---- .../org/elasticsearch/common/settings/SettingTests.java | 7 ++++--- .../org/elasticsearch/cloud/gce/GceComputeService.java | 3 ++- 5 files changed, 19 insertions(+), 14 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java index fc1922252221..83f4f5fc88c9 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java @@ -35,6 +35,7 @@ import java.util.HashSet; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.TimeUnit; +import java.util.function.Function; /** * @@ -45,11 +46,11 @@ public class NetworkService extends AbstractComponent { public static final String DEFAULT_NETWORK_HOST = "_local_"; public static final Setting> GLOBAL_NETWORK_HOST_SETTING = - Setting.listSetting("network.host", Arrays.asList(DEFAULT_NETWORK_HOST), s -> s, SettingsProperty.ClusterScope); + Setting.listSetting("network.host", Arrays.asList(DEFAULT_NETWORK_HOST), Function.identity(), SettingsProperty.ClusterScope); public static final Setting> GLOBAL_NETWORK_BINDHOST_SETTING = - Setting.listSetting("network.bind_host", GLOBAL_NETWORK_HOST_SETTING, s -> s, SettingsProperty.ClusterScope); + 
Setting.listSetting("network.bind_host", GLOBAL_NETWORK_HOST_SETTING, Function.identity(), SettingsProperty.ClusterScope); public static final Setting> GLOBAL_NETWORK_PUBLISHHOST_SETTING = - Setting.listSetting("network.publish_host", GLOBAL_NETWORK_HOST_SETTING, s -> s, SettingsProperty.ClusterScope); + Setting.listSetting("network.publish_host", GLOBAL_NETWORK_HOST_SETTING, Function.identity(), SettingsProperty.ClusterScope); public static final Setting NETWORK_SERVER = Setting.boolSetting("network.server", true, SettingsProperty.ClusterScope); public static final class TcpSettings { diff --git a/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java b/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java index 9fbf444e76e2..b1b29eae60ce 100644 --- a/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java +++ b/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import java.util.List; +import java.util.function.Function; import static java.util.Collections.emptyList; import static org.elasticsearch.common.settings.Setting.listSetting; @@ -53,11 +54,11 @@ public final class HttpTransportSettings { public static final Setting SETTING_HTTP_COMPRESSION_LEVEL = Setting.intSetting("http.compression_level", 6, SettingsProperty.ClusterScope); public static final Setting> SETTING_HTTP_HOST = - listSetting("http.host", emptyList(), s -> s, SettingsProperty.ClusterScope); + listSetting("http.host", emptyList(), Function.identity(), SettingsProperty.ClusterScope); public static final Setting> SETTING_HTTP_PUBLISH_HOST = - listSetting("http.publish_host", SETTING_HTTP_HOST, s -> s, SettingsProperty.ClusterScope); + listSetting("http.publish_host", SETTING_HTTP_HOST, Function.identity(), SettingsProperty.ClusterScope); public static final Setting> SETTING_HTTP_BIND_HOST = - 
listSetting("http.bind_host", SETTING_HTTP_HOST, s -> s, SettingsProperty.ClusterScope); + listSetting("http.bind_host", SETTING_HTTP_HOST, Function.identity(), SettingsProperty.ClusterScope); public static final Setting SETTING_HTTP_PORT = new Setting("http.port", "9200-9300", PortsRange::new, SettingsProperty.ClusterScope); diff --git a/core/src/main/java/org/elasticsearch/transport/TransportSettings.java b/core/src/main/java/org/elasticsearch/transport/TransportSettings.java index eaa3f0041888..b52a54509bd0 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportSettings.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportSettings.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.settings.Setting.SettingsProperty; import org.elasticsearch.common.settings.Settings; import java.util.List; +import java.util.function.Function; import static java.util.Collections.emptyList; import static org.elasticsearch.common.settings.Setting.groupSetting; @@ -36,13 +37,13 @@ import static org.elasticsearch.common.settings.Setting.listSetting; final public class TransportSettings { public static final Setting> HOST = - listSetting("transport.host", emptyList(), s -> s, SettingsProperty.ClusterScope); + listSetting("transport.host", emptyList(), Function.identity(), SettingsProperty.ClusterScope); public static final Setting> PUBLISH_HOST = - listSetting("transport.publish_host", HOST, s -> s, SettingsProperty.ClusterScope); + listSetting("transport.publish_host", HOST, Function.identity(), SettingsProperty.ClusterScope); public static final Setting> BIND_HOST = - listSetting("transport.bind_host", HOST, s -> s, SettingsProperty.ClusterScope); + listSetting("transport.bind_host", HOST, Function.identity(), SettingsProperty.ClusterScope); public static final Setting PORT = - new Setting<>("transport.tcp.port", "9300-9400", s -> s, SettingsProperty.ClusterScope); + new Setting<>("transport.tcp.port", "9300-9400", Function.identity(), 
SettingsProperty.ClusterScope); public static final Setting PUBLISH_PORT = intSetting("transport.publish_port", -1, -1, SettingsProperty.ClusterScope); public static final String DEFAULT_PROFILE = "default"; diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index cd6496d8b2fb..9d1176dd0f44 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -29,6 +29,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; @@ -122,11 +123,11 @@ public class SettingTests extends ESTestCase { assertEquals(defautlValue, setting.getDefault(Settings.EMPTY)); Setting secondaryDefault = - new Setting<>("foo.bar", (s) -> s.get("old.foo.bar", "some_default"), (s) -> s, SettingsProperty.ClusterScope); + new Setting<>("foo.bar", (s) -> s.get("old.foo.bar", "some_default"), Function.identity(), SettingsProperty.ClusterScope); assertEquals("some_default", secondaryDefault.get(Settings.EMPTY)); assertEquals("42", secondaryDefault.get(Settings.builder().put("old.foo.bar", 42).build())); Setting secondaryDefaultViaSettings = - new Setting<>("foo.bar", secondaryDefault, (s) -> s, SettingsProperty.ClusterScope); + new Setting<>("foo.bar", secondaryDefault, Function.identity(), SettingsProperty.ClusterScope); assertEquals("some_default", secondaryDefaultViaSettings.get(Settings.EMPTY)); assertEquals("42", secondaryDefaultViaSettings.get(Settings.builder().put("old.foo.bar", 42).build())); } @@ -324,7 +325,7 @@ public class SettingTests extends ESTestCase { assertEquals(i, intValues.get(i).intValue()); } - Setting> settingWithFallback = Setting.listSetting("foo.baz", listSetting, s -> 
s, + Setting> settingWithFallback = Setting.listSetting("foo.baz", listSetting, Function.identity(), SettingsProperty.Dynamic, SettingsProperty.ClusterScope); value = settingWithFallback.get(Settings.EMPTY); assertEquals(1, value.size()); diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java index 60af05ad6a10..7a46768e5a04 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java @@ -29,6 +29,7 @@ import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.function.Function; public interface GceComputeService extends LifecycleComponent { @@ -48,7 +49,7 @@ public interface GceComputeService extends LifecycleComponent * cloud.gce.zone: Google Compute Engine zones */ Setting> ZONE_SETTING = - Setting.listSetting("cloud.gce.zone", Collections.emptyList(), s -> s, SettingsProperty.ClusterScope); + Setting.listSetting("cloud.gce.zone", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); /** * cloud.gce.refresh_interval: How long the list of hosts is cached to prevent further requests to the AWS API. 0 disables caching. 
From e4031932edaea00b5ec06f6153aa7e02e8f0e74e Mon Sep 17 00:00:00 2001 From: David Pilato Date: Wed, 2 Mar 2016 15:10:32 +0100 Subject: [PATCH 048/320] Use deprecation Logger --- .../main/java/org/elasticsearch/common/settings/Setting.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index af2d53743e49..394180d27f56 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; @@ -110,6 +111,8 @@ public class Setting extends ToXContentToBytes { } private static final ESLogger logger = Loggers.getLogger(Setting.class); + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger); + private final String key; protected final Function defaultValue; private final Function parser; @@ -292,7 +295,7 @@ public class Setting extends ToXContentToBytes { // They're using the setting, so we need to tell them to stop if (this.isDeprecated() && this.exists(settings)) { // It would be convenient to show its replacement key, but replacement is often not so simple - logger.warn("[{}] setting was deprecated in Elasticsearch and it will be removed in a future release! " + + deprecationLogger.deprecated("[{}] setting was deprecated in Elasticsearch and it will be removed in a future release! 
" + "See the breaking changes lists in the documentation for details", getKey()); } return settings.get(key, defaultValue.apply(settings)); From 209da28bb2d63b6e9e517cd996d40e9222224a8c Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 3 Mar 2016 09:37:33 -0800 Subject: [PATCH 049/320] Removed check file command tests, check file command is going away --- .../common/cli/CheckFileCommandTests.java | 329 ------------------ 1 file changed, 329 deletions(-) delete mode 100644 qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java deleted file mode 100644 index 45f3df22cd77..000000000000 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java +++ /dev/null @@ -1,329 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common.cli; - -import com.google.common.jimfs.Configuration; -import com.google.common.jimfs.Jimfs; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.env.Environment; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.FileSystem; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.attribute.GroupPrincipal; -import java.nio.file.attribute.PosixFileAttributeView; -import java.nio.file.attribute.PosixFileAttributes; -import java.nio.file.attribute.PosixFilePermission; -import java.nio.file.attribute.UserPrincipal; -import java.util.Set; - -import static org.hamcrest.Matchers.allOf; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.hasItem; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is; - -/** - * - */ -public class CheckFileCommandTests extends ESTestCase { - - private CliToolTestCase.CaptureOutputTerminal captureOutputTerminal = new CliToolTestCase.CaptureOutputTerminal(); - - private Configuration jimFsConfiguration = Configuration.unix().toBuilder().setAttributeViews("basic", "owner", "posix", "unix").build(); - private Configuration jimFsConfigurationWithoutPermissions = randomBoolean() ? 
Configuration.unix().toBuilder().setAttributeViews("basic").build() : Configuration.windows(); - - private enum Mode { - CHANGE, KEEP, DISABLED - } - - public void testThatCommandLogsErrorMessageOnFail() throws Exception { - executeCommand(jimFsConfiguration, new PermissionCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.CHANGE)); - assertThat(captureOutputTerminal.getTerminalOutput(), hasItem(containsString("Please ensure that the user account running Elasticsearch has read access to this file"))); - } - - public void testThatCommandLogsNothingWhenPermissionRemains() throws Exception { - executeCommand(jimFsConfiguration, new PermissionCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.KEEP)); - assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0)); - } - - public void testThatCommandLogsNothingWhenDisabled() throws Exception { - executeCommand(jimFsConfiguration, new PermissionCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.DISABLED)); - assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0)); - } - - public void testThatCommandLogsNothingIfFilesystemDoesNotSupportPermissions() throws Exception { - executeCommand(jimFsConfigurationWithoutPermissions, new PermissionCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.DISABLED)); - assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0)); - } - - public void testThatCommandLogsOwnerChange() throws Exception { - executeCommand(jimFsConfiguration, new OwnerCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.CHANGE)); - assertThat(captureOutputTerminal.getTerminalOutput(), hasItem(allOf(containsString("Owner of file ["), containsString("] used to be ["), containsString("], but now is [")))); - } - - public void testThatCommandLogsNothingIfOwnerRemainsSame() throws Exception { - executeCommand(jimFsConfiguration, new OwnerCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.KEEP)); - 
assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0)); - } - - public void testThatCommandLogsNothingIfOwnerIsDisabled() throws Exception { - executeCommand(jimFsConfiguration, new OwnerCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.DISABLED)); - assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0)); - } - - public void testThatCommandLogsNothingIfFileSystemDoesNotSupportOwners() throws Exception { - executeCommand(jimFsConfigurationWithoutPermissions, new OwnerCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.DISABLED)); - assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0)); - } - - public void testThatCommandLogsIfGroupChanges() throws Exception { - executeCommand(jimFsConfiguration, new GroupCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.CHANGE)); - assertThat(captureOutputTerminal.getTerminalOutput(), hasItem(allOf(containsString("Group of file ["), containsString("] used to be ["), containsString("], but now is [")))); - } - - public void testThatCommandLogsNothingIfGroupRemainsSame() throws Exception { - executeCommand(jimFsConfiguration, new GroupCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.KEEP)); - assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0)); - } - - public void testThatCommandLogsNothingIfGroupIsDisabled() throws Exception { - executeCommand(jimFsConfiguration, new GroupCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.DISABLED)); - assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0)); - } - - public void testThatCommandLogsNothingIfFileSystemDoesNotSupportGroups() throws Exception { - executeCommand(jimFsConfigurationWithoutPermissions, new GroupCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.DISABLED)); - assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0)); - } - - public void testThatCommandDoesNotLogAnythingOnFileCreation() throws Exception { - Configuration configuration = 
randomBoolean() ? jimFsConfiguration : jimFsConfigurationWithoutPermissions; - - try (FileSystem fs = Jimfs.newFileSystem(configuration)) { - Path path = fs.getPath(randomAsciiOfLength(10)); - Settings settings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); - new CreateFileCommand(captureOutputTerminal, path).execute(settings, new Environment(settings)); - assertThat(Files.exists(path), is(true)); - } - - assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0)); - } - - public void testThatCommandWorksIfFileIsDeletedByCommand() throws Exception { - Configuration configuration = randomBoolean() ? jimFsConfiguration : jimFsConfigurationWithoutPermissions; - - try (FileSystem fs = Jimfs.newFileSystem(configuration)) { - Path path = fs.getPath(randomAsciiOfLength(10)); - Files.write(path, "anything".getBytes(StandardCharsets.UTF_8)); - - Settings settings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); - new DeleteFileCommand(captureOutputTerminal, path).execute(settings, new Environment(settings)); - assertThat(Files.exists(path), is(false)); - } - - assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0)); - } - - private void executeCommand(Configuration configuration, AbstractTestCheckFileCommand command) throws Exception { - try (FileSystem fs = Jimfs.newFileSystem(configuration)) { - command.execute(fs); - } - } - - abstract class AbstractTestCheckFileCommand extends CheckFileCommand { - - protected final Mode mode; - protected FileSystem fs; - protected Path[] paths; - final Path baseDir; - - public AbstractTestCheckFileCommand(Path baseDir, Terminal terminal, Mode mode) throws IOException { - super(terminal); - this.mode = mode; - this.baseDir = baseDir; - } - - public CliTool.ExitStatus execute(FileSystem fs) throws Exception { - this.fs = fs; - this.paths = new Path[] { writePath(fs, "p1", "anything"), writePath(fs, "p2", 
"anything"), writePath(fs, "p3", "anything") }; - Settings settings = Settings.settingsBuilder() - .put(Environment.PATH_HOME_SETTING.getKey(), baseDir.toString()) - .build(); - return super.execute(Settings.EMPTY, new Environment(settings)); - } - - private Path writePath(FileSystem fs, String name, String content) throws IOException { - Path path = fs.getPath(name); - Files.write(path, content.getBytes(StandardCharsets.UTF_8)); - return path; - } - - @Override - protected Path[] pathsForPermissionsCheck(Settings settings, Environment env) { - return paths; - } - } - - /** - * command that changes permissions from a file if enabled - */ - class PermissionCheckFileCommand extends AbstractTestCheckFileCommand { - - public PermissionCheckFileCommand(Path baseDir, Terminal terminal, Mode mode) throws IOException { - super(baseDir, terminal, mode); - } - - @Override - public CliTool.ExitStatus doExecute(Settings settings, Environment env) throws Exception { - int randomInt = randomInt(paths.length - 1); - Path randomPath = paths[randomInt]; - switch (mode) { - case CHANGE: - Files.write(randomPath, randomAsciiOfLength(10).getBytes(StandardCharsets.UTF_8)); - Files.setPosixFilePermissions(randomPath, Sets.newHashSet(PosixFilePermission.OWNER_EXECUTE, PosixFilePermission.OTHERS_EXECUTE, PosixFilePermission.GROUP_EXECUTE)); - break; - case KEEP: - Files.write(randomPath, randomAsciiOfLength(10).getBytes(StandardCharsets.UTF_8)); - Set posixFilePermissions = Files.getPosixFilePermissions(randomPath); - Files.setPosixFilePermissions(randomPath, posixFilePermissions); - break; - } - return CliTool.ExitStatus.OK; - } - - } - - /** - * command that changes the owner of a file if enabled - */ - class OwnerCheckFileCommand extends AbstractTestCheckFileCommand { - - public OwnerCheckFileCommand(Path baseDir, Terminal terminal, Mode mode) throws IOException { - super(baseDir, terminal, mode); - } - - @Override - public CliTool.ExitStatus doExecute(Settings settings, Environment 
env) throws Exception { - int randomInt = randomInt(paths.length - 1); - Path randomPath = paths[randomInt]; - switch (mode) { - case CHANGE: - Files.write(randomPath, randomAsciiOfLength(10).getBytes(StandardCharsets.UTF_8)); - UserPrincipal randomOwner = fs.getUserPrincipalLookupService().lookupPrincipalByName(randomAsciiOfLength(10)); - Files.setOwner(randomPath, randomOwner); - break; - case KEEP: - Files.write(randomPath, randomAsciiOfLength(10).getBytes(StandardCharsets.UTF_8)); - UserPrincipal originalOwner = Files.getOwner(randomPath); - Files.setOwner(randomPath, originalOwner); - break; - } - - return CliTool.ExitStatus.OK; - } - } - - /** - * command that changes the group of a file if enabled - */ - class GroupCheckFileCommand extends AbstractTestCheckFileCommand { - - public GroupCheckFileCommand(Path baseDir, Terminal terminal, Mode mode) throws IOException { - super(baseDir, terminal, mode); - } - - @Override - public CliTool.ExitStatus doExecute(Settings settings, Environment env) throws Exception { - int randomInt = randomInt(paths.length - 1); - Path randomPath = paths[randomInt]; - switch (mode) { - case CHANGE: - Files.write(randomPath, randomAsciiOfLength(10).getBytes(StandardCharsets.UTF_8)); - GroupPrincipal randomPrincipal = fs.getUserPrincipalLookupService().lookupPrincipalByGroupName(randomAsciiOfLength(10)); - Files.getFileAttributeView(randomPath, PosixFileAttributeView.class).setGroup(randomPrincipal); - break; - case KEEP: - Files.write(randomPath, randomAsciiOfLength(10).getBytes(StandardCharsets.UTF_8)); - GroupPrincipal groupPrincipal = Files.readAttributes(randomPath, PosixFileAttributes.class).group(); - Files.getFileAttributeView(randomPath, PosixFileAttributeView.class).setGroup(groupPrincipal); - break; - } - - return CliTool.ExitStatus.OK; - } - } - - /** - * A command that creates a non existing file - */ - class CreateFileCommand extends CheckFileCommand { - - private final Path pathToCreate; - - public 
CreateFileCommand(Terminal terminal, Path pathToCreate) { - super(terminal); - this.pathToCreate = pathToCreate; - } - - @Override - public CliTool.ExitStatus doExecute(Settings settings, Environment env) throws Exception { - Files.write(pathToCreate, "anything".getBytes(StandardCharsets.UTF_8)); - return CliTool.ExitStatus.OK; - } - - @Override - protected Path[] pathsForPermissionsCheck(Settings settings, Environment env) throws Exception { - return new Path[] { pathToCreate }; - } - } - - /** - * A command that deletes an existing file - */ - class DeleteFileCommand extends CheckFileCommand { - - private final Path pathToDelete; - - public DeleteFileCommand(Terminal terminal, Path pathToDelete) { - super(terminal); - this.pathToDelete = pathToDelete; - } - - @Override - public CliTool.ExitStatus doExecute(Settings settings, Environment env) throws Exception { - Files.delete(pathToDelete); - return CliTool.ExitStatus.OK; - } - - @Override - protected Path[] pathsForPermissionsCheck(Settings settings, Environment env) throws Exception { - return new Path[] {pathToDelete}; - } - } -} From 30a788d87cff159f2f2045a1b98216afcd10855b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 1 Mar 2016 19:57:58 +0100 Subject: [PATCH 050/320] Suggestions: Make field name mandatory ctor argument The field name is a required argument for all suggesters, but it was specified via a field() setter in SuggestionBuilder so far. This changes field name to being a mandatory constructor argument and lets suggestion builders throw an error if field name is missing or the empty string. 
--- .../rest/action/search/RestSearchAction.java | 4 +- .../search/suggest/SuggestBuilders.java | 18 +- .../search/suggest/SuggestionBuilder.java | 78 ++++---- .../CompletionSuggestionBuilder.java | 10 +- .../phrase/PhraseSuggestionBuilder.java | 159 ++++++++++------ .../suggest/term/TermSuggestionBuilder.java | 128 ++++++++----- .../index/suggest/stats/SuggestStatsIT.java | 4 +- .../indices/IndicesOptionsIntegrationIT.java | 2 +- .../builder/SearchSourceBuilderTests.java | 2 +- .../AbstractSuggestionBuilderTestCase.java | 12 +- .../suggest/CompletionSuggestSearchIT.java | 60 +++--- .../ContextCompletionSuggestSearchIT.java | 46 ++--- .../suggest/CustomSuggesterSearchIT.java | 22 +-- .../CompletionSuggesterBuilderTests.java | 2 +- .../phrase/PhraseSuggestionBuilderTests.java | 81 +++++++- .../term/TermSuggestionBuilderTests.java | 50 +++-- docs/reference/migration/migrate_5_0.asciidoc | 7 + .../messy/tests/SuggestSearchTests.java | 179 +++++++++--------- 18 files changed, 518 insertions(+), 346 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 76d1dbf9d487..4f431ebb81a1 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -102,7 +102,7 @@ public class RestSearchAction extends BaseRestHandler { * content is read from the request using * RestAction.hasBodyContent. 
*/ - public static void parseSearchRequest(SearchRequest searchRequest, IndicesQueriesRegistry indicesQueriesRegistry, RestRequest request, + public static void parseSearchRequest(SearchRequest searchRequest, IndicesQueriesRegistry indicesQueriesRegistry, RestRequest request, ParseFieldMatcher parseFieldMatcher, AggregatorParsers aggParsers, BytesReference restContent) throws IOException { if (searchRequest.source() == null) { searchRequest.source(new SearchSourceBuilder()); @@ -256,7 +256,7 @@ public class RestSearchAction extends BaseRestHandler { int suggestSize = request.paramAsInt("suggest_size", 5); String suggestMode = request.param("suggest_mode"); searchSourceBuilder.suggest(new SuggestBuilder().addSuggestion(suggestField, - termSuggestion().field(suggestField) + termSuggestion(suggestField) .text(suggestText).size(suggestSize) .suggestMode(SuggestMode.resolve(suggestMode)))); } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilders.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilders.java index 33b32744f52f..c9111c660f8f 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilders.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilders.java @@ -29,32 +29,32 @@ import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; public abstract class SuggestBuilders { /** - * Creates a term suggestion lookup query with the provided name + * Creates a term suggestion lookup query with the provided fieldname * * @return a {@link org.elasticsearch.search.suggest.term.TermSuggestionBuilder} * instance */ - public static TermSuggestionBuilder termSuggestion() { - return new TermSuggestionBuilder(); + public static TermSuggestionBuilder termSuggestion(String fieldname) { + return new TermSuggestionBuilder(fieldname); } /** - * Creates a phrase suggestion lookup query with the provided name + * Creates a phrase suggestion lookup query with the provided fieldname * * @return 
a {@link org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder} * instance */ - public static PhraseSuggestionBuilder phraseSuggestion() { - return new PhraseSuggestionBuilder(); + public static PhraseSuggestionBuilder phraseSuggestion(String fieldname) { + return new PhraseSuggestionBuilder(fieldname); } /** - * Creates a completion suggestion lookup query with the provided name + * Creates a completion suggestion lookup query with the provided fieldname * * @return a {@link org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder} * instance */ - public static CompletionSuggestionBuilder completionSuggestion() { - return new CompletionSuggestionBuilder(); + public static CompletionSuggestionBuilder completionSuggestion(String fieldname) { + return new CompletionSuggestionBuilder(fieldname); } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java index 5291be775a66..4ff418bea396 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java @@ -23,6 +23,7 @@ import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -43,8 +44,7 @@ import java.util.Objects; */ public abstract class SuggestionBuilder> extends ToXContentToBytes implements NamedWriteable { - // TODO this seems mandatory and should be constructor arg - protected String fieldname; + protected final String fieldname; protected String text; protected String prefix; protected String regex; @@ -60,6 +60,31 @@ 
public abstract class SuggestionBuilder> extends protected static final ParseField SIZE_FIELD = new ParseField("size"); protected static final ParseField SHARDSIZE_FIELD = new ParseField("shard_size"); + /** + * Creates a new suggestion. + * @param fieldname field to fetch the candidate suggestions from + */ + public SuggestionBuilder(String fieldname) { + Objects.requireNonNull(fieldname, "suggestion requires a field name"); + if (fieldname.isEmpty()) { + throw new IllegalArgumentException("suggestion field name is empty"); + } + this.fieldname = fieldname; + } + + /** + * internal copy constructor that copies over all class fields from second SuggestionBuilder except field name. + */ + protected SuggestionBuilder(String fieldname, SuggestionBuilder in) { + this(fieldname); + text = in.text; + prefix = in.prefix; + regex = in.regex; + analyzer = in.analyzer; + size = in.size; + shardSize = in.shardSize; + } + /** * Same as in {@link SuggestBuilder#setGlobalText(String)}, but in the suggestion scope. 
*/ @@ -117,9 +142,7 @@ public abstract class SuggestionBuilder> extends if (analyzer != null) { builder.field(ANALYZER_FIELD.getPreferredName(), analyzer); } - if (fieldname != null) { - builder.field(FIELDNAME_FIELD.getPreferredName(), fieldname); - } + builder.field(FIELDNAME_FIELD.getPreferredName(), fieldname); if (size != null) { builder.field(SIZE_FIELD.getPreferredName(), size); } @@ -139,7 +162,7 @@ public abstract class SuggestionBuilder> extends XContentParser parser = parseContext.parser(); ParseFieldMatcher parsefieldMatcher = parseContext.parseFieldMatcher(); XContentParser.Token token; - String fieldName = null; + String currentFieldName = null; String suggestText = null; String prefix = null; String regex = null; @@ -147,21 +170,21 @@ public abstract class SuggestionBuilder> extends while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); + currentFieldName = parser.currentName(); } else if (token.isValue()) { - if (parsefieldMatcher.match(fieldName, TEXT_FIELD)) { + if (parsefieldMatcher.match(currentFieldName, TEXT_FIELD)) { suggestText = parser.text(); - } else if (parsefieldMatcher.match(fieldName, PREFIX_FIELD)) { + } else if (parsefieldMatcher.match(currentFieldName, PREFIX_FIELD)) { prefix = parser.text(); - } else if (parsefieldMatcher.match(fieldName, REGEX_FIELD)) { + } else if (parsefieldMatcher.match(currentFieldName, REGEX_FIELD)) { regex = parser.text(); } else { - throw new IllegalArgumentException("[suggestion] does not support [" + fieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "suggestion does not support [" + currentFieldName + "]"); } } else if (token == XContentParser.Token.START_OBJECT) { - SuggestionBuilder suggestParser = suggesters.getSuggestionPrototype(fieldName); + SuggestionBuilder suggestParser = suggesters.getSuggestionPrototype(currentFieldName); if (suggestParser == null) { - throw new 
IllegalArgumentException("Suggester[" + fieldName + "] not supported"); + throw new ParsingException(parser.getTokenLocation(), "suggestion [" + currentFieldName + "] not supported"); } suggestionBuilder = suggestParser.innerFromXContent(parseContext); } @@ -182,10 +205,6 @@ public abstract class SuggestionBuilder> extends public SuggestionContext build(QueryShardContext context) throws IOException { SuggestionContext suggestionContext = innerBuild(context); - // TODO make field mandatory in the builder, then remove this - if (suggestionContext.getField() == null) { - throw new IllegalArgumentException("The required field option is missing"); - } return suggestionContext; } @@ -254,20 +273,6 @@ public abstract class SuggestionBuilder> extends return getWriteableName(); } - - /** - * Sets from what field to fetch the candidate suggestions from. This is an - * required option and needs to be set via this setter or - * {@link org.elasticsearch.search.suggest.term.TermSuggestionBuilder#field(String)} - * method - */ - @SuppressWarnings("unchecked") - public T field(String field) { - Objects.requireNonNull(field, "fieldname must not be null"); - this.fieldname = field; - return (T)this; - } - /** * get the {@link #field()} parameter */ @@ -298,7 +303,7 @@ public abstract class SuggestionBuilder> extends @SuppressWarnings("unchecked") public T size(int size) { if (size <= 0) { - throw new IllegalArgumentException("Size must be positive"); + throw new IllegalArgumentException("size must be positive"); } this.size = size; return (T)this; @@ -339,8 +344,8 @@ public abstract class SuggestionBuilder> extends @Override public final T readFrom(StreamInput in) throws IOException { - T suggestionBuilder = doReadFrom(in); - suggestionBuilder.fieldname = in.readOptionalString(); + String fieldname = in.readString(); + T suggestionBuilder = doReadFrom(in, fieldname); suggestionBuilder.text = in.readOptionalString(); suggestionBuilder.prefix = in.readOptionalString(); 
suggestionBuilder.regex = in.readOptionalString(); @@ -353,13 +358,14 @@ public abstract class SuggestionBuilder> extends /** * Subclass should return a new instance, reading itself from the input string * @param in the input string to read from + * @param fieldname the fieldname needed for ctor or concrete suggestion */ - protected abstract T doReadFrom(StreamInput in) throws IOException; + protected abstract T doReadFrom(StreamInput in, String fieldname) throws IOException; @Override public final void writeTo(StreamOutput out) throws IOException { + out.writeString(fieldname); doWriteTo(out); - out.writeOptionalString(fieldname); out.writeOptionalString(text); out.writeOptionalString(prefix); out.writeOptionalString(regex); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index d8c7ededb954..f2540e4862cf 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -50,7 +50,7 @@ import java.util.Set; */ public class CompletionSuggestionBuilder extends SuggestionBuilder { - public static final CompletionSuggestionBuilder PROTOTYPE = new CompletionSuggestionBuilder(); + public static final CompletionSuggestionBuilder PROTOTYPE = new CompletionSuggestionBuilder("_na_"); static final String SUGGESTION_NAME = "completion"; static final ParseField PAYLOAD_FIELD = new ParseField("payload"); static final ParseField CONTEXTS_FIELD = new ParseField("contexts", "context"); @@ -60,6 +60,10 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder> queryContexts = new HashMap<>(); private final Set payloadFields = new HashSet<>(); + public CompletionSuggestionBuilder(String fieldname) { + super(fieldname); + } + /** * Sets the prefix to provide completions 
for. * The prefix gets analyzed by the suggest analyzer. @@ -229,8 +233,8 @@ public class CompletionSuggestionBuilder extends SuggestionBuilderphrase). */ -public final class PhraseSuggestionBuilder extends SuggestionBuilder { +public class PhraseSuggestionBuilder extends SuggestionBuilder { private static final String SUGGESTION_NAME = "phrase"; - public static final PhraseSuggestionBuilder PROTOTYPE = new PhraseSuggestionBuilder(); + public static final PhraseSuggestionBuilder PROTOTYPE = new PhraseSuggestionBuilder("_na_"); protected static final ParseField MAXERRORS_FIELD = new ParseField("max_errors"); protected static final ParseField RWE_LIKELIHOOD_FIELD = new ParseField("real_word_error_likelihood"); @@ -93,6 +94,32 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder> generators = new HashMap<>(); + public PhraseSuggestionBuilder(String fieldname) { + super(fieldname); + } + + /** + * internal copy constructor that copies over all class fields except for the fieldname which is + * set to the one provided in the first argument + */ + private PhraseSuggestionBuilder(String fieldname, PhraseSuggestionBuilder in) { + super(fieldname, in); + maxErrors = in.maxErrors; + separator = in.separator; + realWordErrorLikelihood = in.realWordErrorLikelihood; + confidence = in.confidence; + gramSize = in.gramSize; + forceUnigrams = in.forceUnigrams; + tokenLimit = in.tokenLimit; + preTag = in.preTag; + postTag = in.postTag; + collateQuery = in.collateQuery; + collateParams = in.collateParams; + collatePrune = in.collatePrune; + model = in.model; + generators.putAll(in.generators); + } + /** * Sets the gram size for the n-gram model used for this suggester. The * default value is 1 corresponding to unigrams. 
Use @@ -395,102 +422,114 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder { - public static final TermSuggestionBuilder PROTOTYPE = new TermSuggestionBuilder(); + public static final TermSuggestionBuilder PROTOTYPE = new TermSuggestionBuilder("_na_"); private static final String SUGGESTION_NAME = "term"; private SuggestMode suggestMode = SuggestMode.MISSING; - private Float accuracy = DEFAULT_ACCURACY; + private float accuracy = DEFAULT_ACCURACY; private SortBy sort = SortBy.SCORE; private StringDistanceImpl stringDistance = StringDistanceImpl.INTERNAL; - private Integer maxEdits = DEFAULT_MAX_EDITS; - private Integer maxInspections = DEFAULT_MAX_INSPECTIONS; - private Float maxTermFreq = DEFAULT_MAX_TERM_FREQ; - private Integer prefixLength = DEFAULT_PREFIX_LENGTH; - private Integer minWordLength = DEFAULT_MIN_WORD_LENGTH; - private Float minDocFreq = DEFAULT_MIN_DOC_FREQ; + private int maxEdits = DEFAULT_MAX_EDITS; + private int maxInspections = DEFAULT_MAX_INSPECTIONS; + private float maxTermFreq = DEFAULT_MAX_TERM_FREQ; + private int prefixLength = DEFAULT_PREFIX_LENGTH; + private int minWordLength = DEFAULT_MIN_WORD_LENGTH; + private float minDocFreq = DEFAULT_MIN_DOC_FREQ; + + public TermSuggestionBuilder(String fieldname) { + super(fieldname); + } + + /** + * internal copy constructor that copies over all class field except fieldname. 
+ */ + private TermSuggestionBuilder(String fieldname, TermSuggestionBuilder in) { + super(fieldname, in); + suggestMode = in.suggestMode; + accuracy = in.accuracy; + sort = in.sort; + stringDistance = in.stringDistance; + maxEdits = in.maxEdits; + maxInspections = in.maxInspections; + maxTermFreq = in.maxTermFreq; + prefixLength = in.prefixLength; + minWordLength = in.minWordLength; + minDocFreq = in.minDocFreq; + } /** * The global suggest mode controls what suggested terms are included or @@ -126,7 +148,7 @@ public class TermSuggestionBuilder extends SuggestionBuilder randomAsciiOfLengthBetween(2, 20))); break; @@ -322,15 +321,12 @@ public abstract class AbstractSuggestionBuilderTestCase randomAsciiOfLengthBetween(2, 20))); break; case 3: - mutation.field(randomValueOtherThan(mutation.field(), () -> randomAsciiOfLengthBetween(2, 20))); - break; - case 4: mutation.analyzer(randomValueOtherThan(mutation.analyzer(), () -> randomAsciiOfLengthBetween(2, 20))); break; - case 5: + case 4: mutation.size(randomValueOtherThan(mutation.size(), () -> randomIntBetween(1, 20))); break; - case 6: + case 5: mutation.shardSize(randomValueOtherThan(mutation.shardSize(), () -> randomIntBetween(1, 20))); break; } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 6813ebd66454..bfb31de216c9 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -105,7 +105,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { )); } indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); assertSuggestions("foo", prefix, 
"suggestion10", "suggestion9", "suggestion8", "suggestion7", "suggestion6"); } @@ -126,7 +126,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { )); } indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).regex("sugg.*es"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).regex("sugg.*es"); assertSuggestions("foo", prefix, "sugg10estion", "sugg9estion", "sugg8estion", "sugg7estion", "sugg6estion"); } @@ -147,7 +147,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { )); } indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg", Fuzziness.ONE); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg", Fuzziness.ONE); assertSuggestions("foo", prefix, "sugxgestion10", "sugxgestion9", "sugxgestion8", "sugxgestion7", "sugxgestion6"); } @@ -173,13 +173,13 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { for (int i = 0; i < size; i++) { outputs[i] = "suggestion" + (numDocs - i); } - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sug").size(size); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sug").size(size); assertSuggestions("foo", prefix, outputs); - CompletionSuggestionBuilder regex = SuggestBuilders.completionSuggestion().field(FIELD).regex("su[g|s]g").size(size); + CompletionSuggestionBuilder regex = SuggestBuilders.completionSuggestion(FIELD).regex("su[g|s]g").size(size); assertSuggestions("foo", regex, outputs); - CompletionSuggestionBuilder fuzzyPrefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg", Fuzziness.ONE).size(size); + CompletionSuggestionBuilder fuzzyPrefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg", Fuzziness.ONE).size(size); 
assertSuggestions("foo", fuzzyPrefix, outputs); } @@ -198,7 +198,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"). + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"). size(numDocs).payload(Collections.singletonList("count")); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", prefix).execute().actionGet(); assertNoFailures(suggestResponse); @@ -245,7 +245,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { client().prepareIndex(INDEX, TYPE, "2").setSource(FIELD, "suggestion") ); indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg") + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") .payload(Collections.singletonList("test_field")); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", prefix).execute().actionGet(); assertNoFailures(suggestResponse); @@ -283,7 +283,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "2").setSource(source)); indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg") + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") .payload(Arrays.asList("title", "count")); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", prefix).execute().actionGet(); assertNoFailures(suggestResponse); @@ -334,7 +334,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { payloadFields.add("test_field" + i); } - CompletionSuggestionBuilder prefix = 
SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg") + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") .size(suggestionSize).payload(payloadFields); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", prefix).execute().actionGet(); assertNoFailures(suggestResponse); @@ -435,7 +435,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { refresh(); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("testSuggestions", - new CompletionSuggestionBuilder().field(FIELD).text("test").size(10) + new CompletionSuggestionBuilder(FIELD).text("test").size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, "testSuggestions", "testing"); @@ -636,7 +636,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { assertThat(putMappingResponse.isAcknowledged(), is(true)); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("suggs", - SuggestBuilders.completionSuggestion().field(FIELD + ".suggest").text("f").size(10) + SuggestBuilders.completionSuggestion(FIELD + ".suggest").text("f").size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, "suggs"); @@ -644,7 +644,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { ensureGreen(INDEX); SuggestResponse afterReindexingResponse = client().prepareSuggest(INDEX).addSuggestion("suggs", - SuggestBuilders.completionSuggestion().field(FIELD + ".suggest").text("f").size(10) + SuggestBuilders.completionSuggestion(FIELD + ".suggest").text("f").size(10) ).execute().actionGet(); assertSuggestions(afterReindexingResponse, "suggs", "Foo Fighters"); } @@ -661,12 +661,12 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { refresh(); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", - SuggestBuilders.completionSuggestion().field(FIELD).prefix("Nirv").size(10) + 
SuggestBuilders.completionSuggestion(FIELD).prefix("Nirv").size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "Nirvana"); suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", - SuggestBuilders.completionSuggestion().field(FIELD).prefix("Nirw", Fuzziness.ONE).size(10) + SuggestBuilders.completionSuggestion(FIELD).prefix("Nirw", Fuzziness.ONE).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "Nirvana"); } @@ -684,13 +684,13 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { // edit distance 1 SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", - SuggestBuilders.completionSuggestion().field(FIELD).prefix("Norw", Fuzziness.ONE).size(10) + SuggestBuilders.completionSuggestion(FIELD).prefix("Norw", Fuzziness.ONE).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo"); // edit distance 2 suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", - SuggestBuilders.completionSuggestion().field(FIELD).prefix("Norw", Fuzziness.TWO).size(10) + SuggestBuilders.completionSuggestion(FIELD).prefix("Norw", Fuzziness.TWO).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "Nirvana"); } @@ -707,12 +707,12 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { refresh(); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", - SuggestBuilders.completionSuggestion().field(FIELD).prefix("Nriv", FuzzyOptions.builder().setTranspositions(false).build()).size(10) + SuggestBuilders.completionSuggestion(FIELD).prefix("Nriv", FuzzyOptions.builder().setTranspositions(false).build()).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo"); suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", - SuggestBuilders.completionSuggestion().field(FIELD).prefix("Nriv", Fuzziness.ONE).size(10) + 
SuggestBuilders.completionSuggestion(FIELD).prefix("Nriv", Fuzziness.ONE).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "Nirvana"); } @@ -729,12 +729,12 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { refresh(); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", - SuggestBuilders.completionSuggestion().field(FIELD).prefix("Nriva", FuzzyOptions.builder().setFuzzyMinLength(6).build()).size(10) + SuggestBuilders.completionSuggestion(FIELD).prefix("Nriva", FuzzyOptions.builder().setFuzzyMinLength(6).build()).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo"); suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", - SuggestBuilders.completionSuggestion().field(FIELD).prefix("Nrivan", FuzzyOptions.builder().setFuzzyMinLength(6).build()).size(10) + SuggestBuilders.completionSuggestion(FIELD).prefix("Nrivan", FuzzyOptions.builder().setFuzzyMinLength(6).build()).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "Nirvana"); } @@ -751,12 +751,12 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { refresh(); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", - SuggestBuilders.completionSuggestion().field(FIELD).prefix("Nirw", FuzzyOptions.builder().setFuzzyPrefixLength(4).build()).size(10) + SuggestBuilders.completionSuggestion(FIELD).prefix("Nirw", FuzzyOptions.builder().setFuzzyPrefixLength(4).build()).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo"); suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", - SuggestBuilders.completionSuggestion().field(FIELD).prefix("Nirvo", FuzzyOptions.builder().setFuzzyPrefixLength(4).build()).size(10) + SuggestBuilders.completionSuggestion(FIELD).prefix("Nirvo", FuzzyOptions.builder().setFuzzyPrefixLength(4).build()).size(10) ).execute().actionGet(); 
assertSuggestions(suggestResponse, false, "foo", "Nirvana"); } @@ -774,18 +774,18 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { // suggestion with a character, which needs unicode awareness org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder completionSuggestionBuilder = - SuggestBuilders.completionSuggestion().field(FIELD).prefix("öööи", FuzzyOptions.builder().setUnicodeAware(true).build()).size(10); + SuggestBuilders.completionSuggestion(FIELD).prefix("öööи", FuzzyOptions.builder().setUnicodeAware(true).build()).size(10); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", completionSuggestionBuilder).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "ööööö"); // removing unicode awareness leads to no result - completionSuggestionBuilder = SuggestBuilders.completionSuggestion().field(FIELD).prefix("öööи", FuzzyOptions.builder().setUnicodeAware(false).build()).size(10); + completionSuggestionBuilder = SuggestBuilders.completionSuggestion(FIELD).prefix("öööи", FuzzyOptions.builder().setUnicodeAware(false).build()).size(10); suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", completionSuggestionBuilder).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo"); // increasing edit distance instead of unicode awareness works again, as this is only a single character - completionSuggestionBuilder = SuggestBuilders.completionSuggestion().field(FIELD).prefix("öööи", FuzzyOptions.builder().setUnicodeAware(false).setFuzziness(Fuzziness.TWO).build()).size(10); + completionSuggestionBuilder = SuggestBuilders.completionSuggestion(FIELD).prefix("öööи", FuzzyOptions.builder().setUnicodeAware(false).setFuzziness(Fuzziness.TWO).build()).size(10); suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", completionSuggestionBuilder).execute().actionGet(); assertSuggestions(suggestResponse, false, "foo", "ööööö"); } @@ -815,8 +815,8 @@ 
public class CompletionSuggestSearchIT extends ESIntegTestCase { refresh(); ensureGreen(); // load the fst index into ram - client().prepareSuggest(INDEX).addSuggestion("foo", SuggestBuilders.completionSuggestion().field(FIELD).prefix("f")).get(); - client().prepareSuggest(INDEX).addSuggestion("foo", SuggestBuilders.completionSuggestion().field(otherField).prefix("f")).get(); + client().prepareSuggest(INDEX).addSuggestion("foo", SuggestBuilders.completionSuggestion(FIELD).prefix("f")).get(); + client().prepareSuggest(INDEX).addSuggestion("foo", SuggestBuilders.completionSuggestion(otherField).prefix("f")).get(); // Get all stats IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats(INDEX).setIndices(INDEX).setCompletion(true).get(); @@ -921,14 +921,14 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { } public void assertSuggestions(String suggestion, String... suggestions) { String suggestionName = RandomStrings.randomAsciiOfLength(random(), 10); - CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion().field(FIELD).text(suggestion).size(10); + CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion(FIELD).text(suggestion).size(10); assertSuggestions(suggestionName, suggestionBuilder, suggestions); } public void assertSuggestionsNotInOrder(String suggestString, String... 
suggestions) { String suggestionName = RandomStrings.randomAsciiOfLength(random(), 10); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(suggestionName, - SuggestBuilders.completionSuggestion().field(FIELD).text(suggestString).size(10) + SuggestBuilders.completionSuggestion(FIELD).text(suggestString).size(10) ).execute().actionGet(); assertSuggestions(suggestResponse, false, suggestionName, suggestions); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java index bffba38dd15f..fa552172b66c 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -90,7 +90,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); } @@ -122,7 +122,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).regex("sugg.*es"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).regex("sugg.*es"); assertSuggestions("foo", prefix, "sugg9estion", "sugg8estion", "sugg7estion", "sugg6estion", "sugg5estion"); } @@ -154,7 +154,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - 
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg", Fuzziness.ONE); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg", Fuzziness.ONE); assertSuggestions("foo", prefix, "sugxgestion9", "sugxgestion8", "sugxgestion7", "sugxgestion6", "sugxgestion5"); } @@ -179,7 +179,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg") + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") .categoryContexts("cat", CategoryQueryContext.builder().setCategory("cat0").build()); assertSuggestions("foo", prefix, "suggestion8", "suggestion6", "suggestion4", "suggestion2", "suggestion0"); @@ -206,7 +206,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg") + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") .categoryContexts("cat", CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(), CategoryQueryContext.builder().setCategory("cat1").build() @@ -236,7 +236,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); } @@ -266,17 +266,17 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { 
ensureYellow(INDEX); // filter only on context cat - CompletionSuggestionBuilder catFilterSuggest = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder catFilterSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); catFilterSuggest.categoryContexts("cat", CategoryQueryContext.builder().setCategory("cat0").build()); assertSuggestions("foo", catFilterSuggest, "suggestion8", "suggestion6", "suggestion4", "suggestion2", "suggestion0"); // filter only on context type - CompletionSuggestionBuilder typeFilterSuggest = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder typeFilterSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); typeFilterSuggest.categoryContexts("type", CategoryQueryContext.builder().setCategory("type2").build(), CategoryQueryContext.builder().setCategory("type1").build()); assertSuggestions("foo", typeFilterSuggest, "suggestion9", "suggestion6", "suggestion5", "suggestion2", "suggestion1"); - CompletionSuggestionBuilder multiContextFilterSuggest = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder multiContextFilterSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); // query context order should never matter if (randomBoolean()) { multiContextFilterSuggest.categoryContexts("type", CategoryQueryContext.builder().setCategory("type2").build()); @@ -314,21 +314,21 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { ensureYellow(INDEX); // boost only on context cat - CompletionSuggestionBuilder catBoostSuggest = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder catBoostSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); catBoostSuggest.categoryContexts("cat", CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(), 
CategoryQueryContext.builder().setCategory("cat1").build()); assertSuggestions("foo", catBoostSuggest, "suggestion8", "suggestion6", "suggestion4", "suggestion9", "suggestion2"); // boost only on context type - CompletionSuggestionBuilder typeBoostSuggest = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder typeBoostSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); typeBoostSuggest.categoryContexts("type", CategoryQueryContext.builder().setCategory("type2").setBoost(2).build(), CategoryQueryContext.builder().setCategory("type1").setBoost(4).build()); assertSuggestions("foo", typeBoostSuggest, "suggestion9", "suggestion5", "suggestion6", "suggestion1", "suggestion2"); // boost on both contexts - CompletionSuggestionBuilder multiContextBoostSuggest = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder multiContextBoostSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); // query context order should never matter if (randomBoolean()) { multiContextBoostSuggest.categoryContexts("type", @@ -375,7 +375,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); } @@ -406,7 +406,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); assertSuggestions("foo", prefix, 
"suggestion0", "suggestion1", "suggestion2", "suggestion3", "suggestion4"); } @@ -432,7 +432,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); } @@ -459,10 +459,10 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); - CompletionSuggestionBuilder geoFilteringPrefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg") + CompletionSuggestionBuilder geoFilteringPrefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") .geoContexts("geo", GeoQueryContext.builder().setGeoPoint(new GeoPoint(geoPoints[0])).build()); assertSuggestions("foo", geoFilteringPrefix, "suggestion8", "suggestion6", "suggestion4", "suggestion2", "suggestion0"); @@ -491,12 +491,12 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); GeoQueryContext context1 = 
GeoQueryContext.builder().setGeoPoint(geoPoints[0]).setBoost(2).build(); GeoQueryContext context2 = GeoQueryContext.builder().setGeoPoint(geoPoints[1]).build(); - CompletionSuggestionBuilder geoBoostingPrefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg") + CompletionSuggestionBuilder geoBoostingPrefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") .geoContexts("geo", context1, context2); assertSuggestions("foo", geoBoostingPrefix, "suggestion8", "suggestion6", "suggestion4", "suggestion9", "suggestion7"); @@ -527,7 +527,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg") + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") .geoContexts("geo", GeoQueryContext.builder().setGeoPoint(new GeoPoint(52.2263, 4.543)).build()); assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); } @@ -565,10 +565,10 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); - CompletionSuggestionBuilder geoNeighbourPrefix = SuggestBuilders.completionSuggestion().field(FIELD).prefix("sugg") + CompletionSuggestionBuilder geoNeighbourPrefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") .geoContexts("geo", GeoQueryContext.builder().setGeoPoint(GeoPoint.fromGeohash(geohash)).build()); assertSuggestions("foo", geoNeighbourPrefix, "suggestion9", "suggestion8", 
"suggestion7", "suggestion6", "suggestion5"); @@ -625,7 +625,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { refresh(); String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion().field(FIELD).text("h").size(10) + CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).text("h").size(10) .geoContexts("st", GeoQueryContext.builder().setGeoPoint(new GeoPoint(52.52, 13.4)).build()); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(suggestionName, context).get(); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java index ca8d4fa862e1..178e353b7a9e 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java @@ -86,17 +86,15 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase { public final static CustomSuggestionBuilder PROTOTYPE = new CustomSuggestionBuilder("_na_", "_na_"); - private String randomField; private String randomSuffix; public CustomSuggestionBuilder(String randomField, String randomSuffix) { - this.randomField = randomField; + super(randomField); this.randomSuffix = randomSuffix; } @Override protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { - builder.field("field", randomField); builder.field("suffix", randomSuffix); return builder; } @@ -108,39 +106,39 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase { @Override public void doWriteTo(StreamOutput out) throws IOException { - out.writeString(randomField); out.writeString(randomSuffix); } @Override - public CustomSuggestionBuilder doReadFrom(StreamInput in) throws IOException { - return new CustomSuggestionBuilder(in.readString(), 
in.readString()); + public CustomSuggestionBuilder doReadFrom(StreamInput in, String fieldname) throws IOException { + return new CustomSuggestionBuilder(fieldname, in.readString()); } @Override protected boolean doEquals(CustomSuggestionBuilder other) { - return Objects.equals(randomField, other.randomField) && - Objects.equals(randomSuffix, other.randomSuffix); + return Objects.equals(randomSuffix, other.randomSuffix); } @Override protected int doHashCode() { - return Objects.hash(randomField, randomSuffix); + return Objects.hash(randomSuffix); } @Override protected CustomSuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException { // TODO some parsing - return new CustomSuggestionBuilder(randomField, randomSuffix); + return new CustomSuggestionBuilder(field(), randomSuffix); } @Override protected SuggestionContext innerBuild(QueryShardContext context) throws IOException { Map options = new HashMap<>(); - options.put("field", randomField); + options.put("field", field()); options.put("suffix", randomSuffix); - return new CustomSuggester.CustomSuggestionsContext(context, options); + CustomSuggester.CustomSuggestionsContext customSuggestionsContext = new CustomSuggester.CustomSuggestionsContext(context, options); + customSuggestionsContext.setField(field()); + return customSuggestionsContext; } } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java index 58e719dcaa2e..c5547163d279 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java @@ -42,7 +42,7 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe @Override protected CompletionSuggestionBuilder randomSuggestionBuilder() { - 
CompletionSuggestionBuilder testBuilder = new CompletionSuggestionBuilder(); + CompletionSuggestionBuilder testBuilder = new CompletionSuggestionBuilder(randomAsciiOfLengthBetween(2, 20)); switch (randomIntBetween(0, 3)) { case 0: testBuilder.prefix(randomAsciiOfLength(10)); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java index cedbb91fc615..3011acee57ae 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java @@ -47,7 +47,7 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC } public static PhraseSuggestionBuilder randomPhraseSuggestionBuilder() { - PhraseSuggestionBuilder testBuilder = new PhraseSuggestionBuilder(); + PhraseSuggestionBuilder testBuilder = new PhraseSuggestionBuilder(randomAsciiOfLengthBetween(2, 20)); maybeSet(testBuilder::maxErrors, randomFloat()); maybeSet(testBuilder::separator, randomAsciiOfLengthBetween(1, 10)); maybeSet(testBuilder::realWordErrorLikelihood, randomFloat()); @@ -193,4 +193,83 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC } } + public void testInvalidParameters() throws IOException { + // test missing field name + try { + new PhraseSuggestionBuilder(null); + fail("Should not allow null as field name"); + } catch (NullPointerException e) { + assertEquals("suggestion requires a field name", e.getMessage()); + } + + // test emtpy field name + try { + new PhraseSuggestionBuilder(""); + fail("Should not allow empty string as field name"); + } catch (IllegalArgumentException e) { + assertEquals("suggestion field name is empty", e.getMessage()); + } + + PhraseSuggestionBuilder builder = new PhraseSuggestionBuilder(randomAsciiOfLengthBetween(2, 20)); + try { + 
builder.gramSize(0); + fail("Should not allow gramSize < 1"); + } catch (IllegalArgumentException e) { + assertEquals("gramSize must be >= 1", e.getMessage()); + } + + try { + builder.gramSize(-1); + fail("Should not allow gramSize < 1"); + } catch (IllegalArgumentException e) { + assertEquals("gramSize must be >= 1", e.getMessage()); + } + + try { + builder.maxErrors(-1); + fail("Should not allow maxErrors < 0"); + } catch (IllegalArgumentException e) { + assertEquals("max_error must be > 0.0", e.getMessage()); + } + + try { + builder.separator(null); + fail("Should not allow null as separator"); + } catch (NullPointerException e) { + assertEquals("separator cannot be set to null", e.getMessage()); + } + + try { + builder.realWordErrorLikelihood(-1); + fail("Should not allow real world error likelihood < 0"); + } catch (IllegalArgumentException e) { + assertEquals("real_word_error_likelihood must be > 0.0", e.getMessage()); + } + + try { + builder.confidence(-1); + fail("Should not allow confidence < 0"); + } catch (IllegalArgumentException e) { + assertEquals("confidence must be >= 0.0", e.getMessage()); + } + + try { + builder.tokenLimit(0); + fail("token_limit must be >= 1"); + } catch (IllegalArgumentException e) { + assertEquals("token_limit must be >= 1", e.getMessage()); + } + + try { + if (randomBoolean()) { + builder.highlight(null, ""); + } else { + builder.highlight("", null); + } + fail("Pre and post tag must both be null or both not be null."); + } catch (IllegalArgumentException e) { + assertEquals("Pre and post tag must both be null or both not be null.", e.getMessage()); + } + } + } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java index ee0e92279d73..e0c2a33664af 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java +++ 
b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java @@ -28,7 +28,13 @@ import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; import java.io.IOException; -import static org.hamcrest.Matchers.notNullValue; +import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_ACCURACY; +import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MAX_EDITS; +import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MAX_INSPECTIONS; +import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MAX_TERM_FREQ; +import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MIN_DOC_FREQ; +import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MIN_WORD_LENGTH; +import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_PREFIX_LENGTH; /** * Test the {@link TermSuggestionBuilder} class. @@ -40,7 +46,7 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas */ @Override protected TermSuggestionBuilder randomSuggestionBuilder() { - TermSuggestionBuilder testBuilder = new TermSuggestionBuilder(); + TermSuggestionBuilder testBuilder = new TermSuggestionBuilder(randomAsciiOfLengthBetween(2, 20)); maybeSet(testBuilder::suggestMode, randomSuggestMode()); maybeSet(testBuilder::accuracy, randomFloat()); maybeSet(testBuilder::sort, randomSort()); @@ -124,7 +130,23 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas } public void testInvalidParameters() throws IOException { - TermSuggestionBuilder builder = new TermSuggestionBuilder(); + // test missing field name + try { + new TermSuggestionBuilder(null); + fail("Should not allow null as field name"); + } catch (NullPointerException e) { + assertEquals("suggestion requires a field name", e.getMessage()); + } + + // test emtpy field name + try { + new 
TermSuggestionBuilder(""); + fail("Should not allow empty string as field name"); + } catch (IllegalArgumentException e) { + assertEquals("suggestion field name is empty", e.getMessage()); + } + + TermSuggestionBuilder builder = new TermSuggestionBuilder(randomAsciiOfLengthBetween(2, 20)); // test invalid accuracy values try { builder.accuracy(-0.5f); @@ -237,17 +259,17 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas } public void testDefaultValuesSet() { - TermSuggestionBuilder builder = new TermSuggestionBuilder(); - assertThat(builder.accuracy(), notNullValue()); - assertThat(builder.maxEdits(), notNullValue()); - assertThat(builder.maxInspections(), notNullValue()); - assertThat(builder.maxTermFreq(), notNullValue()); - assertThat(builder.minDocFreq(), notNullValue()); - assertThat(builder.minWordLength(), notNullValue()); - assertThat(builder.prefixLength(), notNullValue()); - assertThat(builder.sort(), notNullValue()); - assertThat(builder.stringDistance(), notNullValue()); - assertThat(builder.suggestMode(), notNullValue()); + TermSuggestionBuilder builder = new TermSuggestionBuilder(randomAsciiOfLengthBetween(2, 20)); + assertEquals(DEFAULT_ACCURACY, builder.accuracy(), Float.MIN_VALUE); + assertEquals(DEFAULT_MAX_EDITS, builder.maxEdits()); + assertEquals(DEFAULT_MAX_INSPECTIONS, builder.maxInspections()); + assertEquals(DEFAULT_MAX_TERM_FREQ, builder.maxTermFreq(), Float.MIN_VALUE); + assertEquals(DEFAULT_MIN_DOC_FREQ, builder.minDocFreq(), Float.MIN_VALUE); + assertEquals(DEFAULT_MIN_WORD_LENGTH, builder.minWordLength()); + assertEquals(DEFAULT_PREFIX_LENGTH, builder.prefixLength()); + assertEquals(SortBy.SCORE, builder.sort()); + assertEquals(StringDistanceImpl.INTERNAL, builder.stringDistance()); + assertEquals(SuggestMode.MISSING, builder.suggestMode()); } @Override diff --git a/docs/reference/migration/migrate_5_0.asciidoc b/docs/reference/migration/migrate_5_0.asciidoc index a87719e2c899..ac6f418fad22 100644 --- 
a/docs/reference/migration/migrate_5_0.asciidoc +++ b/docs/reference/migration/migrate_5_0.asciidoc @@ -671,6 +671,13 @@ The inner DirectCandidateGenerator class has been moved out to its own class cal The `setText` method has been changed to `setGlobalText` to make the intent more clear, and a `getGlobalText` method has been added. +The `addSuggestion` method now required the user specified suggestion name, previously used in the ctor of each +suggestion. + +=== SuggestionBuilder + +The `field` setter has been deleted. Instead the field name needs to be specified as constructor argument. + ==== Elasticsearch will no longer detect logging implementations Elasticsearch now logs only to log4j 1.2. Previously if log4j wasn't on the classpath it made some effort to degrade to diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java index c828013f762c..cdafa3363cc9 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java @@ -20,38 +20,6 @@ package org.elasticsearch.messy.tests; -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; -import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion; -import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestion; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionPhraseCollateMatchExists; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionSize; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; -import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.endsWith; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.nullValue; - -import java.io.IOException; -import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.concurrent.ExecutionException; - import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -81,6 +49,38 @@ import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; +import java.io.IOException; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.concurrent.ExecutionException; + +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; 
+import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.elasticsearch.index.query.QueryBuilders.matchQuery; +import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion; +import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestion; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionPhraseCollateMatchExists; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionSize; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.nullValue; + /** * Integration tests for term and phrase suggestions. Many of these tests many requests that vary only slightly from one another. Where * possible these tests should declare for the first request, make the request, modify the configuration for the next request, make that @@ -104,10 +104,9 @@ public class SuggestSearchTests extends ESIntegTestCase { index("test", "type1", "4", "text", "abcc"); refresh(); - TermSuggestionBuilder termSuggest = termSuggestion() + TermSuggestionBuilder termSuggest = termSuggestion("text") .suggestMode(TermSuggestionBuilder.SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. 
- .text("abcd") - .field("text"); + .text("abcd"); logger.info("--> run suggestions with one index"); searchSuggest("test", termSuggest); createIndex("test_1"); @@ -118,11 +117,10 @@ public class SuggestSearchTests extends ESIntegTestCase { index("test_1", "type1", "3", "text", "ab bd"); index("test_1", "type1", "4", "text", "ab cc"); refresh(); - termSuggest = termSuggestion() + termSuggest = termSuggestion("text") .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. .text("ab cd") - .minWordLength(1) - .field("text"); + .minWordLength(1); logger.info("--> run suggestions with two indices"); searchSuggest("test", termSuggest); @@ -145,11 +143,10 @@ public class SuggestSearchTests extends ESIntegTestCase { index("test_2", "type1", "4", "text", "abcc"); refresh(); - termSuggest = termSuggestion() + termSuggest = termSuggestion("text") .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. .text("ab cd") - .minWordLength(1) - .field("text"); + .minWordLength(1); logger.info("--> run suggestions with three indices"); try { searchSuggest("test", termSuggest); @@ -165,11 +162,10 @@ public class SuggestSearchTests extends ESIntegTestCase { } - termSuggest = termSuggestion() + termSuggest = termSuggestion("text") .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. 
.text("ABCD") - .minWordLength(1) - .field("text"); + .minWordLength(1); logger.info("--> run suggestions with four indices"); try { searchSuggest("test", termSuggest); @@ -219,7 +215,7 @@ public class SuggestSearchTests extends ESIntegTestCase { refresh(); DirectCandidateGeneratorBuilder generator = candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2); - PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion().field("name.shingled") + PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("name.shingled") .addCandidateGenerator(generator) .gramSize(3); Suggest searchSuggest = searchSuggest("ice tea", "did_you_mean", phraseSuggestion); @@ -255,10 +251,9 @@ public class SuggestSearchTests extends ESIntegTestCase { SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "spellchecker")).get(); assertThat("didn't ask for suggestions but got some", search.getSuggest(), nullValue()); - TermSuggestionBuilder termSuggestion = termSuggestion() + TermSuggestionBuilder termSuggestion = termSuggestion("text") .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. 
.text("abcd") - .field("text") .size(10); Suggest suggest = searchSuggest("test", termSuggestion); assertSuggestion(suggest, 0, "test", 10, "abc0"); @@ -298,13 +293,15 @@ public class SuggestSearchTests extends ESIntegTestCase { client().prepareIndex("test", "type1").setSource("name", "I like ice cream.")); refresh(); - PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion().field("name.shingled") + PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("name.shingled") .addCandidateGenerator(candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2)) .gramSize(3); Suggest searchSuggest = searchSuggest("ice tea", "did_you_mean", phraseSuggestion); assertSuggestion(searchSuggest, 0, 0, "did_you_mean", "iced tea"); - phraseSuggestion.field("nosuchField"); + phraseSuggestion = phraseSuggestion("nosuchField") + .addCandidateGenerator(candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2)) + .gramSize(3); { SearchRequestBuilder searchBuilder = client().prepareSearch().setSize(0); searchBuilder.suggest(new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean", phraseSuggestion)); @@ -330,10 +327,9 @@ public class SuggestSearchTests extends ESIntegTestCase { SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "spellcecker")).get(); assertThat("didn't ask for suggestions but got some", search.getSuggest(), nullValue()); - TermSuggestionBuilder termSuggest = termSuggestion() + TermSuggestionBuilder termSuggest = termSuggestion("text") .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. 
- .text("abcd") - .field("text"); + .text("abcd"); Suggest suggest = searchSuggest("test", termSuggest); assertSuggestion(suggest, 0, "test", "aacd", "abbd", "abcc"); assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd")); @@ -350,10 +346,9 @@ public class SuggestSearchTests extends ESIntegTestCase { index("test", "type1", "1", "foo", "bar"); refresh(); - TermSuggestionBuilder termSuggest = termSuggestion() + TermSuggestionBuilder termSuggest = termSuggestion("text") .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests. - .text("abcd") - .field("text"); + .text("abcd"); Suggest suggest = searchSuggest("test", termSuggest); assertSuggestionSize(suggest, 0, 0, "test"); assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd")); @@ -374,14 +369,14 @@ public class SuggestSearchTests extends ESIntegTestCase { refresh(); Map> suggestions = new HashMap<>(); - suggestions.put("size1", termSuggestion() + suggestions.put("size1", termSuggestion("field1") .size(1).text("prefix_abcd").maxTermFreq(10).prefixLength(1).minDocFreq(0) - .field("field1").suggestMode(SuggestMode.ALWAYS)); - suggestions.put("field2", termSuggestion() - .field("field2").text("prefix_eeeh prefix_efgh") + .suggestMode(SuggestMode.ALWAYS)); + suggestions.put("field2", termSuggestion("field2") + .text("prefix_eeeh prefix_efgh") .maxTermFreq(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS)); - suggestions.put("accuracy", termSuggestion() - .field("field2").text("prefix_efgh").accuracy(1f) + suggestions.put("accuracy", termSuggestion("field2") + .text("prefix_efgh").accuracy(1f) .maxTermFreq(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS)); Suggest suggest = searchSuggest(null, 0, suggestions); assertSuggestion(suggest, 0, "size1", "prefix_aacd"); @@ -418,16 +413,16 @@ public class SuggestSearchTests extends ESIntegTestCase { refresh(); Map> suggestions = new HashMap<>(); - 
suggestions.put("size3SortScoreFirst", termSuggestion() - .size(3).minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS)); - suggestions.put("size10SortScoreFirst", termSuggestion() - .size(10).minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS).shardSize(50)); - suggestions.put("size3SortScoreFirstMaxEdits1", termSuggestion() + suggestions.put("size3SortScoreFirst", termSuggestion("field1") + .size(3).minDocFreq(0).suggestMode(SuggestMode.ALWAYS)); + suggestions.put("size10SortScoreFirst", termSuggestion("field1") + .size(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS).shardSize(50)); + suggestions.put("size3SortScoreFirstMaxEdits1", termSuggestion("field1") .maxEdits(1) - .size(10).minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS)); - suggestions.put("size10SortFrequencyFirst", termSuggestion() + .size(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS)); + suggestions.put("size10SortFrequencyFirst", termSuggestion("field1") .size(10).sort(SortBy.FREQUENCY).shardSize(1000) - .minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS)); + .minDocFreq(0).suggestMode(SuggestMode.ALWAYS)); Suggest suggest = searchSuggest("prefix_abcd", 0, suggestions); // The commented out assertions fail sometimes because suggestions are based off of shard frequencies instead of index frequencies. 
@@ -453,7 +448,7 @@ public class SuggestSearchTests extends ESIntegTestCase { refresh(); Suggest searchSuggest = searchSuggest( "a an the", "simple_phrase", - phraseSuggestion().field("body").gramSize(1) + phraseSuggestion("body").gramSize(1) .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always")) .size(1)); assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase"); @@ -489,13 +484,13 @@ public class SuggestSearchTests extends ESIntegTestCase { refresh(); Suggest searchSuggest = searchSuggest( "hello word", "simple_phrase", - phraseSuggestion().field("body") + phraseSuggestion("body") .addCandidateGenerator(candidateGenerator("body").prefixLength(4).minWordLength(1).suggestMode("always")) .size(1).confidence(1.0f)); assertSuggestion(searchSuggest, 0, "simple_phrase", "hello words"); searchSuggest = searchSuggest( "hello word", "simple_phrase", - phraseSuggestion().field("body") + phraseSuggestion("body") .addCandidateGenerator(candidateGenerator("body").prefixLength(2).minWordLength(1).suggestMode("always")) .size(1).confidence(1.0f)); assertSuggestion(searchSuggest, 0, "simple_phrase", "hello world"); @@ -543,8 +538,7 @@ public class SuggestSearchTests extends ESIntegTestCase { } refresh(); - PhraseSuggestionBuilder phraseSuggest = phraseSuggestion() - .field("bigram").gramSize(2).analyzer("body") + PhraseSuggestionBuilder phraseSuggest = phraseSuggestion("bigram").gramSize(2).analyzer("body") .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always")) .size(1); Suggest searchSuggest = searchSuggest( "american ame", "simple_phrase", phraseSuggest); @@ -680,9 +674,8 @@ public class SuggestSearchTests extends ESIntegTestCase { index("test", "type1", "2", "body", line, "body_reverse", line, "bigram", line); refresh(); - PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion() + PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("bigram") .realWordErrorLikelihood(0.95f) - .field("bigram") 
.gramSize(2) .analyzer("body") .addCandidateGenerator(candidateGenerator("body").minWordLength(1).prefixLength(1).suggestMode("always").size(1).accuracy(0.1f)) @@ -740,8 +733,7 @@ public class SuggestSearchTests extends ESIntegTestCase { NumShards numShards = getNumShards("test"); // Lets make sure some things throw exceptions - PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion() - .field("bigram") + PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("bigram") .analyzer("body") .addCandidateGenerator(candidateGenerator("does_not_exist").minWordLength(1).suggestMode("always")) .realWordErrorLikelihood(0.95f) @@ -773,10 +765,15 @@ public class SuggestSearchTests extends ESIntegTestCase { searchSuggest( "Xor the Got-Jewel", 0, suggestion); // Field doesn't produce unigrams but the analyzer does - phraseSuggestion.forceUnigrams(true).field("bigram").analyzer("ngram"); + phraseSuggestion.forceUnigrams(true).analyzer("ngram"); searchSuggest( "Xor the Got-Jewel", 0, suggestion); - phraseSuggestion.field("ngram").analyzer("myDefAnalyzer") + phraseSuggestion = phraseSuggestion("ngram") + .analyzer("myDefAnalyzer") + .forceUnigrams(true) + .realWordErrorLikelihood(0.95f) + .maxErrors(0.5f) + .size(1) .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always")); Suggest suggest = searchSuggest( "Xor the Got-Jewel", 0, suggestion); @@ -801,8 +798,8 @@ public class SuggestSearchTests extends ESIntegTestCase { client().prepareIndex("test", "type1", "3").setSource("field1", "foobar3").setRouting("3")); Suggest suggest = searchSuggest( "foobar", "simple", - termSuggestion() - .size(10).minDocFreq(0).field("field1").suggestMode(SuggestMode.ALWAYS)); + termSuggestion("field1") + .size(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS)); ElasticsearchAssertions.assertSuggestionSize(suggest, 0, 3, "simple"); } @@ -842,14 +839,14 @@ public class SuggestSearchTests extends ESIntegTestCase { SearchRequestBuilder request = 
client().prepareSearch().setSize(0) .suggest( new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean", - phraseSuggestion().field("fielddoesnotexist").maxErrors(5.0f))); + phraseSuggestion("fielddoesnotexist").maxErrors(5.0f))); assertThrows(request, SearchPhaseExecutionException.class); // When searching on a shard which does not hold yet any document of an existing type, we should not fail SearchResponse searchResponse = client().prepareSearch().setSize(0) .suggest( new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean", - phraseSuggestion().field("name").maxErrors(5.0f))) + phraseSuggestion("name").maxErrors(5.0f))) .get(); ElasticsearchAssertions.assertNoFailures(searchResponse); ElasticsearchAssertions.assertSuggestion(searchResponse.getSuggest(), 0, 0, "did_you_mean", "testing suggestions"); @@ -888,7 +885,7 @@ public class SuggestSearchTests extends ESIntegTestCase { .setSize(0) .suggest( new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean", - phraseSuggestion().field("name").maxErrors(5.0f))) + phraseSuggestion("name").maxErrors(5.0f))) .get(); assertNoFailures(searchResponse); @@ -945,16 +942,14 @@ public class SuggestSearchTests extends ESIntegTestCase { } refresh(); - Suggest searchSuggest = searchSuggest("nobel prize", "simple_phrase", phraseSuggestion() - .field("body") + Suggest searchSuggest = searchSuggest("nobel prize", "simple_phrase", phraseSuggestion("body") .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f)) .confidence(2f) .maxErrors(5f) .size(1)); assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase"); - searchSuggest = searchSuggest("noble prize", "simple_phrase", phraseSuggestion() - .field("body") + searchSuggest = searchSuggest("noble prize", "simple_phrase", phraseSuggestion("body") 
.addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f)) .confidence(2f) .maxErrors(5f) @@ -1084,8 +1079,7 @@ public class SuggestSearchTests extends ESIntegTestCase { } indexRandom(true, builders); - PhraseSuggestionBuilder suggest = phraseSuggestion() - .field("title") + PhraseSuggestionBuilder suggest = phraseSuggestion("title") .addCandidateGenerator(candidateGenerator("title") .suggestMode("always") .maxTermFreq(.99f) @@ -1149,8 +1143,7 @@ public class SuggestSearchTests extends ESIntegTestCase { indexRandom(true, builders); // suggest without collate - PhraseSuggestionBuilder suggest = phraseSuggestion() - .field("title") + PhraseSuggestionBuilder suggest = phraseSuggestion("title") .addCandidateGenerator(candidateGenerator("title") .suggestMode("always") .maxTermFreq(.99f) From 76719341dc8f5fb27433020bb3e4663e6e161a62 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Fri, 4 Mar 2016 13:24:39 +0100 Subject: [PATCH 051/320] Fix after merge --- .../common/settings/Setting.java | 22 ++++++++++--------- .../common/settings/SettingTests.java | 3 ++- .../azure/storage/AzureStorageSettings.java | 14 +++++++----- 3 files changed, 22 insertions(+), 17 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 12adef7b4bb2..c6753d243c08 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -165,11 +165,10 @@ public class Setting extends ToXContentToBytes { * @param key the settings key for this setting. * @param defaultValue a default value function that returns the default values string representation. * @param parser a parser that parses the string rep into a complex datatype. 
- * @param dynamic true iff this setting can be dynamically updateable - * @param scope the scope of this setting + * @param properties properties for this setting like scope, filtering... */ - public Setting(String key, Function defaultValue, Function parser, boolean dynamic, Scope scope) { - this(new SimpleKey(key), defaultValue, parser, dynamic, scope); + public Setting(String key, Function defaultValue, Function parser, SettingsProperty... properties) { + this(new SimpleKey(key), defaultValue, parser, properties); } /** @@ -722,8 +721,9 @@ public class Setting extends ToXContentToBytes { * can easily be added with this setting. Yet, prefix key settings don't support updaters out of the box unless * {@link #getConcreteSetting(String)} is used to pull the updater. */ - public static Setting prefixKeySetting(String prefix, String defaultValue, Function parser, boolean dynamic, Scope scope) { - return affixKeySetting(AffixKey.withPrefix(prefix), (s) -> defaultValue, parser, dynamic, scope); + public static Setting prefixKeySetting(String prefix, String defaultValue, Function parser, + SettingsProperty... properties) { + return affixKeySetting(AffixKey.withPrefix(prefix), (s) -> defaultValue, parser, properties); } /** @@ -731,12 +731,14 @@ public class Setting extends ToXContentToBytes { * storage.${backend}.enable=[true|false] can easily be added with this setting. Yet, adfix key settings don't support updaters * out of the box unless {@link #getConcreteSetting(String)} is used to pull the updater. */ - public static Setting adfixKeySetting(String prefix, String suffix, Function defaultValue, Function parser, boolean dynamic, Scope scope) { - return affixKeySetting(AffixKey.withAdfix(prefix, suffix), defaultValue, parser, dynamic, scope); + public static Setting adfixKeySetting(String prefix, String suffix, Function defaultValue, + Function parser, SettingsProperty... 
properties) { + return affixKeySetting(AffixKey.withAdfix(prefix, suffix), defaultValue, parser, properties); } - public static Setting adfixKeySetting(String prefix, String suffix, String defaultValue, Function parser, boolean dynamic, Scope scope) { - return adfixKeySetting(prefix, suffix, (s) -> defaultValue, parser, dynamic, scope); + public static Setting adfixKeySetting(String prefix, String suffix, String defaultValue, Function parser, + SettingsProperty... properties) { + return adfixKeySetting(prefix, suffix, (s) -> defaultValue, parser, properties); } public static Setting affixKeySetting(AffixKey key, Function defaultValue, Function parser, diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 98bb385c1229..841126d1415b 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -384,7 +384,8 @@ public class SettingTests extends ESTestCase { } public void testAdfixKeySetting() { - Setting setting = Setting.adfixKeySetting("foo", "enable", "false", Boolean::parseBoolean, false, Setting.Scope.CLUSTER); + Setting setting = + Setting.adfixKeySetting("foo", "enable", "false", Boolean::parseBoolean, Setting.SettingsProperty.ClusterScope); assertTrue(setting.hasComplexMatcher()); assertTrue(setting.match("foo.bar.enable")); assertTrue(setting.match("foo.baz.enable")); diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java index da9151e9504f..06185845ffb9 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java @@ -46,11 
+46,13 @@ public final class AzureStorageSettings { TIMEOUT_KEY, (s) -> Storage.TIMEOUT_SETTING.get(s).toString(), (s) -> Setting.parseTimeValue(s, TimeValue.timeValueSeconds(-1), TIMEOUT_KEY.toString()), - false, - Setting.Scope.CLUSTER); - private static final Setting ACCOUNT_SETTING = Setting.adfixKeySetting(Storage.PREFIX, ACCOUNT_SUFFIX, "", Function.identity(), false, Setting.Scope.CLUSTER); - private static final Setting KEY_SETTING = Setting.adfixKeySetting(Storage.PREFIX, KEY_SUFFIX, "", Function.identity(), false, Setting.Scope.CLUSTER); - private static final Setting DEFAULT_SETTING = Setting.adfixKeySetting(Storage.PREFIX, DEFAULT_SUFFIX, "false", Boolean::valueOf, false, Setting.Scope.CLUSTER); + Setting.SettingsProperty.ClusterScope); + private static final Setting ACCOUNT_SETTING = + Setting.adfixKeySetting(Storage.PREFIX, ACCOUNT_SUFFIX, "", Function.identity(), Setting.SettingsProperty.ClusterScope); + private static final Setting KEY_SETTING = + Setting.adfixKeySetting(Storage.PREFIX, KEY_SUFFIX, "", Function.identity(), Setting.SettingsProperty.ClusterScope); + private static final Setting DEFAULT_SETTING = + Setting.adfixKeySetting(Storage.PREFIX, DEFAULT_SUFFIX, "false", Boolean::valueOf, Setting.SettingsProperty.ClusterScope); private final String name; @@ -110,7 +112,7 @@ public final class AzureStorageSettings { } private static List createStorageSettings(Settings settings) { - Setting storageGroupSetting = Setting.groupSetting(Storage.PREFIX, false, Setting.Scope.CLUSTER); + Setting storageGroupSetting = Setting.groupSetting(Storage.PREFIX, Setting.SettingsProperty.ClusterScope); // ignore global timeout which has the same prefix but does not belong to any group Settings groups = storageGroupSetting.get(settings.filter((k) -> k.equals(Storage.TIMEOUT_SETTING.getKey()) == false)); List storageSettings = new ArrayList<>(); From 2bb3846d1fd6b120e963762ee9efe8c3120ff412 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Fri, 4 Mar 2016 16:53:22 
+0100 Subject: [PATCH 052/320] Update after review: * remove `ClusterScope` * rename `ClusterSettings` to `NodeSettings` * rename `SettingsProperty` to `Property` --- .../close/TransportCloseIndexAction.java | 4 +- .../action/support/AutoCreateIndex.java | 4 +- .../action/support/DestructiveOperations.java | 4 +- .../master/TransportMasterNodeReadAction.java | 4 +- .../bootstrap/BootstrapSettings.java | 10 +- .../cache/recycler/PageCacheRecycler.java | 15 +-- .../java/org/elasticsearch/client/Client.java | 8 +- .../TransportClientNodesService.java | 10 +- .../elasticsearch/cluster/ClusterModule.java | 4 +- .../elasticsearch/cluster/ClusterName.java | 4 +- .../cluster/InternalClusterInfoService.java | 6 +- .../action/index/MappingUpdatedAction.java | 4 +- .../cluster/metadata/AutoExpandReplicas.java | 4 +- .../cluster/metadata/IndexMetaData.java | 30 ++--- .../cluster/metadata/MetaData.java | 4 +- .../cluster/routing/UnassignedInfo.java | 6 +- .../allocator/BalancedShardsAllocator.java | 8 +- .../decider/AwarenessAllocationDecider.java | 8 +- .../ClusterRebalanceAllocationDecider.java | 4 +- .../ConcurrentRebalanceAllocationDecider.java | 4 +- .../decider/DiskThresholdDecider.java | 12 +- .../decider/EnableAllocationDecider.java | 10 +- .../decider/FilterAllocationDecider.java | 8 +- .../decider/ShardsLimitAllocationDecider.java | 6 +- .../SnapshotInProgressAllocationDecider.java | 4 +- .../decider/ThrottlingAllocationDecider.java | 10 +- .../service/InternalClusterService.java | 8 +- .../common/logging/ESLoggerFactory.java | 7 +- .../common/network/NetworkModule.java | 10 +- .../common/network/NetworkService.java | 28 ++-- .../settings/AbstractScopedSettings.java | 6 +- .../common/settings/ClusterSettings.java | 4 +- .../common/settings/IndexScopedSettings.java | 10 +- .../common/settings/Setting.java | 123 ++++++++---------- .../common/settings/SettingsModule.java | 19 +-- .../common/util/concurrent/EsExecutors.java | 4 +- 
.../common/util/concurrent/ThreadContext.java | 5 +- .../discovery/DiscoveryModule.java | 6 +- .../discovery/DiscoverySettings.java | 12 +- .../discovery/zen/ZenDiscovery.java | 20 +-- .../zen/elect/ElectMasterService.java | 4 +- .../discovery/zen/fd/FaultDetection.java | 12 +- .../zen/ping/unicast/UnicastZenPing.java | 6 +- .../org/elasticsearch/env/Environment.java | 20 +-- .../elasticsearch/env/NodeEnvironment.java | 11 +- .../elasticsearch/gateway/GatewayService.java | 16 +-- .../gateway/PrimaryShardAllocator.java | 6 +- .../http/HttpTransportSettings.java | 44 +++---- .../http/netty/NettyHttpServerTransport.java | 26 ++-- .../org/elasticsearch/index/IndexModule.java | 8 +- .../elasticsearch/index/IndexSettings.java | 34 ++--- .../org/elasticsearch/index/IndexWarmer.java | 4 +- .../elasticsearch/index/IndexingSlowLog.java | 18 +-- .../index/MergePolicyConfig.java | 18 +-- .../index/MergeSchedulerConfig.java | 10 +- .../elasticsearch/index/SearchSlowLog.java | 24 ++-- .../index/cache/bitset/BitsetFilterCache.java | 4 +- .../index/engine/EngineConfig.java | 6 +- .../fielddata/IndexFieldDataService.java | 4 +- .../index/mapper/FieldMapper.java | 6 +- .../index/mapper/MapperService.java | 6 +- .../index/mapper/core/NumberFieldMapper.java | 4 +- .../percolator/PercolatorQueriesRegistry.java | 6 +- .../index/store/FsDirectoryService.java | 5 +- .../elasticsearch/index/store/IndexStore.java | 6 +- .../index/store/IndexStoreConfig.java | 6 +- .../org/elasticsearch/index/store/Store.java | 6 +- .../indices/IndicesQueryCache.java | 6 +- .../indices/IndicesRequestCache.java | 8 +- .../elasticsearch/indices/IndicesService.java | 4 +- .../indices/analysis/HunspellService.java | 8 +- .../HierarchyCircuitBreakerService.java | 16 +-- .../cache/IndicesFieldDataCache.java | 4 +- .../indices/recovery/RecoverySettings.java | 14 +- .../indices/store/IndicesStore.java | 5 +- .../indices/ttl/IndicesTTLService.java | 4 +- .../elasticsearch/monitor/fs/FsService.java | 4 +- 
.../monitor/jvm/JvmGcMonitorService.java | 8 +- .../elasticsearch/monitor/jvm/JvmService.java | 4 +- .../elasticsearch/monitor/os/OsService.java | 4 +- .../monitor/process/ProcessService.java | 4 +- .../java/org/elasticsearch/node/Node.java | 20 +-- .../internal/InternalSettingsPreparer.java | 4 +- .../elasticsearch/plugins/PluginsService.java | 4 +- .../repositories/fs/FsRepository.java | 14 +- .../repositories/uri/URLRepository.java | 14 +- .../elasticsearch/rest/BaseRestHandler.java | 4 +- .../elasticsearch/script/ScriptService.java | 9 +- .../elasticsearch/script/ScriptSettings.java | 10 +- .../elasticsearch/search/SearchService.java | 9 +- .../elasticsearch/threadpool/ThreadPool.java | 4 +- .../elasticsearch/transport/Transport.java | 4 +- .../transport/TransportService.java | 6 +- .../transport/TransportSettings.java | 14 +- .../transport/netty/NettyTransport.java | 44 +++---- .../org/elasticsearch/tribe/TribeService.java | 16 +-- .../repositories/RepositoryBlocksIT.java | 2 +- .../cluster/snapshots/SnapshotBlocksIT.java | 2 +- .../cluster/ClusterModuleTests.java | 6 +- .../cluster/settings/SettingsFilteringIT.java | 8 +- .../common/settings/ScopedSettingsTests.java | 30 ++--- .../common/settings/SettingTests.java | 81 ++++-------- .../common/settings/SettingsModuleTests.java | 12 +- .../elasticsearch/index/IndexModuleTests.java | 6 +- .../index/IndexSettingsTests.java | 10 +- .../index/SettingsListenerIT.java | 6 +- .../indices/IndicesOptionsIntegrationIT.java | 8 +- .../RandomExceptionCircuitBreakerIT.java | 6 +- .../basic/SearchWithRandomExceptionsIT.java | 6 +- .../snapshots/mockstore/MockRepository.java | 6 +- .../azure/management/AzureComputeService.java | 18 +-- .../cloud/aws/AwsEc2Service.java | 54 ++++---- .../cloud/gce/GceComputeService.java | 12 +- .../cloud/gce/GceComputeServiceImpl.java | 9 +- .../gce/GceUnicastHostsProvider.java | 4 +- .../mapper/attachments/AttachmentMapper.java | 8 +- .../azure/storage/AzureStorageService.java | 16 +-- 
.../azure/storage/AzureStorageSettings.java | 10 +- .../repositories/azure/AzureRepository.java | 14 +- .../elasticsearch/cloud/aws/AwsS3Service.java | 40 +++--- .../repositories/s3/S3Repository.java | 58 ++++----- .../elasticsearch/test/ESIntegTestCase.java | 8 +- .../test/InternalSettingsPlugin.java | 8 +- .../test/MockIndexEventListener.java | 4 +- .../test/engine/MockEngineSupport.java | 6 +- .../test/store/MockFSDirectoryService.java | 14 +- .../test/store/MockFSIndexStore.java | 4 +- .../test/tasks/MockTaskManager.java | 4 +- .../transport/AssertingLocalTransport.java | 6 +- 129 files changed, 733 insertions(+), 799 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java index 6065a2ec66e6..5c88a8be3d38 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java @@ -33,7 +33,7 @@ import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -48,7 +48,7 @@ public class TransportCloseIndexAction extends TransportMasterNodeAction CLUSTER_INDICES_CLOSE_ENABLE_SETTING = - Setting.boolSetting("cluster.indices.close.enable", true, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.boolSetting("cluster.indices.close.enable", true, Property.Dynamic, Property.NodeScope); @Inject public TransportCloseIndexAction(Settings 
settings, TransportService transportService, ClusterService clusterService, diff --git a/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java b/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java index 2169d3a1521f..339abcb22bc1 100644 --- a/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java +++ b/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.MapperService; @@ -41,7 +41,7 @@ import java.util.List; public final class AutoCreateIndex { public static final Setting AUTO_CREATE_INDEX_SETTING = - new Setting<>("action.auto_create_index", "true", AutoCreate::new, SettingsProperty.ClusterScope); + new Setting<>("action.auto_create_index", "true", AutoCreate::new, Property.NodeScope); private final boolean dynamicMappingDisabled; private final IndexNameExpressionResolver resolver; diff --git a/core/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java b/core/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java index 6591384271b2..31fc1d06175d 100644 --- a/core/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java +++ b/core/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import 
org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; /** @@ -35,7 +35,7 @@ public final class DestructiveOperations extends AbstractComponent { * Setting which controls whether wildcard usage (*, prefix*, _all) is allowed. */ public static final Setting REQUIRES_NAME_SETTING = - Setting.boolSetting("action.destructive_requires_name", false, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.boolSetting("action.destructive_requires_name", false, Property.Dynamic, Property.NodeScope); private volatile boolean destructiveRequiresName; @Inject diff --git a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java index 08ba0defd739..f53355f24e34 100644 --- a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -39,7 +39,7 @@ public abstract class TransportMasterNodeReadAction { public static final Setting FORCE_LOCAL_SETTING = - Setting.boolSetting("action.master.force_local", false, SettingsProperty.ClusterScope); + Setting.boolSetting("action.master.force_local", false, Property.NodeScope); private final boolean forceLocal; diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapSettings.java 
b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapSettings.java index dd9263330e6d..4e9dffc995b7 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapSettings.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapSettings.java @@ -20,7 +20,7 @@ package org.elasticsearch.bootstrap; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; public final class BootstrapSettings { @@ -29,13 +29,13 @@ public final class BootstrapSettings { // TODO: remove this hack when insecure defaults are removed from java public static final Setting SECURITY_FILTER_BAD_DEFAULTS_SETTING = - Setting.boolSetting("security.manager.filter_bad_defaults", true, SettingsProperty.ClusterScope); + Setting.boolSetting("security.manager.filter_bad_defaults", true, Property.NodeScope); public static final Setting MLOCKALL_SETTING = - Setting.boolSetting("bootstrap.mlockall", false, SettingsProperty.ClusterScope); + Setting.boolSetting("bootstrap.mlockall", false, Property.NodeScope); public static final Setting SECCOMP_SETTING = - Setting.boolSetting("bootstrap.seccomp", true, SettingsProperty.ClusterScope); + Setting.boolSetting("bootstrap.seccomp", true, Property.NodeScope); public static final Setting CTRLHANDLER_SETTING = - Setting.boolSetting("bootstrap.ctrlhandler", true, SettingsProperty.ClusterScope); + Setting.boolSetting("bootstrap.ctrlhandler", true, Property.NodeScope); } diff --git a/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java b/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java index f58947409e94..2fad8678649d 100644 --- a/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java +++ b/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java @@ -19,14 +19,13 @@ package org.elasticsearch.cache.recycler; -import 
org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.recycler.AbstractRecyclerC; import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; @@ -45,18 +44,18 @@ import static org.elasticsearch.common.recycler.Recyclers.none; public class PageCacheRecycler extends AbstractComponent implements Releasable { public static final Setting TYPE_SETTING = - new Setting<>("cache.recycler.page.type", Type.CONCURRENT.name(), Type::parse, SettingsProperty.ClusterScope); + new Setting<>("cache.recycler.page.type", Type.CONCURRENT.name(), Type::parse, Property.NodeScope); public static final Setting LIMIT_HEAP_SETTING = - Setting.byteSizeSetting("cache.recycler.page.limit.heap", "10%", SettingsProperty.ClusterScope); + Setting.byteSizeSetting("cache.recycler.page.limit.heap", "10%", Property.NodeScope); public static final Setting WEIGHT_BYTES_SETTING = - Setting.doubleSetting("cache.recycler.page.weight.bytes", 1d, 0d, SettingsProperty.ClusterScope); + Setting.doubleSetting("cache.recycler.page.weight.bytes", 1d, 0d, Property.NodeScope); public static final Setting WEIGHT_LONG_SETTING = - Setting.doubleSetting("cache.recycler.page.weight.longs", 1d, 0d, SettingsProperty.ClusterScope); + Setting.doubleSetting("cache.recycler.page.weight.longs", 1d, 0d, Property.NodeScope); public static final Setting WEIGHT_INT_SETTING = - Setting.doubleSetting("cache.recycler.page.weight.ints", 1d, 0d, SettingsProperty.ClusterScope); + Setting.doubleSetting("cache.recycler.page.weight.ints", 1d, 0d, 
Property.NodeScope); // object pages are less useful to us so we give them a lower weight by default public static final Setting WEIGHT_OBJECTS_SETTING = - Setting.doubleSetting("cache.recycler.page.weight.objects", 0.1d, 0d, SettingsProperty.ClusterScope); + Setting.doubleSetting("cache.recycler.page.weight.objects", 0.1d, 0d, Property.NodeScope); private final Recycler bytePage; private final Recycler intPage; diff --git a/core/src/main/java/org/elasticsearch/client/Client.java b/core/src/main/java/org/elasticsearch/client/Client.java index 1ced0f2b0199..e5d8d4f55b7b 100644 --- a/core/src/main/java/org/elasticsearch/client/Client.java +++ b/core/src/main/java/org/elasticsearch/client/Client.java @@ -19,12 +19,8 @@ package org.elasticsearch.client; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; @@ -87,7 +83,7 @@ import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import java.util.Map; @@ -115,7 +111,7 @@ public interface Client extends ElasticsearchClient, Releasable { default: throw new IllegalArgumentException("Can't parse [client.type] must be one of [node, transport]"); } - }, SettingsProperty.ClusterScope); + }, Property.NodeScope); /** * The admin client that can be used to perform administrative operations. 
diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java index 28c921333cae..2dfd8f9fa030 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java @@ -34,7 +34,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; @@ -102,13 +102,13 @@ public class TransportClientNodesService extends AbstractComponent { public static final Setting CLIENT_TRANSPORT_NODES_SAMPLER_INTERVAL = - Setting.positiveTimeSetting("client.transport.nodes_sampler_interval", timeValueSeconds(5), SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("client.transport.nodes_sampler_interval", timeValueSeconds(5), Property.NodeScope); public static final Setting CLIENT_TRANSPORT_PING_TIMEOUT = - Setting.positiveTimeSetting("client.transport.ping_timeout", timeValueSeconds(5), SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("client.transport.ping_timeout", timeValueSeconds(5), Property.NodeScope); public static final Setting CLIENT_TRANSPORT_IGNORE_CLUSTER_NAME = - Setting.boolSetting("client.transport.ignore_cluster_name", false, SettingsProperty.ClusterScope); + Setting.boolSetting("client.transport.ignore_cluster_name", false, Property.NodeScope); public static final Setting CLIENT_TRANSPORT_SNIFF = - Setting.boolSetting("client.transport.sniff", false, 
SettingsProperty.ClusterScope); + Setting.boolSetting("client.transport.sniff", false, Property.NodeScope); @Inject public TransportClientNodesService(Settings settings, ClusterName clusterName, TransportService transportService, diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index c57549236e87..c54cf1b070f7 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -58,7 +58,7 @@ import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.gateway.GatewayAllocator; @@ -76,7 +76,7 @@ public class ClusterModule extends AbstractModule { public static final String EVEN_SHARD_COUNT_ALLOCATOR = "even_shard"; public static final String BALANCED_ALLOCATOR = "balanced"; // default public static final Setting SHARDS_ALLOCATOR_TYPE_SETTING = - new Setting<>("cluster.routing.allocation.type", BALANCED_ALLOCATOR, Function.identity(), SettingsProperty.ClusterScope); + new Setting<>("cluster.routing.allocation.type", BALANCED_ALLOCATOR, Function.identity(), Property.NodeScope); public static final List> DEFAULT_ALLOCATION_DECIDERS = Collections.unmodifiableList(Arrays.asList( SameShardAllocationDecider.class, diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterName.java b/core/src/main/java/org/elasticsearch/cluster/ClusterName.java index 185c68e075c2..09c64065dbd5 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterName.java +++ 
b/core/src/main/java/org/elasticsearch/cluster/ClusterName.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import java.io.IOException; @@ -38,7 +38,7 @@ public class ClusterName implements Streamable { throw new IllegalArgumentException("[cluster.name] must not be empty"); } return s; - }, SettingsProperty.ClusterScope); + }, Property.NodeScope); public static final ClusterName DEFAULT = new ClusterName(CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY).intern()); diff --git a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index 32f521a67824..896793f1bf35 100644 --- a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -39,7 +39,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; @@ -67,10 +67,10 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu public static final Setting INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING = Setting.timeSetting("cluster.info.update.interval", TimeValue.timeValueSeconds(30), 
TimeValue.timeValueSeconds(10), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); public static final Setting INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING = Setting.positiveTimeSetting("cluster.info.update.timeout", TimeValue.timeValueSeconds(15), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); private volatile TimeValue updateFrequency; diff --git a/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java b/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java index 4005631d5afc..d483d56d84c7 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java @@ -28,7 +28,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.mapper.MapperService; @@ -44,7 +44,7 @@ public class MappingUpdatedAction extends AbstractComponent { public static final Setting INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING = Setting.positiveTimeSetting("indices.mapping.dynamic_timeout", TimeValue.timeValueSeconds(30), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); private IndicesAdminClient client; private volatile TimeValue dynamicMappingUpdateTimeout; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java b/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java index 9a8499832eac..4b4a8e54d7c6 
100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java @@ -20,7 +20,7 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; /** * This class acts as a functional wrapper around the index.auto_expand_replicas setting. @@ -57,7 +57,7 @@ final class AutoExpandReplicas { } } return new AutoExpandReplicas(min, max, true); - }, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + }, Property.Dynamic, Property.IndexScope); private final int minReplicas; private final int maxReplicas; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index 3031273815d7..8c093a72ff3c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -38,7 +38,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.FromXContentBuilder; @@ -154,35 +154,35 @@ public class IndexMetaData implements Diffable, FromXContentBuild public static final String INDEX_SETTING_PREFIX = "index."; public static final String SETTING_NUMBER_OF_SHARDS = "index.number_of_shards"; public static final Setting 
INDEX_NUMBER_OF_SHARDS_SETTING = - Setting.intSetting(SETTING_NUMBER_OF_SHARDS, 5, 1, SettingsProperty.IndexScope); + Setting.intSetting(SETTING_NUMBER_OF_SHARDS, 5, 1, Property.IndexScope); public static final String SETTING_NUMBER_OF_REPLICAS = "index.number_of_replicas"; public static final Setting INDEX_NUMBER_OF_REPLICAS_SETTING = - Setting.intSetting(SETTING_NUMBER_OF_REPLICAS, 1, 0, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting.intSetting(SETTING_NUMBER_OF_REPLICAS, 1, 0, Property.Dynamic, Property.IndexScope); public static final String SETTING_SHADOW_REPLICAS = "index.shadow_replicas"; public static final Setting INDEX_SHADOW_REPLICAS_SETTING = - Setting.boolSetting(SETTING_SHADOW_REPLICAS, false, SettingsProperty.IndexScope); + Setting.boolSetting(SETTING_SHADOW_REPLICAS, false, Property.IndexScope); public static final String SETTING_SHARED_FILESYSTEM = "index.shared_filesystem"; public static final Setting INDEX_SHARED_FILESYSTEM_SETTING = - Setting.boolSetting(SETTING_SHARED_FILESYSTEM, false, SettingsProperty.IndexScope); + Setting.boolSetting(SETTING_SHARED_FILESYSTEM, false, Property.IndexScope); public static final String SETTING_AUTO_EXPAND_REPLICAS = "index.auto_expand_replicas"; public static final Setting INDEX_AUTO_EXPAND_REPLICAS_SETTING = AutoExpandReplicas.SETTING; public static final String SETTING_READ_ONLY = "index.blocks.read_only"; public static final Setting INDEX_READ_ONLY_SETTING = - Setting.boolSetting(SETTING_READ_ONLY, false, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting.boolSetting(SETTING_READ_ONLY, false, Property.Dynamic, Property.IndexScope); public static final String SETTING_BLOCKS_READ = "index.blocks.read"; public static final Setting INDEX_BLOCKS_READ_SETTING = - Setting.boolSetting(SETTING_BLOCKS_READ, false, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting.boolSetting(SETTING_BLOCKS_READ, false, Property.Dynamic, Property.IndexScope); public static final String 
SETTING_BLOCKS_WRITE = "index.blocks.write"; public static final Setting INDEX_BLOCKS_WRITE_SETTING = - Setting.boolSetting(SETTING_BLOCKS_WRITE, false, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting.boolSetting(SETTING_BLOCKS_WRITE, false, Property.Dynamic, Property.IndexScope); public static final String SETTING_BLOCKS_METADATA = "index.blocks.metadata"; public static final Setting INDEX_BLOCKS_METADATA_SETTING = - Setting.boolSetting(SETTING_BLOCKS_METADATA, false, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting.boolSetting(SETTING_BLOCKS_METADATA, false, Property.Dynamic, Property.IndexScope); public static final String SETTING_VERSION_CREATED = "index.version.created"; public static final String SETTING_VERSION_CREATED_STRING = "index.version.created_string"; @@ -192,23 +192,23 @@ public class IndexMetaData implements Diffable, FromXContentBuild public static final String SETTING_CREATION_DATE = "index.creation_date"; public static final String SETTING_PRIORITY = "index.priority"; public static final Setting INDEX_PRIORITY_SETTING = - Setting.intSetting("index.priority", 1, 0, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting.intSetting("index.priority", 1, 0, Property.Dynamic, Property.IndexScope); public static final String SETTING_CREATION_DATE_STRING = "index.creation_date_string"; public static final String SETTING_INDEX_UUID = "index.uuid"; public static final String SETTING_DATA_PATH = "index.data_path"; public static final Setting INDEX_DATA_PATH_SETTING = - new Setting<>(SETTING_DATA_PATH, "", Function.identity(), SettingsProperty.IndexScope); + new Setting<>(SETTING_DATA_PATH, "", Function.identity(), Property.IndexScope); public static final String SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE = "index.shared_filesystem.recover_on_any_node"; public static final Setting INDEX_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE_SETTING = - Setting.boolSetting(SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false, 
SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting.boolSetting(SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false, Property.Dynamic, Property.IndexScope); public static final String INDEX_UUID_NA_VALUE = "_na_"; public static final Setting INDEX_ROUTING_REQUIRE_GROUP_SETTING = - Setting.groupSetting("index.routing.allocation.require.", SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting.groupSetting("index.routing.allocation.require.", Property.Dynamic, Property.IndexScope); public static final Setting INDEX_ROUTING_INCLUDE_GROUP_SETTING = - Setting.groupSetting("index.routing.allocation.include.", SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting.groupSetting("index.routing.allocation.include.", Property.Dynamic, Property.IndexScope); public static final Setting INDEX_ROUTING_EXCLUDE_GROUP_SETTING = - Setting.groupSetting("index.routing.allocation.exclude.", SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting.groupSetting("index.routing.allocation.exclude.", Property.Dynamic, Property.IndexScope); public static final IndexMetaData PROTO = IndexMetaData.builder("") .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 996046015b18..4c83f64581e6 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -41,7 +41,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.FromXContentBuilder; @@ -141,7 +141,7 @@ public class MetaData implements Iterable, Diffable, Fr public static final Setting SETTING_READ_ONLY_SETTING = - Setting.boolSetting("cluster.blocks.read_only", false, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.boolSetting("cluster.blocks.read_only", false, Property.Dynamic, Property.NodeScope); public static final ClusterBlock CLUSTER_READ_ONLY_BLOCK = new ClusterBlock(6, "cluster read-only (api)", false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA_WRITE)); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java b/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java index 1b7fcf96779b..be7d90a1fef0 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java @@ -29,7 +29,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; @@ -46,8 +46,8 @@ public class UnassignedInfo implements ToXContent, Writeable { private static final TimeValue DEFAULT_DELAYED_NODE_LEFT_TIMEOUT = TimeValue.timeValueMinutes(1); public static final Setting INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING = - Setting.timeSetting("index.unassigned.node_left.delayed_timeout", DEFAULT_DELAYED_NODE_LEFT_TIMEOUT, SettingsProperty.Dynamic, - SettingsProperty.IndexScope); + 
Setting.timeSetting("index.unassigned.node_left.delayed_timeout", DEFAULT_DELAYED_NODE_LEFT_TIMEOUT, Property.Dynamic, + Property.IndexScope); /** * Reason why the shard is in unassigned state. diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 22cb17ab8cb7..40e0d6c97329 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -39,7 +39,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.PriorityComparator; @@ -74,12 +74,12 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; public class BalancedShardsAllocator extends AbstractComponent implements ShardsAllocator { public static final Setting INDEX_BALANCE_FACTOR_SETTING = - Setting.floatSetting("cluster.routing.allocation.balance.index", 0.55f, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.floatSetting("cluster.routing.allocation.balance.index", 0.55f, Property.Dynamic, Property.NodeScope); public static final Setting SHARD_BALANCE_FACTOR_SETTING = - Setting.floatSetting("cluster.routing.allocation.balance.shard", 0.45f, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.floatSetting("cluster.routing.allocation.balance.shard", 0.45f, Property.Dynamic, Property.NodeScope); public static final Setting THRESHOLD_SETTING = 
Setting.floatSetting("cluster.routing.allocation.balance.threshold", 1.0f, 0.0f, - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); private volatile WeightFunction weightFunction; private volatile float threshold; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java index 235cfd841866..77613f390841 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java @@ -28,7 +28,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import java.util.HashMap; @@ -79,10 +79,10 @@ public class AwarenessAllocationDecider extends AllocationDecider { public static final String NAME = "awareness"; public static final Setting CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING = - new Setting<>("cluster.routing.allocation.awareness.attributes", "", Strings::splitStringByCommaToArray , SettingsProperty.Dynamic, - SettingsProperty.ClusterScope); + new Setting<>("cluster.routing.allocation.awareness.attributes", "", Strings::splitStringByCommaToArray , Property.Dynamic, + Property.NodeScope); public static final Setting CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING = - Setting.groupSetting("cluster.routing.allocation.awareness.force.", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.groupSetting("cluster.routing.allocation.awareness.force.", 
Property.Dynamic, Property.NodeScope); private String[] awarenessAttributes; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java index 58966dd62a63..84e974aceb08 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java @@ -24,7 +24,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import java.util.Locale; @@ -51,7 +51,7 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider { public static final String NAME = "cluster_rebalance"; public static final Setting CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING = new Setting<>("cluster.routing.allocation.allow_rebalance", ClusterRebalanceType.INDICES_ALL_ACTIVE.name().toLowerCase(Locale.ROOT), - ClusterRebalanceType::parseString, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + ClusterRebalanceType::parseString, Property.Dynamic, Property.NodeScope); /** * An enum representation for the configured re-balance type. 
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java index cab73958b757..fe6bf918dc28 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java @@ -24,7 +24,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; /** @@ -45,7 +45,7 @@ public class ConcurrentRebalanceAllocationDecider extends AllocationDecider { public static final Setting CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING = Setting.intSetting("cluster.routing.allocation.cluster_concurrent_rebalance", 2, -1, - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); private volatile int clusterConcurrentRebalance; @Inject diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index e8b5a3dba04d..dcb6080bd1ed 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -38,7 +38,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.RatioValue; @@ -83,21 +83,21 @@ public class DiskThresholdDecider extends AllocationDecider { private volatile TimeValue rerouteInterval; public static final Setting CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING = - Setting.boolSetting("cluster.routing.allocation.disk.threshold_enabled", true, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.boolSetting("cluster.routing.allocation.disk.threshold_enabled", true, Property.Dynamic, Property.NodeScope); public static final Setting CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.low", "85%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.low"), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); public static final Setting CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING = new Setting<>("cluster.routing.allocation.disk.watermark.high", "90%", (s) -> validWatermarkSetting(s, "cluster.routing.allocation.disk.watermark.high"), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); public static final Setting CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS_SETTING = Setting.boolSetting("cluster.routing.allocation.disk.include_relocations", true, - SettingsProperty.Dynamic, SettingsProperty.ClusterScope);; + Property.Dynamic, Property.NodeScope);; public static final Setting CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING = Setting.positiveTimeSetting("cluster.routing.allocation.disk.reroute_interval", TimeValue.timeValueSeconds(60), - 
SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); /** * Listens for a node to go over the high watermark and kicks off an empty diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java index 0cca4cac4806..80dada860228 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java @@ -26,7 +26,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import java.util.Locale; @@ -63,17 +63,17 @@ public class EnableAllocationDecider extends AllocationDecider { public static final Setting CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING = new Setting<>("cluster.routing.allocation.enable", Allocation.ALL.name(), Allocation::parse, - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); public static final Setting INDEX_ROUTING_ALLOCATION_ENABLE_SETTING = new Setting<>("index.routing.allocation.enable", Allocation.ALL.name(), Allocation::parse, - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); public static final Setting CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING = new Setting<>("cluster.routing.rebalance.enable", Rebalance.ALL.name(), Rebalance::parse, - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); public static final Setting 
INDEX_ROUTING_REBALANCE_ENABLE_SETTING = new Setting<>("index.routing.rebalance.enable", Rebalance.ALL.name(), Rebalance::parse, - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); private volatile Rebalance enableRebalance; private volatile Allocation enableAllocation; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java index b4c50d1849bd..c3ff0bb355ed 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java @@ -27,7 +27,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.AND; @@ -62,11 +62,11 @@ public class FilterAllocationDecider extends AllocationDecider { public static final String NAME = "filter"; public static final Setting CLUSTER_ROUTING_REQUIRE_GROUP_SETTING = - Setting.groupSetting("cluster.routing.allocation.require.", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.groupSetting("cluster.routing.allocation.require.", Property.Dynamic, Property.NodeScope); public static final Setting CLUSTER_ROUTING_INCLUDE_GROUP_SETTING = - Setting.groupSetting("cluster.routing.allocation.include.", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.groupSetting("cluster.routing.allocation.include.", Property.Dynamic, Property.NodeScope); 
public static final Setting CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING = - Setting.groupSetting("cluster.routing.allocation.exclude.", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.groupSetting("cluster.routing.allocation.exclude.", Property.Dynamic, Property.NodeScope); private volatile DiscoveryNodeFilters clusterRequireFilters; private volatile DiscoveryNodeFilters clusterIncludeFilters; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java index 03a383830ce6..ab8be4dc8da5 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java @@ -27,7 +27,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; /** @@ -62,7 +62,7 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { */ public static final Setting INDEX_TOTAL_SHARDS_PER_NODE_SETTING = Setting.intSetting("index.routing.allocation.total_shards_per_node", -1, -1, - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); /** * Controls the maximum number of shards per node on a global level. 
@@ -70,7 +70,7 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { */ public static final Setting CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING = Setting.intSetting("cluster.routing.allocation.total_shards_per_node", -1, -1, - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); @Inject diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java index a9d269d3b3ab..d656afc8036c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java @@ -26,7 +26,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; /** @@ -42,7 +42,7 @@ public class SnapshotInProgressAllocationDecider extends AllocationDecider { */ public static final Setting CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED_SETTING = Setting.boolSetting("cluster.routing.allocation.snapshot.relocation_enabled", false, - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); private volatile boolean enableRelocation = false; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java index 649002362911..ca6b312da4c8 100644 --- 
a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java @@ -25,7 +25,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; /** @@ -55,21 +55,21 @@ public class ThrottlingAllocationDecider extends AllocationDecider { new Setting<>("cluster.routing.allocation.node_concurrent_recoveries", Integer.toString(DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES), (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_recoveries"), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING = Setting.intSetting("cluster.routing.allocation.node_initial_primaries_recoveries", DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, 0, - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING = new Setting<>("cluster.routing.allocation.node_concurrent_incoming_recoveries", (s) -> CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getRaw(s), (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_incoming_recoveries"), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); public static final Setting CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING = new 
Setting<>("cluster.routing.allocation.node_concurrent_outgoing_recoveries", (s) -> CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getRaw(s), (s) -> Setting.parseInt(s, 0, "cluster.routing.allocation.node_concurrent_outgoing_recoveries"), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); private volatile int primariesInitialRecoveries; diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index 84a7130e3340..47b65d6864f5 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -52,7 +52,7 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.transport.TransportAddress; @@ -99,14 +99,14 @@ public class InternalClusterService extends AbstractLifecycleComponent CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING = Setting.positiveTimeSetting("cluster.service.slow_task_logging_threshold", TimeValue.timeValueSeconds(30), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); public static final Setting CLUSTER_SERVICE_RECONNECT_INTERVAL_SETTING = - Setting.positiveTimeSetting("cluster.service.reconnect_interval", TimeValue.timeValueSeconds(10), SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("cluster.service.reconnect_interval", TimeValue.timeValueSeconds(10), Property.NodeScope); public static final 
String UPDATE_THREAD_NAME = "clusterService#updateTask"; public static final Setting NODE_ID_SEED_SETTING = // don't use node.id.seed so it won't be seen as an attribute - Setting.longSetting("node_id.seed", 0L, Long.MIN_VALUE, SettingsProperty.ClusterScope); + Setting.longSetting("node_id.seed", 0L, Long.MIN_VALUE, Property.NodeScope); private final ThreadPool threadPool; private BiConsumer clusterStatePublisher; diff --git a/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java b/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java index dbf1644cbfec..c0951c47df1d 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java +++ b/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java @@ -21,8 +21,7 @@ package org.elasticsearch.common.logging; import org.apache.log4j.Logger; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.Setting.Property; import java.util.Locale; @@ -32,10 +31,10 @@ import java.util.Locale; public abstract class ESLoggerFactory { public static final Setting LOG_DEFAULT_LEVEL_SETTING = - new Setting<>("logger.level", LogLevel.INFO.name(), LogLevel::parse, SettingsProperty.ClusterScope); + new Setting<>("logger.level", LogLevel.INFO.name(), LogLevel::parse, Property.NodeScope); public static final Setting LOG_LEVEL_SETTING = Setting.prefixKeySetting("logger.", LogLevel.INFO.name(), LogLevel::parse, - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); public static ESLogger getLogger(String prefix, String name) { prefix = prefix == null ? 
null : prefix.intern(); diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java index c79e8dd3af50..1a54ad2753ac 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -28,7 +28,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.http.HttpServer; @@ -155,11 +155,11 @@ public class NetworkModule extends AbstractModule { public static final String LOCAL_TRANSPORT = "local"; public static final String NETTY_TRANSPORT = "netty"; - public static final Setting HTTP_TYPE_SETTING = Setting.simpleString("http.type", SettingsProperty.ClusterScope); - public static final Setting HTTP_ENABLED = Setting.boolSetting("http.enabled", true, SettingsProperty.ClusterScope); + public static final Setting HTTP_TYPE_SETTING = Setting.simpleString("http.type", Property.NodeScope); + public static final Setting HTTP_ENABLED = Setting.boolSetting("http.enabled", true, Property.NodeScope); public static final Setting TRANSPORT_SERVICE_TYPE_SETTING = - Setting.simpleString("transport.service.type", SettingsProperty.ClusterScope); - public static final Setting TRANSPORT_TYPE_SETTING = Setting.simpleString("transport.type", SettingsProperty.ClusterScope); + Setting.simpleString("transport.service.type", Property.NodeScope); + public static final Setting TRANSPORT_TYPE_SETTING = Setting.simpleString("transport.type", Property.NodeScope); diff --git 
a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java index 83f4f5fc88c9..ff1f3912cc5c 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java @@ -22,7 +22,7 @@ package org.elasticsearch.common.network; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -46,32 +46,32 @@ public class NetworkService extends AbstractComponent { public static final String DEFAULT_NETWORK_HOST = "_local_"; public static final Setting> GLOBAL_NETWORK_HOST_SETTING = - Setting.listSetting("network.host", Arrays.asList(DEFAULT_NETWORK_HOST), Function.identity(), SettingsProperty.ClusterScope); + Setting.listSetting("network.host", Arrays.asList(DEFAULT_NETWORK_HOST), Function.identity(), Property.NodeScope); public static final Setting> GLOBAL_NETWORK_BINDHOST_SETTING = - Setting.listSetting("network.bind_host", GLOBAL_NETWORK_HOST_SETTING, Function.identity(), SettingsProperty.ClusterScope); + Setting.listSetting("network.bind_host", GLOBAL_NETWORK_HOST_SETTING, Function.identity(), Property.NodeScope); public static final Setting> GLOBAL_NETWORK_PUBLISHHOST_SETTING = - Setting.listSetting("network.publish_host", GLOBAL_NETWORK_HOST_SETTING, Function.identity(), SettingsProperty.ClusterScope); - public static final Setting NETWORK_SERVER = Setting.boolSetting("network.server", true, SettingsProperty.ClusterScope); + Setting.listSetting("network.publish_host", GLOBAL_NETWORK_HOST_SETTING, Function.identity(), 
Property.NodeScope); + public static final Setting NETWORK_SERVER = Setting.boolSetting("network.server", true, Property.NodeScope); public static final class TcpSettings { public static final Setting TCP_NO_DELAY = - Setting.boolSetting("network.tcp.no_delay", true, SettingsProperty.ClusterScope); + Setting.boolSetting("network.tcp.no_delay", true, Property.NodeScope); public static final Setting TCP_KEEP_ALIVE = - Setting.boolSetting("network.tcp.keep_alive", true, SettingsProperty.ClusterScope); + Setting.boolSetting("network.tcp.keep_alive", true, Property.NodeScope); public static final Setting TCP_REUSE_ADDRESS = - Setting.boolSetting("network.tcp.reuse_address", NetworkUtils.defaultReuseAddress(), SettingsProperty.ClusterScope); + Setting.boolSetting("network.tcp.reuse_address", NetworkUtils.defaultReuseAddress(), Property.NodeScope); public static final Setting TCP_SEND_BUFFER_SIZE = - Setting.byteSizeSetting("network.tcp.send_buffer_size", new ByteSizeValue(-1), SettingsProperty.ClusterScope); + Setting.byteSizeSetting("network.tcp.send_buffer_size", new ByteSizeValue(-1), Property.NodeScope); public static final Setting TCP_RECEIVE_BUFFER_SIZE = - Setting.byteSizeSetting("network.tcp.receive_buffer_size", new ByteSizeValue(-1), SettingsProperty.ClusterScope); + Setting.byteSizeSetting("network.tcp.receive_buffer_size", new ByteSizeValue(-1), Property.NodeScope); public static final Setting TCP_BLOCKING = - Setting.boolSetting("network.tcp.blocking", false, SettingsProperty.ClusterScope); + Setting.boolSetting("network.tcp.blocking", false, Property.NodeScope); public static final Setting TCP_BLOCKING_SERVER = - Setting.boolSetting("network.tcp.blocking_server", TCP_BLOCKING, SettingsProperty.ClusterScope); + Setting.boolSetting("network.tcp.blocking_server", TCP_BLOCKING, Property.NodeScope); public static final Setting TCP_BLOCKING_CLIENT = - Setting.boolSetting("network.tcp.blocking_client", TCP_BLOCKING, SettingsProperty.ClusterScope); + 
Setting.boolSetting("network.tcp.blocking_client", TCP_BLOCKING, Property.NodeScope); public static final Setting TCP_CONNECT_TIMEOUT = - Setting.timeSetting("network.tcp.connect_timeout", new TimeValue(30, TimeUnit.SECONDS), SettingsProperty.ClusterScope); + Setting.timeSetting("network.tcp.connect_timeout", new TimeValue(30, TimeUnit.SECONDS), Property.NodeScope); } /** diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index 63ee81c30b7d..baed9c0849fd 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -44,11 +44,11 @@ public abstract class AbstractScopedSettings extends AbstractComponent { private final List> settingUpdaters = new CopyOnWriteArrayList<>(); private final Map> complexMatchers; private final Map> keySettings; - private final Setting.SettingsProperty scope; + private final Setting.Property scope; private static final Pattern KEY_PATTERN = Pattern.compile("^(?:[-\\w]+[.])*[-\\w]+$"); private static final Pattern GROUP_KEY_PATTERN = Pattern.compile("^(?:[-\\w]+[.])+$"); - protected AbstractScopedSettings(Settings settings, Set> settingsSet, Setting.SettingsProperty scope) { + protected AbstractScopedSettings(Settings settings, Set> settingsSet, Setting.Property scope) { super(settings); this.lastSettingsApplied = Settings.EMPTY; this.scope = scope; @@ -96,7 +96,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { return GROUP_KEY_PATTERN.matcher(key).matches(); } - public Setting.SettingsProperty getScope() { + public Setting.Property getScope() { return this.scope; } diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 888902fff68d..45f58390b58c 100644 --- 
a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -45,7 +45,7 @@ import org.elasticsearch.cluster.service.InternalClusterService; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.discovery.DiscoveryModule; @@ -102,7 +102,7 @@ import java.util.function.Predicate; */ public final class ClusterSettings extends AbstractScopedSettings { public ClusterSettings(Settings nodeSettings, Set> settingsSet) { - super(nodeSettings, settingsSet, SettingsProperty.ClusterScope); + super(nodeSettings, settingsSet, Property.NodeScope); addSettingsUpdater(new LoggingSettingUpdater(nodeSettings)); } diff --git a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index ae056460cd83..ae88f513c657 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -22,7 +22,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.gateway.PrimaryShardAllocator; import 
org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; @@ -50,7 +50,7 @@ import java.util.function.Predicate; /** * Encapsulates all valid index level settings. - * @see org.elasticsearch.common.settings.Setting.SettingsProperty#IndexScope + * @see Property#IndexScope */ public final class IndexScopedSettings extends AbstractScopedSettings { @@ -135,15 +135,15 @@ public final class IndexScopedSettings extends AbstractScopedSettings { EngineConfig.INDEX_CODEC_SETTING, IndexWarmer.INDEX_NORMS_LOADING_SETTING, // this sucks but we can't really validate all the analyzers/similarity in here - Setting.groupSetting("index.similarity.", SettingsProperty.IndexScope), // this allows similarity settings to be passed - Setting.groupSetting("index.analysis.", SettingsProperty.IndexScope) // this allows analysis settings to be passed + Setting.groupSetting("index.similarity.", Property.IndexScope), // this allows similarity settings to be passed + Setting.groupSetting("index.analysis.", Property.IndexScope) // this allows analysis settings to be passed ))); public static final IndexScopedSettings DEFAULT_SCOPED_SETTINGS = new IndexScopedSettings(Settings.EMPTY, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); public IndexScopedSettings(Settings settings, Set> settingsSet) { - super(settings, settingsSet, SettingsProperty.IndexScope); + super(settings, settingsSet, Property.IndexScope); } private IndexScopedSettings(Settings settings, IndexScopedSettings other, IndexMetaData metaData) { diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index c6753d243c08..7464e06c1797 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -54,7 +54,7 @@ import java.util.stream.Collectors; * together with {@link AbstractScopedSettings}. 
This class contains several utility methods that makes it straight forward * to add settings for the majority of the cases. For instance a simple boolean settings can be defined like this: *
      {@code
      - * public static final Setting; MY_BOOLEAN = Setting.boolSetting("my.bool.setting", true, SettingsProperty.ClusterScope);}
      + * public static final Setting; MY_BOOLEAN = Setting.boolSetting("my.bool.setting", true, Property.NodeScope);}
        * 
      * To retrieve the value of the setting a {@link Settings} object can be passed directly to the {@link Setting#get(Settings)} method. *
      @@ -66,13 +66,13 @@ import java.util.stream.Collectors;
        *     RED, GREEN, BLUE;
        * }
        * public static final Setting MY_BOOLEAN =
      - *     new Setting<>("my.color.setting", Color.RED.toString(), Color::valueOf, SettingsProperty.ClusterScope);
      + *     new Setting<>("my.color.setting", Color.RED.toString(), Color::valueOf, Property.NodeScope);
        * }
        * 
      */ public class Setting extends ToXContentToBytes { - public enum SettingsProperty { + public enum Property { /** * should be filtered in some api (mask password/credentials) */ @@ -89,25 +89,14 @@ public class Setting extends ToXContentToBytes { Deprecated, /** - * Cluster scope. - * @See IndexScope - * @See NodeScope - */ - ClusterScope, - - /** - * Node scope. - * @See ClusterScope - * @See IndexScope + * Node scope */ NodeScope, /** - * Index scope. - * @See ClusterScope - * @See NodeScope + * Index scope */ - IndexScope; + IndexScope } private static final ESLogger logger = Loggers.getLogger(Setting.class); @@ -116,31 +105,30 @@ public class Setting extends ToXContentToBytes { private final Key key; protected final Function defaultValue; private final Function parser; - private final EnumSet properties; + private final EnumSet properties; /** - * Creates a new Setting instance + * Creates a new Setting instance. When no scope is provided, we default to {@link Property#NodeScope}. * @param key the settings key for this setting. * @param defaultValue a default value function that returns the default values string representation. * @param parser a parser that parses the string rep into a complex datatype. * @param properties properties for this setting like scope, filtering... */ - public Setting(Key key, Function defaultValue, Function parser, SettingsProperty... properties) { + public Setting(Key key, Function defaultValue, Function parser, Property... properties) { assert parser.apply(defaultValue.apply(Settings.EMPTY)) != null || this.isGroupSetting(): "parser returned null"; this.key = key; this.defaultValue = defaultValue; this.parser = parser; if (properties.length == 0) { - this.properties = EnumSet.of(SettingsProperty.NodeScope); + this.properties = EnumSet.of(Property.NodeScope); } else { this.properties = EnumSet.copyOf(Arrays.asList(properties)); } // We validate scope settings. 
They are mutually exclusive int numScopes = 0; - for (SettingsProperty property : properties) { - if (property == SettingsProperty.ClusterScope || - property == SettingsProperty.IndexScope || - property == SettingsProperty.NodeScope) { + for (Property property : properties) { + if (property == Property.NodeScope || + property == Property.IndexScope) { numScopes++; } } @@ -156,7 +144,7 @@ public class Setting extends ToXContentToBytes { * @param parser a parser that parses the string rep into a complex datatype. * @param properties properties for this setting like scope, filtering... */ - public Setting(String key, String defaultValue, Function parser, SettingsProperty... properties) { + public Setting(String key, String defaultValue, Function parser, Property... properties) { this(key, s -> defaultValue, parser, properties); } @@ -167,7 +155,7 @@ public class Setting extends ToXContentToBytes { * @param parser a parser that parses the string rep into a complex datatype. * @param properties properties for this setting like scope, filtering... */ - public Setting(String key, Function defaultValue, Function parser, SettingsProperty... properties) { + public Setting(String key, Function defaultValue, Function parser, Property... properties) { this(new SimpleKey(key), defaultValue, parser, properties); } @@ -178,7 +166,7 @@ public class Setting extends ToXContentToBytes { * @param parser a parser that parses the string rep into a complex datatype. * @param properties properties for this setting like scope, filtering... */ - public Setting(String key, Setting fallBackSetting, Function parser, SettingsProperty... properties) { + public Setting(String key, Setting fallBackSetting, Function parser, Property... 
properties) { this(key, fallBackSetting::getRaw, parser, properties); } @@ -204,14 +192,14 @@ public class Setting extends ToXContentToBytes { * Returns true if this setting is dynamically updateable, otherwise false */ public final boolean isDynamic() { - return properties.contains(SettingsProperty.Dynamic); + return properties.contains(Property.Dynamic); } /** * Returns the setting properties - * @see SettingsProperty + * @see Property */ - public EnumSet getProperties() { + public EnumSet getProperties() { return properties; } @@ -219,35 +207,28 @@ public class Setting extends ToXContentToBytes { * Returns true if this setting must be filtered, otherwise false */ public boolean isFiltered() { - return properties.contains(SettingsProperty.Filtered); + return properties.contains(Property.Filtered); } /** - * Returns true if this setting has a cluster scope, otherwise false + * Returns true if this setting has a node scope, otherwise false */ - public boolean hasClusterScope() { - return properties.contains(SettingsProperty.ClusterScope); + public boolean hasNodeScope() { + return properties.contains(Property.NodeScope); } /** * Returns true if this setting has an index scope, otherwise false */ public boolean hasIndexScope() { - return properties.contains(SettingsProperty.IndexScope); - } - - /** - * Returns true if this setting has an index scope, otherwise false - */ - public boolean hasNodeScope() { - return properties.contains(SettingsProperty.NodeScope); + return properties.contains(Property.IndexScope); } /** * Returns true if this setting is deprecated, otherwise false */ public boolean isDeprecated() { - return properties.contains(SettingsProperty.Deprecated); + return properties.contains(Property.Deprecated); } /** @@ -451,11 +432,11 @@ public class Setting extends ToXContentToBytes { } - public static Setting floatSetting(String key, float defaultValue, SettingsProperty... 
properties) { + public static Setting floatSetting(String key, float defaultValue, Property... properties) { return new Setting<>(key, (s) -> Float.toString(defaultValue), Float::parseFloat, properties); } - public static Setting floatSetting(String key, float defaultValue, float minValue, SettingsProperty... properties) { + public static Setting floatSetting(String key, float defaultValue, float minValue, Property... properties) { return new Setting<>(key, (s) -> Float.toString(defaultValue), (s) -> { float value = Float.parseFloat(s); if (value < minValue) { @@ -465,19 +446,19 @@ public class Setting extends ToXContentToBytes { }, properties); } - public static Setting intSetting(String key, int defaultValue, int minValue, int maxValue, SettingsProperty... properties) { + public static Setting intSetting(String key, int defaultValue, int minValue, int maxValue, Property... properties) { return new Setting<>(key, (s) -> Integer.toString(defaultValue), (s) -> parseInt(s, minValue, maxValue, key), properties); } - public static Setting intSetting(String key, int defaultValue, int minValue, SettingsProperty... properties) { + public static Setting intSetting(String key, int defaultValue, int minValue, Property... properties) { return new Setting<>(key, (s) -> Integer.toString(defaultValue), (s) -> parseInt(s, minValue, key), properties); } - public static Setting longSetting(String key, long defaultValue, long minValue, SettingsProperty... properties) { + public static Setting longSetting(String key, long defaultValue, long minValue, Property... properties) { return new Setting<>(key, (s) -> Long.toString(defaultValue), (s) -> parseLong(s, minValue, key), properties); } - public static Setting simpleString(String key, SettingsProperty... properties) { + public static Setting simpleString(String key, Property... 
properties) { return new Setting<>(key, s -> "", Function.identity(), properties); } @@ -512,52 +493,52 @@ public class Setting extends ToXContentToBytes { return timeValue; } - public static Setting intSetting(String key, int defaultValue, SettingsProperty... properties) { + public static Setting intSetting(String key, int defaultValue, Property... properties) { return intSetting(key, defaultValue, Integer.MIN_VALUE, properties); } - public static Setting boolSetting(String key, boolean defaultValue, SettingsProperty... properties) { + public static Setting boolSetting(String key, boolean defaultValue, Property... properties) { return new Setting<>(key, (s) -> Boolean.toString(defaultValue), Booleans::parseBooleanExact, properties); } - public static Setting boolSetting(String key, Setting fallbackSetting, SettingsProperty... properties) { + public static Setting boolSetting(String key, Setting fallbackSetting, Property... properties) { return new Setting<>(key, fallbackSetting, Booleans::parseBooleanExact, properties); } - public static Setting byteSizeSetting(String key, String percentage, SettingsProperty... properties) { + public static Setting byteSizeSetting(String key, String percentage, Property... properties) { return new Setting<>(key, (s) -> percentage, (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, key), properties); } - public static Setting byteSizeSetting(String key, ByteSizeValue value, SettingsProperty... properties) { + public static Setting byteSizeSetting(String key, ByteSizeValue value, Property... properties) { return byteSizeSetting(key, (s) -> value.toString(), properties); } public static Setting byteSizeSetting(String key, Setting fallbackSettings, - SettingsProperty... properties) { + Property... properties) { return byteSizeSetting(key, fallbackSettings::getRaw, properties); } public static Setting byteSizeSetting(String key, Function defaultValue, - SettingsProperty... properties) { + Property... 
properties) { return new Setting<>(key, defaultValue, (s) -> ByteSizeValue.parseBytesSizeValue(s, key), properties); } - public static Setting positiveTimeSetting(String key, TimeValue defaultValue, SettingsProperty... properties) { + public static Setting positiveTimeSetting(String key, TimeValue defaultValue, Property... properties) { return timeSetting(key, defaultValue, TimeValue.timeValueMillis(0), properties); } public static Setting> listSetting(String key, List defaultStringValue, Function singleValueParser, - SettingsProperty... properties) { + Property... properties) { return listSetting(key, (s) -> defaultStringValue, singleValueParser, properties); } public static Setting> listSetting(String key, Setting> fallbackSetting, Function singleValueParser, - SettingsProperty... properties) { + Property... properties) { return listSetting(key, (s) -> parseableStringToList(fallbackSetting.getRaw(s)), singleValueParser, properties); } public static Setting> listSetting(String key, Function> defaultStringValue, - Function singleValueParser, SettingsProperty... properties) { + Function singleValueParser, Property... properties) { Function> parser = (s) -> parseableStringToList(s).stream().map(singleValueParser).collect(Collectors.toList()); @@ -611,7 +592,7 @@ public class Setting extends ToXContentToBytes { } } - public static Setting groupSetting(String key, SettingsProperty... properties) { + public static Setting groupSetting(String key, Property... properties) { // TODO CHECK IF WE REMOVE if (key.endsWith(".") == false) { throw new IllegalArgumentException("key must end with a '.'"); @@ -671,7 +652,7 @@ public class Setting extends ToXContentToBytes { } public static Setting timeSetting(String key, Function defaultValue, TimeValue minValue, - SettingsProperty... properties) { + Property... 
properties) { return new Setting<>(key, defaultValue, (s) -> { TimeValue timeValue = TimeValue.parseTimeValue(s, null, key); if (timeValue.millis() < minValue.millis()) { @@ -681,19 +662,19 @@ public class Setting extends ToXContentToBytes { }, properties); } - public static Setting timeSetting(String key, TimeValue defaultValue, TimeValue minValue, SettingsProperty... properties) { + public static Setting timeSetting(String key, TimeValue defaultValue, TimeValue minValue, Property... properties) { return timeSetting(key, (s) -> defaultValue.getStringRep(), minValue, properties); } - public static Setting timeSetting(String key, TimeValue defaultValue, SettingsProperty... properties) { + public static Setting timeSetting(String key, TimeValue defaultValue, Property... properties) { return new Setting<>(key, (s) -> defaultValue.toString(), (s) -> TimeValue.parseTimeValue(s, key), properties); } - public static Setting timeSetting(String key, Setting fallbackSetting, SettingsProperty... properties) { + public static Setting timeSetting(String key, Setting fallbackSetting, Property... properties) { return new Setting<>(key, fallbackSetting::getRaw, (s) -> TimeValue.parseTimeValue(s, key), properties); } - public static Setting doubleSetting(String key, double defaultValue, double minValue, SettingsProperty... properties) { + public static Setting doubleSetting(String key, double defaultValue, double minValue, Property... properties) { return new Setting<>(key, (s) -> Double.toString(defaultValue), (s) -> { final double d = Double.parseDouble(s); if (d < minValue) { @@ -722,7 +703,7 @@ public class Setting extends ToXContentToBytes { * {@link #getConcreteSetting(String)} is used to pull the updater. */ public static Setting prefixKeySetting(String prefix, String defaultValue, Function parser, - SettingsProperty... properties) { + Property... 
properties) { return affixKeySetting(AffixKey.withPrefix(prefix), (s) -> defaultValue, parser, properties); } @@ -732,17 +713,17 @@ public class Setting extends ToXContentToBytes { * out of the box unless {@link #getConcreteSetting(String)} is used to pull the updater. */ public static Setting adfixKeySetting(String prefix, String suffix, Function defaultValue, - Function parser, SettingsProperty... properties) { + Function parser, Property... properties) { return affixKeySetting(AffixKey.withAdfix(prefix, suffix), defaultValue, parser, properties); } public static Setting adfixKeySetting(String prefix, String suffix, String defaultValue, Function parser, - SettingsProperty... properties) { + Property... properties) { return adfixKeySetting(prefix, suffix, (s) -> defaultValue, parser, properties); } public static Setting affixKeySetting(AffixKey key, Function defaultValue, Function parser, - SettingsProperty... properties) { + Property... properties) { return new Setting(key, defaultValue, parser, properties) { @Override diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java index 8786ac5f4474..ee770f747562 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java @@ -35,7 +35,7 @@ public class SettingsModule extends AbstractModule { private final Settings settings; private final Set settingsFilterPattern = new HashSet<>(); - private final Map> clusterSettings = new HashMap<>(); + private final Map> nodeSettings = new HashMap<>(); private final Map> indexSettings = new HashMap<>(); private static final Predicate TRIBE_CLIENT_NODE_SETTINGS_PREDICATE = (s) -> s.startsWith("tribe.") && TribeService.TRIBE_SETTING_KEYS.contains(s) == false; @@ -52,7 +52,7 @@ public class SettingsModule extends AbstractModule { @Override protected void configure() { final 
IndexScopedSettings indexScopedSettings = new IndexScopedSettings(settings, new HashSet<>(this.indexSettings.values())); - final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(this.clusterSettings.values())); + final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(this.nodeSettings.values())); // by now we are fully configured, lets check node level settings for unregistered index settings indexScopedSettings.validate(settings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE)); final Predicate acceptOnlyClusterSettings = TRIBE_CLIENT_NODE_SETTINGS_PREDICATE.or(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE).negate(); @@ -76,17 +76,18 @@ public class SettingsModule extends AbstractModule { registerSettingsFilter(setting.getKey()); } } - if (setting.hasClusterScope()) { - if (clusterSettings.containsKey(setting.getKey())) { + if (setting.hasNodeScope()) { + if (nodeSettings.containsKey(setting.getKey())) { throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); } - clusterSettings.put(setting.getKey(), setting); - } - if (setting.hasIndexScope()) { + nodeSettings.put(setting.getKey(), setting); + } else if (setting.hasIndexScope()) { if (indexSettings.containsKey(setting.getKey())) { throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); } indexSettings.put(setting.getKey(), setting); + } else { + throw new IllegalArgumentException("No scope found for setting [" + setting.getKey() + "]"); } } @@ -108,8 +109,8 @@ public class SettingsModule extends AbstractModule { * Check if a setting has already been registered */ public boolean exists(Setting setting) { - if (setting.hasClusterScope()) { - return clusterSettings.containsKey(setting.getKey()); + if (setting.hasNodeScope()) { + return nodeSettings.containsKey(setting.getKey()); } if (setting.hasIndexScope()) { return indexSettings.containsKey(setting.getKey()); diff 
--git a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index 7f4e9c8b6d11..df1288d4fd29 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -20,7 +20,7 @@ package org.elasticsearch.common.util.concurrent; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import java.util.Arrays; @@ -43,7 +43,7 @@ public class EsExecutors { * This is used to adjust thread pools sizes etc. per node. */ public static final Setting PROCESSORS_SETTING = - Setting.intSetting("processors", Math.min(32, Runtime.getRuntime().availableProcessors()), 1, SettingsProperty.ClusterScope); + Setting.intSetting("processors", Math.min(32, Runtime.getRuntime().availableProcessors()), 1, Property.NodeScope); /** * Returns the number of processors available but at most 32. 
diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index 47c115a47e0c..2ac6082e85da 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -19,12 +19,11 @@ package org.elasticsearch.common.util.concurrent; import org.apache.lucene.util.CloseableThreadLocal; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import java.io.Closeable; @@ -64,7 +63,7 @@ import java.util.concurrent.atomic.AtomicBoolean; public final class ThreadContext implements Closeable, Writeable{ public static final String PREFIX = "request.headers"; - public static final Setting DEFAULT_HEADERS_SETTING = Setting.groupSetting(PREFIX + ".", SettingsProperty.ClusterScope); + public static final Setting DEFAULT_HEADERS_SETTING = Setting.groupSetting(PREFIX + ".", Property.NodeScope); private final Map defaultHeader; private static final ThreadContextStruct DEFAULT_CONTEXT = new ThreadContextStruct(Collections.emptyMap()); private final ContextThreadLocal threadLocal; diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java b/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java index fa5c0e950ed0..4076b880d6f4 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java +++ b/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java @@ -23,7 +23,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; 
import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.discovery.local.LocalDiscovery; @@ -48,9 +48,9 @@ public class DiscoveryModule extends AbstractModule { public static final Setting DISCOVERY_TYPE_SETTING = new Setting<>("discovery.type", settings -> DiscoveryNode.localNode(settings) ? "local" : "zen", Function.identity(), - SettingsProperty.ClusterScope); + Property.NodeScope); public static final Setting ZEN_MASTER_SERVICE_TYPE_SETTING = - new Setting<>("discovery.zen.masterservice.type", "zen", Function.identity(), SettingsProperty.ClusterScope); + new Setting<>("discovery.zen.masterservice.type", "zen", Function.identity(), Property.NodeScope); private final Settings settings; private final Map>> unicastHostProviders = new HashMap<>(); diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java b/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java index 9cf78cf93e59..ca7ab342cd56 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java +++ b/core/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java @@ -24,7 +24,7 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.RestStatus; @@ -45,7 
+45,7 @@ public class DiscoverySettings extends AbstractComponent { **/ public static final Setting PUBLISH_TIMEOUT_SETTING = Setting.positiveTimeSetting("discovery.zen.publish_timeout", TimeValue.timeValueSeconds(30), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); /** * sets the timeout for receiving enough acks for a specific cluster state and committing it. failing @@ -54,14 +54,14 @@ public class DiscoverySettings extends AbstractComponent { public static final Setting COMMIT_TIMEOUT_SETTING = new Setting<>("discovery.zen.commit_timeout", (s) -> PUBLISH_TIMEOUT_SETTING.getRaw(s), (s) -> TimeValue.parseTimeValue(s, TimeValue.timeValueSeconds(30), "discovery.zen.commit_timeout"), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); public static final Setting NO_MASTER_BLOCK_SETTING = new Setting<>("discovery.zen.no_master_block", "write", DiscoverySettings::parseNoMasterBlock, - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); public static final Setting PUBLISH_DIFF_ENABLE_SETTING = - Setting.boolSetting("discovery.zen.publish_diff.enable", true, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.boolSetting("discovery.zen.publish_diff.enable", true, Property.Dynamic, Property.NodeScope); public static final Setting INITIAL_STATE_TIMEOUT_SETTING = - Setting.positiveTimeSetting("discovery.initial_state_timeout", TimeValue.timeValueSeconds(30), SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("discovery.initial_state_timeout", TimeValue.timeValueSeconds(30), Property.NodeScope); private volatile ClusterBlock noMasterBlock; private volatile TimeValue publishTimeout; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index f0a491f28212..63a0cfbe39d7 100644 --- 
a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -46,7 +46,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.Discovery; @@ -89,27 +89,27 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; public class ZenDiscovery extends AbstractLifecycleComponent implements Discovery, PingContextProvider { public final static Setting PING_TIMEOUT_SETTING = - Setting.positiveTimeSetting("discovery.zen.ping_timeout", timeValueSeconds(3), SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("discovery.zen.ping_timeout", timeValueSeconds(3), Property.NodeScope); public final static Setting JOIN_TIMEOUT_SETTING = Setting.timeSetting("discovery.zen.join_timeout", settings -> TimeValue.timeValueMillis(PING_TIMEOUT_SETTING.get(settings).millis() * 20).toString(), - TimeValue.timeValueMillis(0), SettingsProperty.ClusterScope); + TimeValue.timeValueMillis(0), Property.NodeScope); public final static Setting JOIN_RETRY_ATTEMPTS_SETTING = - Setting.intSetting("discovery.zen.join_retry_attempts", 3, 1, SettingsProperty.ClusterScope); + Setting.intSetting("discovery.zen.join_retry_attempts", 3, 1, Property.NodeScope); public final static Setting JOIN_RETRY_DELAY_SETTING = - Setting.positiveTimeSetting("discovery.zen.join_retry_delay", TimeValue.timeValueMillis(100), SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("discovery.zen.join_retry_delay", TimeValue.timeValueMillis(100), Property.NodeScope); public final static Setting 
MAX_PINGS_FROM_ANOTHER_MASTER_SETTING = - Setting.intSetting("discovery.zen.max_pings_from_another_master", 3, 1, SettingsProperty.ClusterScope); + Setting.intSetting("discovery.zen.max_pings_from_another_master", 3, 1, Property.NodeScope); public final static Setting SEND_LEAVE_REQUEST_SETTING = - Setting.boolSetting("discovery.zen.send_leave_request", true, SettingsProperty.ClusterScope); + Setting.boolSetting("discovery.zen.send_leave_request", true, Property.NodeScope); public final static Setting MASTER_ELECTION_FILTER_CLIENT_SETTING = - Setting.boolSetting("discovery.zen.master_election.filter_client", true, SettingsProperty.ClusterScope); + Setting.boolSetting("discovery.zen.master_election.filter_client", true, Property.NodeScope); public final static Setting MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING = Setting.timeSetting("discovery.zen.master_election.wait_for_joins_timeout", settings -> TimeValue.timeValueMillis(JOIN_TIMEOUT_SETTING.get(settings).millis() / 2).toString(), TimeValue.timeValueMillis(0), - SettingsProperty.ClusterScope); + Property.NodeScope); public final static Setting MASTER_ELECTION_FILTER_DATA_SETTING = - Setting.boolSetting("discovery.zen.master_election.filter_data", false, SettingsProperty.ClusterScope); + Setting.boolSetting("discovery.zen.master_election.filter_data", false, Property.NodeScope); public static final String DISCOVERY_REJOIN_ACTION_NAME = "internal:discovery/zen/rejoin"; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java b/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java index 8a35c6615e7f..a3da8be5a945 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java @@ -26,7 +26,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; @@ -42,7 +42,7 @@ import java.util.List; public class ElectMasterService extends AbstractComponent { public static final Setting DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING = - Setting.intSetting("discovery.zen.minimum_master_nodes", -1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.intSetting("discovery.zen.minimum_master_nodes", -1, Property.Dynamic, Property.NodeScope); // This is the minimum version a master needs to be on, otherwise it gets ignored // This is based on the minimum compatible version of the current version this node is on diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java b/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java index 6fc575d51cdf..1cfd46634a52 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java @@ -22,7 +22,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.threadpool.ThreadPool; @@ -38,15 +38,15 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; public abstract class FaultDetection extends AbstractComponent { public static final Setting CONNECT_ON_NETWORK_DISCONNECT_SETTING = - 
Setting.boolSetting("discovery.zen.fd.connect_on_network_disconnect", false, SettingsProperty.ClusterScope); + Setting.boolSetting("discovery.zen.fd.connect_on_network_disconnect", false, Property.NodeScope); public static final Setting PING_INTERVAL_SETTING = - Setting.positiveTimeSetting("discovery.zen.fd.ping_interval", timeValueSeconds(1), SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("discovery.zen.fd.ping_interval", timeValueSeconds(1), Property.NodeScope); public static final Setting PING_TIMEOUT_SETTING = - Setting.timeSetting("discovery.zen.fd.ping_timeout", timeValueSeconds(30), SettingsProperty.ClusterScope); + Setting.timeSetting("discovery.zen.fd.ping_timeout", timeValueSeconds(30), Property.NodeScope); public static final Setting PING_RETRIES_SETTING = - Setting.intSetting("discovery.zen.fd.ping_retries", 3, SettingsProperty.ClusterScope); + Setting.intSetting("discovery.zen.fd.ping_retries", 3, Property.NodeScope); public static final Setting REGISTER_CONNECTION_LISTENER_SETTING = - Setting.boolSetting("discovery.zen.fd.register_connection_listener", true, SettingsProperty.ClusterScope); + Setting.boolSetting("discovery.zen.fd.register_connection_listener", true, Property.NodeScope); protected final ThreadPool threadPool; protected final ClusterName clusterName; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java index 35e5688f2aa7..0e9b81ad1fcf 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java @@ -32,7 +32,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Setting; -import 
org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; @@ -89,9 +89,9 @@ public class UnicastZenPing extends AbstractLifecycleComponent implemen public static final String ACTION_NAME = "internal:discovery/zen/unicast"; public static final Setting> DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING = Setting.listSetting("discovery.zen.ping.unicast.hosts", Collections.emptyList(), Function.identity(), - SettingsProperty.ClusterScope); + Property.NodeScope); public static final Setting DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING = - Setting.intSetting("discovery.zen.ping.unicast.concurrent_connects", 10, 0, SettingsProperty.ClusterScope); + Setting.intSetting("discovery.zen.ping.unicast.concurrent_connects", 10, 0, Property.NodeScope); // these limits are per-address public static final int LIMIT_FOREIGN_PORTS_COUNT = 1; diff --git a/core/src/main/java/org/elasticsearch/env/Environment.java b/core/src/main/java/org/elasticsearch/env/Environment.java index 0e0ab1ace200..e022ce6ad2f2 100644 --- a/core/src/main/java/org/elasticsearch/env/Environment.java +++ b/core/src/main/java/org/elasticsearch/env/Environment.java @@ -23,7 +23,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import java.io.IOException; @@ -47,17 +47,17 @@ import static org.elasticsearch.common.Strings.cleanPath; // TODO: move PathUtils to be package-private here instead of // public+forbidden api! 
public class Environment { - public static final Setting PATH_HOME_SETTING = Setting.simpleString("path.home", SettingsProperty.ClusterScope); - public static final Setting PATH_CONF_SETTING = Setting.simpleString("path.conf", SettingsProperty.ClusterScope); - public static final Setting PATH_SCRIPTS_SETTING = Setting.simpleString("path.scripts", SettingsProperty.ClusterScope); + public static final Setting PATH_HOME_SETTING = Setting.simpleString("path.home", Property.NodeScope); + public static final Setting PATH_CONF_SETTING = Setting.simpleString("path.conf", Property.NodeScope); + public static final Setting PATH_SCRIPTS_SETTING = Setting.simpleString("path.scripts", Property.NodeScope); public static final Setting> PATH_DATA_SETTING = - Setting.listSetting("path.data", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); - public static final Setting PATH_LOGS_SETTING = Setting.simpleString("path.logs", SettingsProperty.ClusterScope); - public static final Setting PATH_PLUGINS_SETTING = Setting.simpleString("path.plugins", SettingsProperty.ClusterScope); + Setting.listSetting("path.data", Collections.emptyList(), Function.identity(), Property.NodeScope); + public static final Setting PATH_LOGS_SETTING = Setting.simpleString("path.logs", Property.NodeScope); + public static final Setting PATH_PLUGINS_SETTING = Setting.simpleString("path.plugins", Property.NodeScope); public static final Setting> PATH_REPO_SETTING = - Setting.listSetting("path.repo", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); - public static final Setting PATH_SHARED_DATA_SETTING = Setting.simpleString("path.shared_data", SettingsProperty.ClusterScope); - public static final Setting PIDFILE_SETTING = Setting.simpleString("pidfile", SettingsProperty.ClusterScope); + Setting.listSetting("path.repo", Collections.emptyList(), Function.identity(), Property.NodeScope); + public static final Setting PATH_SHARED_DATA_SETTING = 
Setting.simpleString("path.shared_data", Property.NodeScope); + public static final Setting PIDFILE_SETTING = Setting.simpleString("pidfile", Property.NodeScope); private final Settings settings; diff --git a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java index e93a835db366..b4b69e6ca1b3 100644 --- a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -36,7 +36,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -49,7 +49,6 @@ import org.elasticsearch.index.store.FsDirectoryService; import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.monitor.fs.FsProbe; import org.elasticsearch.monitor.jvm.JvmInfo; -import org.elasticsearch.monitor.process.ProcessProbe; import java.io.Closeable; import java.io.IOException; @@ -138,19 +137,19 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl * Maximum number of data nodes that should run in an environment. */ public static final Setting MAX_LOCAL_STORAGE_NODES_SETTING = Setting.intSetting("node.max_local_storage_nodes", 50, 1, - SettingsProperty.ClusterScope); + Property.NodeScope); /** * If true automatically append node id to custom data paths. 
*/ public static final Setting ADD_NODE_ID_TO_CUSTOM_PATH = - Setting.boolSetting("node.add_id_to_custom_path", true, SettingsProperty.ClusterScope); + Setting.boolSetting("node.add_id_to_custom_path", true, Property.NodeScope); /** * If true the [verbose] SegmentInfos.infoStream logging is sent to System.out. */ public static final Setting ENABLE_LUCENE_SEGMENT_INFOS_TRACE_SETTING = - Setting.boolSetting("node.enable_lucene_segment_infos_trace", false, SettingsProperty.ClusterScope); + Setting.boolSetting("node.enable_lucene_segment_infos_trace", false, Property.NodeScope); public static final String NODES_FOLDER = "nodes"; public static final String INDICES_FOLDER = "indices"; @@ -225,7 +224,7 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl maybeLogPathDetails(); maybeLogHeapDetails(); - + applySegmentInfosTrace(settings); assertCanWrite(); success = true; diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java index 2f7eeac6b0b4..1a5424d0f073 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java @@ -37,7 +37,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.Discovery; @@ -53,19 +53,19 @@ import java.util.concurrent.atomic.AtomicBoolean; public class GatewayService extends AbstractLifecycleComponent implements ClusterStateListener { public static final Setting EXPECTED_NODES_SETTING = - 
Setting.intSetting("gateway.expected_nodes", -1, -1, SettingsProperty.ClusterScope); + Setting.intSetting("gateway.expected_nodes", -1, -1, Property.NodeScope); public static final Setting EXPECTED_DATA_NODES_SETTING = - Setting.intSetting("gateway.expected_data_nodes", -1, -1, SettingsProperty.ClusterScope); + Setting.intSetting("gateway.expected_data_nodes", -1, -1, Property.NodeScope); public static final Setting EXPECTED_MASTER_NODES_SETTING = - Setting.intSetting("gateway.expected_master_nodes", -1, -1, SettingsProperty.ClusterScope); + Setting.intSetting("gateway.expected_master_nodes", -1, -1, Property.NodeScope); public static final Setting RECOVER_AFTER_TIME_SETTING = - Setting.positiveTimeSetting("gateway.recover_after_time", TimeValue.timeValueMillis(0), SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("gateway.recover_after_time", TimeValue.timeValueMillis(0), Property.NodeScope); public static final Setting RECOVER_AFTER_NODES_SETTING = - Setting.intSetting("gateway.recover_after_nodes", -1, -1, SettingsProperty.ClusterScope); + Setting.intSetting("gateway.recover_after_nodes", -1, -1, Property.NodeScope); public static final Setting RECOVER_AFTER_DATA_NODES_SETTING = - Setting.intSetting("gateway.recover_after_data_nodes", -1, -1, SettingsProperty.ClusterScope); + Setting.intSetting("gateway.recover_after_data_nodes", -1, -1, Property.NodeScope); public static final Setting RECOVER_AFTER_MASTER_NODES_SETTING = - Setting.intSetting("gateway.recover_after_master_nodes", 0, 0, SettingsProperty.ClusterScope); + Setting.intSetting("gateway.recover_after_master_nodes", 0, 0, Property.NodeScope); public static final ClusterBlock STATE_NOT_RECOVERED_BLOCK = new ClusterBlock(1, "state not recovered / initialized", true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL); diff --git a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java index 
6a37a0a7d253..5f6e50d6fc90 100644 --- a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java @@ -31,7 +31,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.TransportNodesListGatewayStartedShards.NodeGatewayStartedShards; import org.elasticsearch.index.shard.ShardStateMetaData; @@ -70,11 +70,11 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { public static final Setting NODE_INITIAL_SHARDS_SETTING = new Setting<>("gateway.initial_shards", (settings) -> settings.get("gateway.local.initial_shards", "quorum"), INITIAL_SHARDS_PARSER, - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); @Deprecated public static final Setting INDEX_RECOVERY_INITIAL_SHARDS_SETTING = new Setting<>("index.recovery.initial_shards", (settings) -> NODE_INITIAL_SHARDS_SETTING.get(settings) , INITIAL_SHARDS_PARSER, - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); public PrimaryShardAllocator(Settings settings) { super(settings); diff --git a/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java b/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java index b1b29eae60ce..48af1c839655 100644 --- a/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java +++ b/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java @@ -20,7 +20,7 @@ package org.elasticsearch.http; import org.elasticsearch.common.settings.Setting; 
-import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.transport.PortsRange; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -34,50 +34,50 @@ import static org.elasticsearch.common.settings.Setting.listSetting; public final class HttpTransportSettings { public static final Setting SETTING_CORS_ENABLED = - Setting.boolSetting("http.cors.enabled", false, SettingsProperty.ClusterScope); + Setting.boolSetting("http.cors.enabled", false, Property.NodeScope); public static final Setting SETTING_CORS_ALLOW_ORIGIN = - new Setting("http.cors.allow-origin", "", (value) -> value, SettingsProperty.ClusterScope); + new Setting("http.cors.allow-origin", "", (value) -> value, Property.NodeScope); public static final Setting SETTING_CORS_MAX_AGE = - Setting.intSetting("http.cors.max-age", 1728000, SettingsProperty.ClusterScope); + Setting.intSetting("http.cors.max-age", 1728000, Property.NodeScope); public static final Setting SETTING_CORS_ALLOW_METHODS = - new Setting("http.cors.allow-methods", "OPTIONS, HEAD, GET, POST, PUT, DELETE", (value) -> value, SettingsProperty.ClusterScope); + new Setting("http.cors.allow-methods", "OPTIONS, HEAD, GET, POST, PUT, DELETE", (value) -> value, Property.NodeScope); public static final Setting SETTING_CORS_ALLOW_HEADERS = - new Setting("http.cors.allow-headers", "X-Requested-With, Content-Type, Content-Length", (value) -> value, SettingsProperty.ClusterScope); + new Setting("http.cors.allow-headers", "X-Requested-With, Content-Type, Content-Length", (value) -> value, Property.NodeScope); public static final Setting SETTING_CORS_ALLOW_CREDENTIALS = - Setting.boolSetting("http.cors.allow-credentials", false, SettingsProperty.ClusterScope); + Setting.boolSetting("http.cors.allow-credentials", false, Property.NodeScope); public static final Setting SETTING_PIPELINING = - 
Setting.boolSetting("http.pipelining", true, SettingsProperty.ClusterScope); + Setting.boolSetting("http.pipelining", true, Property.NodeScope); public static final Setting SETTING_PIPELINING_MAX_EVENTS = - Setting.intSetting("http.pipelining.max_events", 10000, SettingsProperty.ClusterScope); + Setting.intSetting("http.pipelining.max_events", 10000, Property.NodeScope); public static final Setting SETTING_HTTP_COMPRESSION = - Setting.boolSetting("http.compression", false, SettingsProperty.ClusterScope); + Setting.boolSetting("http.compression", false, Property.NodeScope); public static final Setting SETTING_HTTP_COMPRESSION_LEVEL = - Setting.intSetting("http.compression_level", 6, SettingsProperty.ClusterScope); + Setting.intSetting("http.compression_level", 6, Property.NodeScope); public static final Setting> SETTING_HTTP_HOST = - listSetting("http.host", emptyList(), Function.identity(), SettingsProperty.ClusterScope); + listSetting("http.host", emptyList(), Function.identity(), Property.NodeScope); public static final Setting> SETTING_HTTP_PUBLISH_HOST = - listSetting("http.publish_host", SETTING_HTTP_HOST, Function.identity(), SettingsProperty.ClusterScope); + listSetting("http.publish_host", SETTING_HTTP_HOST, Function.identity(), Property.NodeScope); public static final Setting> SETTING_HTTP_BIND_HOST = - listSetting("http.bind_host", SETTING_HTTP_HOST, Function.identity(), SettingsProperty.ClusterScope); + listSetting("http.bind_host", SETTING_HTTP_HOST, Function.identity(), Property.NodeScope); public static final Setting SETTING_HTTP_PORT = - new Setting("http.port", "9200-9300", PortsRange::new, SettingsProperty.ClusterScope); + new Setting("http.port", "9200-9300", PortsRange::new, Property.NodeScope); public static final Setting SETTING_HTTP_PUBLISH_PORT = - Setting.intSetting("http.publish_port", -1, -1, SettingsProperty.ClusterScope); + Setting.intSetting("http.publish_port", -1, -1, Property.NodeScope); public static final Setting 
SETTING_HTTP_DETAILED_ERRORS_ENABLED = - Setting.boolSetting("http.detailed_errors.enabled", true, SettingsProperty.ClusterScope); + Setting.boolSetting("http.detailed_errors.enabled", true, Property.NodeScope); public static final Setting SETTING_HTTP_MAX_CONTENT_LENGTH = - Setting.byteSizeSetting("http.max_content_length", new ByteSizeValue(100, ByteSizeUnit.MB), SettingsProperty.ClusterScope); + Setting.byteSizeSetting("http.max_content_length", new ByteSizeValue(100, ByteSizeUnit.MB), Property.NodeScope); public static final Setting SETTING_HTTP_MAX_CHUNK_SIZE = - Setting.byteSizeSetting("http.max_chunk_size", new ByteSizeValue(8, ByteSizeUnit.KB), SettingsProperty.ClusterScope); + Setting.byteSizeSetting("http.max_chunk_size", new ByteSizeValue(8, ByteSizeUnit.KB), Property.NodeScope); public static final Setting SETTING_HTTP_MAX_HEADER_SIZE = - Setting.byteSizeSetting("http.max_header_size", new ByteSizeValue(8, ByteSizeUnit.KB), SettingsProperty.ClusterScope); + Setting.byteSizeSetting("http.max_header_size", new ByteSizeValue(8, ByteSizeUnit.KB), Property.NodeScope); public static final Setting SETTING_HTTP_MAX_INITIAL_LINE_LENGTH = - Setting.byteSizeSetting("http.max_initial_line_length", new ByteSizeValue(4, ByteSizeUnit.KB), SettingsProperty.ClusterScope); + Setting.byteSizeSetting("http.max_initial_line_length", new ByteSizeValue(4, ByteSizeUnit.KB), Property.NodeScope); // don't reset cookies by default, since I don't think we really need to // note, parsing cookies was fixed in netty 3.5.1 regarding stack allocation, but still, currently, we don't need cookies public static final Setting SETTING_HTTP_RESET_COOKIES = - Setting.boolSetting("http.reset_cookies", false, SettingsProperty.ClusterScope); + Setting.boolSetting("http.reset_cookies", false, Property.NodeScope); private HttpTransportSettings() { } diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java 
b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java index b072afc486d0..525fe96e07fb 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java +++ b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java @@ -29,7 +29,7 @@ import org.elasticsearch.common.netty.OpenChannelsHandler; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.InetSocketTransportAddress; @@ -120,29 +120,29 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent SETTING_HTTP_NETTY_MAX_CUMULATION_BUFFER_CAPACITY = Setting.byteSizeSetting("http.netty.max_cumulation_buffer_capacity", new ByteSizeValue(-1), - SettingsProperty.ClusterScope); + Property.NodeScope); public static Setting SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = - Setting.intSetting("http.netty.max_composite_buffer_components", -1, SettingsProperty.ClusterScope); + Setting.intSetting("http.netty.max_composite_buffer_components", -1, Property.NodeScope); public static final Setting SETTING_HTTP_WORKER_COUNT = new Setting<>("http.netty.worker_count", (s) -> Integer.toString(EsExecutors.boundedNumberOfProcessors(s) * 2), - (s) -> Setting.parseInt(s, 1, "http.netty.worker_count"), SettingsProperty.ClusterScope); + (s) -> Setting.parseInt(s, 1, "http.netty.worker_count"), Property.NodeScope); public static final Setting SETTING_HTTP_TCP_NO_DELAY = - boolSetting("http.tcp_no_delay", NetworkService.TcpSettings.TCP_NO_DELAY, SettingsProperty.ClusterScope); + boolSetting("http.tcp_no_delay", NetworkService.TcpSettings.TCP_NO_DELAY, 
Property.NodeScope); public static final Setting SETTING_HTTP_TCP_KEEP_ALIVE = - boolSetting("http.tcp.keep_alive", NetworkService.TcpSettings.TCP_KEEP_ALIVE, SettingsProperty.ClusterScope); + boolSetting("http.tcp.keep_alive", NetworkService.TcpSettings.TCP_KEEP_ALIVE, Property.NodeScope); public static final Setting SETTING_HTTP_TCP_BLOCKING_SERVER = - boolSetting("http.tcp.blocking_server", NetworkService.TcpSettings.TCP_BLOCKING_SERVER, SettingsProperty.ClusterScope); + boolSetting("http.tcp.blocking_server", NetworkService.TcpSettings.TCP_BLOCKING_SERVER, Property.NodeScope); public static final Setting SETTING_HTTP_TCP_REUSE_ADDRESS = - boolSetting("http.tcp.reuse_address", NetworkService.TcpSettings.TCP_REUSE_ADDRESS, SettingsProperty.ClusterScope); + boolSetting("http.tcp.reuse_address", NetworkService.TcpSettings.TCP_REUSE_ADDRESS, Property.NodeScope); public static final Setting SETTING_HTTP_TCP_SEND_BUFFER_SIZE = Setting.byteSizeSetting("http.tcp.send_buffer_size", NetworkService.TcpSettings.TCP_SEND_BUFFER_SIZE, - SettingsProperty.ClusterScope); + Property.NodeScope); public static final Setting SETTING_HTTP_TCP_RECEIVE_BUFFER_SIZE = Setting.byteSizeSetting("http.tcp.receive_buffer_size", NetworkService.TcpSettings.TCP_RECEIVE_BUFFER_SIZE, - SettingsProperty.ClusterScope); + Property.NodeScope); public static final Setting SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE = Setting.byteSizeSetting("transport.netty.receive_predictor_size", settings -> { @@ -154,11 +154,11 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MIN = - byteSizeSetting("http.netty.receive_predictor_min", SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE, SettingsProperty.ClusterScope); + byteSizeSetting("http.netty.receive_predictor_min", SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE, Property.NodeScope); public static final Setting SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MAX = - byteSizeSetting("http.netty.receive_predictor_max", 
SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE, SettingsProperty.ClusterScope); + byteSizeSetting("http.netty.receive_predictor_max", SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE, Property.NodeScope); protected final NetworkService networkService; diff --git a/core/src/main/java/org/elasticsearch/index/IndexModule.java b/core/src/main/java/org/elasticsearch/index/IndexModule.java index 5c1217a863e1..b6120bd9d780 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/core/src/main/java/org/elasticsearch/index/IndexModule.java @@ -22,7 +22,7 @@ package org.elasticsearch.index; import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.analysis.AnalysisRegistry; @@ -67,15 +67,15 @@ import java.util.function.Function; public final class IndexModule { public static final Setting INDEX_STORE_TYPE_SETTING = - new Setting<>("index.store.type", "", Function.identity(), SettingsProperty.IndexScope); + new Setting<>("index.store.type", "", Function.identity(), Property.IndexScope); public static final String SIMILARITY_SETTINGS_PREFIX = "index.similarity"; public static final String INDEX_QUERY_CACHE = "index"; public static final String NONE_QUERY_CACHE = "none"; public static final Setting INDEX_QUERY_CACHE_TYPE_SETTING = - new Setting<>("index.queries.cache.type", INDEX_QUERY_CACHE, Function.identity(), SettingsProperty.IndexScope); + new Setting<>("index.queries.cache.type", INDEX_QUERY_CACHE, Function.identity(), Property.IndexScope); // for test purposes only public static final Setting INDEX_QUERY_CACHE_EVERYTHING_SETTING = - Setting.boolSetting("index.queries.cache.everything", false, SettingsProperty.IndexScope); + 
Setting.boolSetting("index.queries.cache.everything", false, Property.IndexScope); private final IndexSettings indexSettings; private final IndexStoreConfig indexStoreConfig; private final AnalysisRegistry analysisRegistry; diff --git a/core/src/main/java/org/elasticsearch/index/IndexSettings.java b/core/src/main/java/org/elasticsearch/index/IndexSettings.java index bb859f04652e..b996e70b1e52 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/core/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -51,25 +51,25 @@ import java.util.function.Predicate; public final class IndexSettings { public static final Setting DEFAULT_FIELD_SETTING = - new Setting<>("index.query.default_field", AllFieldMapper.NAME, Function.identity(), SettingsProperty.IndexScope); + new Setting<>("index.query.default_field", AllFieldMapper.NAME, Function.identity(), Property.IndexScope); public static final Setting QUERY_STRING_LENIENT_SETTING = - Setting.boolSetting("index.query_string.lenient", false, SettingsProperty.IndexScope); + Setting.boolSetting("index.query_string.lenient", false, Property.IndexScope); public static final Setting QUERY_STRING_ANALYZE_WILDCARD = - Setting.boolSetting("indices.query.query_string.analyze_wildcard", false, SettingsProperty.ClusterScope); + Setting.boolSetting("indices.query.query_string.analyze_wildcard", false, Property.NodeScope); public static final Setting QUERY_STRING_ALLOW_LEADING_WILDCARD = - 
Setting.boolSetting("indices.query.query_string.allowLeadingWildcard", true, SettingsProperty.ClusterScope); + Setting.boolSetting("indices.query.query_string.allowLeadingWildcard", true, Property.NodeScope); public static final Setting ALLOW_UNMAPPED = - Setting.boolSetting("index.query.parse.allow_unmapped_fields", true, SettingsProperty.IndexScope); + Setting.boolSetting("index.query.parse.allow_unmapped_fields", true, Property.IndexScope); public static final Setting INDEX_TRANSLOG_SYNC_INTERVAL_SETTING = Setting.timeSetting("index.translog.sync_interval", TimeValue.timeValueSeconds(5), TimeValue.timeValueMillis(100), - SettingsProperty.IndexScope); + Property.IndexScope); public static final Setting INDEX_TRANSLOG_DURABILITY_SETTING = new Setting<>("index.translog.durability", Translog.Durability.REQUEST.name(), - (value) -> Translog.Durability.valueOf(value.toUpperCase(Locale.ROOT)), SettingsProperty.Dynamic, SettingsProperty.IndexScope); + (value) -> Translog.Durability.valueOf(value.toUpperCase(Locale.ROOT)), Property.Dynamic, Property.IndexScope); public static final Setting INDEX_WARMER_ENABLED_SETTING = - Setting.boolSetting("index.warmer.enabled", true, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting.boolSetting("index.warmer.enabled", true, Property.Dynamic, Property.IndexScope); public static final Setting INDEX_TTL_DISABLE_PURGE_SETTING = - Setting.boolSetting("index.ttl.disable_purge", false, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting.boolSetting("index.ttl.disable_purge", false, Property.Dynamic, Property.IndexScope); public static final Setting INDEX_CHECK_ON_STARTUP = new Setting<>("index.shard.check_on_startup", "false", (s) -> { switch(s) { case "false": @@ -80,7 +80,7 @@ public final class IndexSettings { default: throw new IllegalArgumentException("unknown value for [index.shard.check_on_startup] must be one of [true, false, fix, checksum] but was: " + s); } - }, SettingsProperty.IndexScope); + }, 
Property.IndexScope); /** * Index setting describing the maximum value of from + size on a query. @@ -91,14 +91,14 @@ public final class IndexSettings { * safely. */ public static final Setting MAX_RESULT_WINDOW_SETTING = - Setting.intSetting("index.max_result_window", 10000, 1, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting.intSetting("index.max_result_window", 10000, 1, Property.Dynamic, Property.IndexScope); public static final TimeValue DEFAULT_REFRESH_INTERVAL = new TimeValue(1, TimeUnit.SECONDS); public static final Setting INDEX_REFRESH_INTERVAL_SETTING = Setting.timeSetting("index.refresh_interval", DEFAULT_REFRESH_INTERVAL, new TimeValue(-1, TimeUnit.MILLISECONDS), - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); public static final Setting INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING = - Setting.byteSizeSetting("index.translog.flush_threshold_size", new ByteSizeValue(512, ByteSizeUnit.MB), SettingsProperty.Dynamic, - SettingsProperty.IndexScope); + Setting.byteSizeSetting("index.translog.flush_threshold_size", new ByteSizeValue(512, ByteSizeUnit.MB), Property.Dynamic, + Property.IndexScope); /** @@ -107,8 +107,8 @@ public final class IndexSettings { */ public static final TimeValue DEFAULT_GC_DELETES = TimeValue.timeValueSeconds(60); public static final Setting INDEX_GC_DELETES_SETTING = - Setting.timeSetting("index.gc_deletes", DEFAULT_GC_DELETES, new TimeValue(-1, TimeUnit.MILLISECONDS), SettingsProperty.Dynamic, - SettingsProperty.IndexScope); + Setting.timeSetting("index.gc_deletes", DEFAULT_GC_DELETES, new TimeValue(-1, TimeUnit.MILLISECONDS), Property.Dynamic, + Property.IndexScope); private final Index index; private final Version version; diff --git a/core/src/main/java/org/elasticsearch/index/IndexWarmer.java b/core/src/main/java/org/elasticsearch/index/IndexWarmer.java index b0b06e7ec67a..ed1814681ac6 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexWarmer.java +++ 
b/core/src/main/java/org/elasticsearch/index/IndexWarmer.java @@ -27,7 +27,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.engine.Engine; @@ -57,7 +57,7 @@ public final class IndexWarmer extends AbstractComponent { public static final Setting INDEX_NORMS_LOADING_SETTING = new Setting<>("index.norms.loading", MappedFieldType.Loading.LAZY.toString(), (s) -> MappedFieldType.Loading.parse(s, MappedFieldType.Loading.LAZY), - SettingsProperty.IndexScope); + Property.IndexScope); private final List listeners; IndexWarmer(Settings settings, ThreadPool threadPool, Listener... listeners) { diff --git a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java index eff27e6e04d6..21596b3eb001 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java +++ b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java @@ -24,7 +24,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.engine.Engine; @@ -57,21 +57,21 @@ public final class IndexingSlowLog implements IndexingOperationListener { private static final String INDEX_INDEXING_SLOWLOG_PREFIX = 
"index.indexing.slowlog"; public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.warn", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), Property.Dynamic, Property.IndexScope); public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.info", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), Property.Dynamic, Property.IndexScope); public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.debug", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), Property.Dynamic, Property.IndexScope); public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.trace", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), Property.Dynamic, Property.IndexScope); public static final Setting INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING = - Setting.boolSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".reformat", true, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting.boolSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".reformat", true, Property.Dynamic, Property.IndexScope); public static final Setting INDEX_INDEXING_SLOWLOG_LEVEL_SETTING = - new Setting<>(INDEX_INDEXING_SLOWLOG_PREFIX +".level", SlowLogLevel.TRACE.name(), SlowLogLevel::parse, SettingsProperty.Dynamic, - SettingsProperty.IndexScope); + new 
Setting<>(INDEX_INDEXING_SLOWLOG_PREFIX +".level", SlowLogLevel.TRACE.name(), SlowLogLevel::parse, Property.Dynamic, + Property.IndexScope); /** * Reads how much of the source to log. The user can specify any value they * like and numbers are interpreted the maximum number of characters to log @@ -84,7 +84,7 @@ public final class IndexingSlowLog implements IndexingOperationListener { } catch (NumberFormatException e) { return Booleans.parseBoolean(value, true) ? Integer.MAX_VALUE : 0; } - }, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + }, Property.Dynamic, Property.IndexScope); IndexingSlowLog(IndexSettings indexSettings) { this(indexSettings, Loggers.getLogger(INDEX_INDEXING_SLOWLOG_PREFIX + ".index"), diff --git a/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java b/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java index 35ead01981c8..c8d82eae8885 100644 --- a/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java +++ b/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java @@ -25,7 +25,7 @@ import org.apache.lucene.index.TieredMergePolicy; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -129,29 +129,29 @@ public final class MergePolicyConfig { public static final double DEFAULT_RECLAIM_DELETES_WEIGHT = 2.0d; public static final Setting INDEX_COMPOUND_FORMAT_SETTING = new Setting<>("index.compound_format", Double.toString(TieredMergePolicy.DEFAULT_NO_CFS_RATIO), MergePolicyConfig::parseNoCFSRatio, - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); public static final Setting 
INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING = Setting.doubleSetting("index.merge.policy.expunge_deletes_allowed", DEFAULT_EXPUNGE_DELETES_ALLOWED, 0.0d, - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); public static final Setting INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING = Setting.byteSizeSetting("index.merge.policy.floor_segment", DEFAULT_FLOOR_SEGMENT, - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); public static final Setting INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING = Setting.intSetting("index.merge.policy.max_merge_at_once", DEFAULT_MAX_MERGE_AT_ONCE, 2, - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); public static final Setting INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING = Setting.intSetting("index.merge.policy.max_merge_at_once_explicit", DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT, 2, - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); public static final Setting INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING = Setting.byteSizeSetting("index.merge.policy.max_merged_segment", DEFAULT_MAX_MERGED_SEGMENT, - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); public static final Setting INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING = Setting.doubleSetting("index.merge.policy.segments_per_tier", DEFAULT_SEGMENTS_PER_TIER, 2.0d, - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); public static final Setting INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING = Setting.doubleSetting("index.merge.policy.reclaim_deletes_weight", DEFAULT_RECLAIM_DELETES_WEIGHT, 0.0d, - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); public static final String INDEX_MERGE_ENABLED = "index.merge.enabled"; // don't convert to Setting<> and 
register... we only set this in tests and register via a plugin diff --git a/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java b/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java index 1cfc5c82a70b..2eb43a50ee47 100644 --- a/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java +++ b/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java @@ -21,7 +21,7 @@ package org.elasticsearch.index; import org.apache.lucene.index.ConcurrentMergeScheduler; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.util.concurrent.EsExecutors; /** @@ -55,14 +55,14 @@ public final class MergeSchedulerConfig { public static final Setting MAX_THREAD_COUNT_SETTING = new Setting<>("index.merge.scheduler.max_thread_count", (s) -> Integer.toString(Math.max(1, Math.min(4, EsExecutors.boundedNumberOfProcessors(s) / 2))), - (s) -> Setting.parseInt(s, 1, "index.merge.scheduler.max_thread_count"), SettingsProperty.Dynamic, - SettingsProperty.IndexScope); + (s) -> Setting.parseInt(s, 1, "index.merge.scheduler.max_thread_count"), Property.Dynamic, + Property.IndexScope); public static final Setting MAX_MERGE_COUNT_SETTING = new Setting<>("index.merge.scheduler.max_merge_count", (s) -> Integer.toString(MAX_THREAD_COUNT_SETTING.get(s) + 5), - (s) -> Setting.parseInt(s, 1, "index.merge.scheduler.max_merge_count"), SettingsProperty.Dynamic, SettingsProperty.IndexScope); + (s) -> Setting.parseInt(s, 1, "index.merge.scheduler.max_merge_count"), Property.Dynamic, Property.IndexScope); public static final Setting AUTO_THROTTLE_SETTING = - Setting.boolSetting("index.merge.scheduler.auto_throttle", true, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting.boolSetting("index.merge.scheduler.auto_throttle", true, Property.Dynamic, Property.IndexScope); private volatile boolean 
autoThrottle; private volatile int maxThreadCount; diff --git a/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java b/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java index 2770f7e6e084..cfa779d64aa3 100644 --- a/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java +++ b/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.search.internal.SearchContext; @@ -53,33 +53,33 @@ public final class SearchSlowLog { private static final String INDEX_SEARCH_SLOWLOG_PREFIX = "index.search.slowlog"; public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.warn", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), Property.Dynamic, Property.IndexScope); public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.info", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), Property.Dynamic, Property.IndexScope); public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.debug", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), Property.Dynamic, Property.IndexScope); public static 
final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.trace", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), Property.Dynamic, Property.IndexScope); public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.warn", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), Property.Dynamic, Property.IndexScope); public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.info", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), Property.Dynamic, Property.IndexScope); public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.debug", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), Property.Dynamic, Property.IndexScope); public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.trace", TimeValue.timeValueNanos(-1), - TimeValue.timeValueMillis(-1), SettingsProperty.Dynamic, SettingsProperty.IndexScope); + TimeValue.timeValueMillis(-1), Property.Dynamic, Property.IndexScope); public static final Setting INDEX_SEARCH_SLOWLOG_REFORMAT = - Setting.boolSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".reformat", true, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting.boolSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".reformat", true, 
Property.Dynamic, Property.IndexScope); public static final Setting INDEX_SEARCH_SLOWLOG_LEVEL = - new Setting<>(INDEX_SEARCH_SLOWLOG_PREFIX + ".level", SlowLogLevel.TRACE.name(), SlowLogLevel::parse, SettingsProperty.Dynamic, - SettingsProperty.IndexScope); + new Setting<>(INDEX_SEARCH_SLOWLOG_PREFIX + ".level", SlowLogLevel.TRACE.name(), SlowLogLevel::parse, Property.Dynamic, + Property.IndexScope); public SearchSlowLog(IndexSettings indexSettings) { diff --git a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java index 756d79c2440e..19ec3c8402ec 100644 --- a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +++ b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java @@ -38,7 +38,7 @@ import org.elasticsearch.common.cache.RemovalListener; import org.elasticsearch.common.cache.RemovalNotification; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; @@ -72,7 +72,7 @@ import java.util.concurrent.Executor; public final class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener>, Closeable { public static final Setting INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING = - Setting.boolSetting("index.load_fixed_bitset_filters_eagerly", true, SettingsProperty.IndexScope); + Setting.boolSetting("index.load_fixed_bitset_filters_eagerly", true, Property.IndexScope); private final boolean loadRandomAccessFiltersEagerly; private final Cache> loadedFilters; diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java 
b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java index e1bcd5bb6984..14a8f043234a 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java +++ b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java @@ -26,7 +26,7 @@ import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.similarities.Similarity; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -40,8 +40,6 @@ import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.IndexingMemoryController; import org.elasticsearch.threadpool.ThreadPool; -import java.util.Set; - /* * Holds all the configuration that is used to create an {@link Engine}. 
* Once {@link Engine} has been created with this object, changes to this @@ -84,7 +82,7 @@ public final class EngineConfig { } return s; } - }, SettingsProperty.IndexScope); + }, Property.IndexScope); /** if set to true the engine will start even if the translog id in the commit point can not be found */ public static final String INDEX_FORCE_NEW_TRANSLOG = "index.engine.force_new_translog"; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java index 394f965f97cb..a7a76e5e7aac 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java @@ -24,7 +24,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.plain.AbstractGeoPointDVIndexFieldData; @@ -68,7 +68,7 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo default: throw new IllegalArgumentException("failed to parse [" + s + "] must be one of [node,node]"); } - }, SettingsProperty.IndexScope); + }, Property.IndexScope); private static final IndexFieldData.Builder MISSING_DOC_VALUES_BUILDER = (indexProperties, fieldType, cache, breakerService, mapperService1) -> { throw new IllegalStateException("Can't load fielddata on [" + fieldType.name() diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 4a1da492130a..528c8a8ee038 100644 --- 
a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -29,7 +29,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -51,9 +51,9 @@ import java.util.stream.StreamSupport; public abstract class FieldMapper extends Mapper implements Cloneable { public static final Setting IGNORE_MALFORMED_SETTING = - Setting.boolSetting("index.mapping.ignore_malformed", false, SettingsProperty.IndexScope); + Setting.boolSetting("index.mapping.ignore_malformed", false, Property.IndexScope); public static final Setting COERCE_SETTING = - Setting.boolSetting("index.mapping.coerce", false, SettingsProperty.IndexScope); + Setting.boolSetting("index.mapping.coerce", false, Property.IndexScope); public abstract static class Builder extends Mapper.Builder { protected final MappedFieldType fieldType; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 0acf52b87288..414ea0f7e9c2 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -27,7 +27,7 @@ import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import 
org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisService; @@ -83,10 +83,10 @@ public class MapperService extends AbstractIndexComponent implements Closeable { public static final String DEFAULT_MAPPING = "_default_"; public static final Setting INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING = - Setting.longSetting("index.mapping.nested_fields.limit", 50L, 0, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting.longSetting("index.mapping.nested_fields.limit", 50L, 0, Property.Dynamic, Property.IndexScope); public static final boolean INDEX_MAPPER_DYNAMIC_DEFAULT = true; public static final Setting INDEX_MAPPER_DYNAMIC_SETTING = - Setting.boolSetting("index.mapper.dynamic", INDEX_MAPPER_DYNAMIC_DEFAULT, SettingsProperty.IndexScope); + Setting.boolSetting("index.mapper.dynamic", INDEX_MAPPER_DYNAMIC_DEFAULT, Property.IndexScope); private static ObjectHashSet META_FIELDS = ObjectHashSet.from( "_uid", "_id", "_type", "_all", "_parent", "_routing", "_index", "_size", "_timestamp", "_ttl" diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java index 02974157aed1..8a0e4140193b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java @@ -33,7 +33,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import 
org.elasticsearch.common.xcontent.XContentBuilder; @@ -55,7 +55,7 @@ import java.util.List; public abstract class NumberFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll { // this is private since it has a different default private static final Setting COERCE_SETTING = - Setting.boolSetting("index.mapping.coerce", true, SettingsProperty.IndexScope); + Setting.boolSetting("index.mapping.coerce", true, Property.IndexScope); public static class Defaults { diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java index 17e54ced15f8..0a0cb9e96d9c 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java @@ -31,14 +31,12 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.metrics.MeanMetric; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.fielddata.IndexFieldDataService; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; @@ -63,7 +61,7 @@ import java.util.concurrent.TimeUnit; public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent implements Closeable { public final static Setting 
INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING = - Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, SettingsProperty.IndexScope); + Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, Property.IndexScope); private final ConcurrentMap percolateQueries = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency(); private final QueryShardContext queryShardContext; diff --git a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java index ac84fd76179e..933fd7845887 100644 --- a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java +++ b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java @@ -36,8 +36,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; @@ -63,7 +62,7 @@ public class FsDirectoryService extends DirectoryService implements StoreRateLim default: throw new IllegalArgumentException("unrecognized [index.store.fs.fs_lock] \"" + s + "\": must be native or simple"); } - }, SettingsProperty.IndexScope); + }, Property.IndexScope); private final CounterMetric rateLimitingTimeInNanos = new CounterMetric(); private final ShardPath path; diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStore.java b/core/src/main/java/org/elasticsearch/index/store/IndexStore.java index be4e69662264..9e01d871765f 100644 --- a/core/src/main/java/org/elasticsearch/index/store/IndexStore.java +++ 
b/core/src/main/java/org/elasticsearch/index/store/IndexStore.java @@ -21,7 +21,7 @@ package org.elasticsearch.index.store; import org.apache.lucene.store.StoreRateLimiting; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; @@ -32,10 +32,10 @@ import org.elasticsearch.index.shard.ShardPath; public class IndexStore extends AbstractIndexComponent { public static final Setting INDEX_STORE_THROTTLE_TYPE_SETTING = new Setting<>("index.store.throttle.type", "none", IndexRateLimitingType::fromString, - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); public static final Setting INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING = Setting.byteSizeSetting("index.store.throttle.max_bytes_per_sec", new ByteSizeValue(0), - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); protected final IndexStoreConfig indexStoreConfig; private final StoreRateLimiting rateLimiting = new StoreRateLimiting(); diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java index 28a4c32ab75c..12558bb9554f 100644 --- a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java +++ b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java @@ -22,7 +22,7 @@ import org.apache.lucene.store.StoreRateLimiting; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; @@ -39,13 +39,13 @@ public class IndexStoreConfig { */ public static final Setting INDICES_STORE_THROTTLE_TYPE_SETTING = new Setting<>("indices.store.throttle.type", StoreRateLimiting.Type.NONE.name(),StoreRateLimiting.Type::fromString, - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); /** * Configures the node / cluster level throttle intensity. The default is 10240 MB */ public static final Setting INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING = Setting.byteSizeSetting("indices.store.throttle.max_bytes_per_sec", new ByteSizeValue(0), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); private volatile StoreRateLimiting.Type rateLimitingType; private volatile ByteSizeValue rateLimitingThrottle; private final StoreRateLimiting rateLimiting = new StoreRateLimiting(); diff --git a/core/src/main/java/org/elasticsearch/index/store/Store.java b/core/src/main/java/org/elasticsearch/index/store/Store.java index f467d25c5941..772edb1d1b2e 100644 --- a/core/src/main/java/org/elasticsearch/index/store/Store.java +++ b/core/src/main/java/org/elasticsearch/index/store/Store.java @@ -49,7 +49,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -61,7 +60,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; import org.elasticsearch.common.lucene.store.InputStreamIndexInput; import org.elasticsearch.common.settings.Setting; -import 
org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.Callback; @@ -91,7 +90,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReentrantReadWriteLock; -import java.util.zip.Adler32; import java.util.zip.CRC32; import java.util.zip.Checksum; @@ -126,7 +124,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref static final int VERSION = VERSION_WRITE_THROWABLE; static final String CORRUPTED = "corrupted_"; public static final Setting INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING = - Setting.timeSetting("index.store.stats_refresh_interval", TimeValue.timeValueSeconds(10), SettingsProperty.IndexScope); + Setting.timeSetting("index.store.stats_refresh_interval", TimeValue.timeValueSeconds(10), Property.IndexScope); private final AtomicBoolean isClosed = new AtomicBoolean(false); private final StoreDirectory directory; diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java b/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java index 9dbb673fa5bb..bd01e7f0183f 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java @@ -32,7 +32,7 @@ import org.apache.lucene.search.Weight; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.lucene.ShardCoreKeyMap; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.cache.query.QueryCacheStats; @@ 
-49,9 +49,9 @@ import java.util.concurrent.ConcurrentHashMap; public class IndicesQueryCache extends AbstractComponent implements QueryCache, Closeable { public static final Setting INDICES_CACHE_QUERY_SIZE_SETTING = Setting.byteSizeSetting( - "indices.queries.cache.size", "10%", SettingsProperty.ClusterScope); + "indices.queries.cache.size", "10%", Property.NodeScope); public static final Setting INDICES_CACHE_QUERY_COUNT_SETTING = Setting.intSetting( - "indices.queries.cache.count", 10000, 1, SettingsProperty.ClusterScope); + "indices.queries.cache.count", 10000, 1, Property.NodeScope); private final LRUQueryCache cache; private final ShardCoreKeyMap shardKeyMap = new ShardCoreKeyMap(); diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java b/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java index 02aa09f138cd..0e7b5dd9eb0c 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java @@ -34,7 +34,7 @@ import org.elasticsearch.common.cache.RemovalNotification; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -70,11 +70,11 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo * since we are checking on the cluster state IndexMetaData always. 
*/ public static final Setting INDEX_CACHE_REQUEST_ENABLED_SETTING = - Setting.boolSetting("index.requests.cache.enable", false, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting.boolSetting("index.requests.cache.enable", false, Property.Dynamic, Property.IndexScope); public static final Setting INDICES_CACHE_QUERY_SIZE = - Setting.byteSizeSetting("indices.requests.cache.size", "1%", SettingsProperty.ClusterScope); + Setting.byteSizeSetting("indices.requests.cache.size", "1%", Property.NodeScope); public static final Setting INDICES_CACHE_QUERY_EXPIRE = - Setting.positiveTimeSetting("indices.requests.cache.expire", new TimeValue(0), SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("indices.requests.cache.expire", new TimeValue(0), Property.NodeScope); private final ConcurrentMap registeredClosedListeners = ConcurrentCollections.newConcurrentMap(); private final Set keysToClean = ConcurrentCollections.newConcurrentSet(); diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index c37a5d5cf6d4..4b6c954a71a3 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -47,7 +47,7 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -117,7 +117,7 @@ public class IndicesService extends AbstractLifecycleComponent i public static final String INDICES_SHARDS_CLOSED_TIMEOUT = "indices.shards_closed_timeout"; public 
static final Setting INDICES_CACHE_CLEAN_INTERVAL_SETTING = - Setting.positiveTimeSetting("indices.cache.cleanup_interval", TimeValue.timeValueMinutes(1), SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("indices.cache.cleanup_interval", TimeValue.timeValueMinutes(1), Property.NodeScope); private final PluginsService pluginsService; private final NodeEnvironment nodeEnv; private final TimeValue shardsClosedTimeout; diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java index b1189fbd2862..06d5b2164e03 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java @@ -24,7 +24,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -73,11 +73,11 @@ import java.util.function.Function; public class HunspellService extends AbstractComponent { public final static Setting HUNSPELL_LAZY_LOAD = - Setting.boolSetting("indices.analysis.hunspell.dictionary.lazy", Boolean.FALSE, SettingsProperty.ClusterScope); + Setting.boolSetting("indices.analysis.hunspell.dictionary.lazy", Boolean.FALSE, Property.NodeScope); public final static Setting HUNSPELL_IGNORE_CASE = - Setting.boolSetting("indices.analysis.hunspell.dictionary.ignore_case", Boolean.FALSE, SettingsProperty.ClusterScope); + Setting.boolSetting("indices.analysis.hunspell.dictionary.ignore_case", Boolean.FALSE, Property.NodeScope); public final static Setting HUNSPELL_DICTIONARY_OPTIONS = - 
Setting.groupSetting("indices.analysis.hunspell.dictionary.", SettingsProperty.ClusterScope); + Setting.groupSetting("indices.analysis.hunspell.dictionary.", Property.NodeScope); private final ConcurrentHashMap dictionaries = new ConcurrentHashMap<>(); private final Map knownDictionaries; private final boolean defaultIgnoreCase; diff --git a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java index 3a3fede9af0c..d2d96092186e 100644 --- a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +++ b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; @@ -48,21 +48,21 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { private final ConcurrentMap breakers = new ConcurrentHashMap(); public static final Setting TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING = - Setting.byteSizeSetting("indices.breaker.total.limit", "70%", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("indices.breaker.total.limit", "70%", Property.Dynamic, Property.NodeScope); public static final Setting FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING = - Setting.byteSizeSetting("indices.breaker.fielddata.limit", "60%", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("indices.breaker.fielddata.limit", "60%", Property.Dynamic, Property.NodeScope); public static final Setting 
FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING = - Setting.doubleSetting("indices.breaker.fielddata.overhead", 1.03d, 0.0d, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.doubleSetting("indices.breaker.fielddata.overhead", 1.03d, 0.0d, Property.Dynamic, Property.NodeScope); public static final Setting FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING = - new Setting<>("indices.breaker.fielddata.type", "memory", CircuitBreaker.Type::parseValue, SettingsProperty.ClusterScope); + new Setting<>("indices.breaker.fielddata.type", "memory", CircuitBreaker.Type::parseValue, Property.NodeScope); public static final Setting REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING = - Setting.byteSizeSetting("indices.breaker.request.limit", "40%", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("indices.breaker.request.limit", "40%", Property.Dynamic, Property.NodeScope); public static final Setting REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING = - Setting.doubleSetting("indices.breaker.request.overhead", 1.0d, 0.0d, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.doubleSetting("indices.breaker.request.overhead", 1.0d, 0.0d, Property.Dynamic, Property.NodeScope); public static final Setting REQUEST_CIRCUIT_BREAKER_TYPE_SETTING = - new Setting<>("indices.breaker.request.type", "memory", CircuitBreaker.Type::parseValue, SettingsProperty.ClusterScope); + new Setting<>("indices.breaker.request.type", "memory", CircuitBreaker.Type::parseValue, Property.NodeScope); diff --git a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java index a5bb86969993..46744f4d8481 100644 --- a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java +++ b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java @@ -34,7 +34,7 @@ import org.elasticsearch.common.lease.Releasable; 
import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.Index; @@ -54,7 +54,7 @@ import java.util.function.ToLongBiFunction; public class IndicesFieldDataCache extends AbstractComponent implements RemovalListener, Releasable{ public static final Setting INDICES_FIELDDATA_CACHE_SIZE_KEY = - Setting.byteSizeSetting("indices.fielddata.cache.size", new ByteSizeValue(-1), SettingsProperty.ClusterScope); + Setting.byteSizeSetting("indices.fielddata.cache.size", new ByteSizeValue(-1), Property.NodeScope); private final IndexFieldDataCache.Listener indicesFieldDataCacheListener; private final Cache cache; diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java index f58dc1ca8b87..82595458479b 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java @@ -25,7 +25,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -35,7 +35,7 @@ public class RecoverySettings extends AbstractComponent { public static final Setting 
INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING = Setting.byteSizeSetting("indices.recovery.max_bytes_per_sec", new ByteSizeValue(40, ByteSizeUnit.MB), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); /** * how long to wait before retrying after issues cause by cluster state syncing between nodes @@ -43,17 +43,17 @@ public class RecoverySettings extends AbstractComponent { */ public static final Setting INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING = Setting.positiveTimeSetting("indices.recovery.retry_delay_state_sync", TimeValue.timeValueMillis(500), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); /** how long to wait before retrying after network related issues */ public static final Setting INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING = Setting.positiveTimeSetting("indices.recovery.retry_delay_network", TimeValue.timeValueSeconds(5), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); /** timeout value to use for requests made as part of the recovery process */ public static final Setting INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING = Setting.positiveTimeSetting("indices.recovery.internal_action_timeout", TimeValue.timeValueMinutes(15), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); /** * timeout value to use for requests made as part of the recovery process that are expected to take long time. 
@@ -62,7 +62,7 @@ public class RecoverySettings extends AbstractComponent { public static final Setting INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING = Setting.timeSetting("indices.recovery.internal_action_long_timeout", (s) -> TimeValue.timeValueMillis(INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.get(s).millis() * 2).toString(), - TimeValue.timeValueSeconds(0), SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + TimeValue.timeValueSeconds(0), Property.Dynamic, Property.NodeScope); /** * recoveries that don't show any activity for more then this interval will be failed. @@ -71,7 +71,7 @@ public class RecoverySettings extends AbstractComponent { public static final Setting INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING = Setting.timeSetting("indices.recovery.recovery_activity_timeout", (s) -> INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.getRaw(s) , TimeValue.timeValueSeconds(0), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); public static final ByteSizeValue DEFAULT_CHUNK_SIZE = new ByteSizeValue(512, ByteSizeUnit.KB); diff --git a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index 3851b4571b6f..fa724bf9511e 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -36,7 +36,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; @@ -58,7 
+58,6 @@ import org.elasticsearch.transport.TransportService; import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.EnumSet; import java.util.List; import java.util.concurrent.TimeUnit; @@ -72,7 +71,7 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe // TODO this class can be foled into either IndicesService and partially into IndicesClusterStateService there is no need for a separate public service public static final Setting INDICES_STORE_DELETE_SHARD_TIMEOUT = Setting.positiveTimeSetting("indices.store.delete.shard.timeout", new TimeValue(30, TimeUnit.SECONDS), - SettingsProperty.ClusterScope); + Property.NodeScope); public static final String ACTION_SHARD_EXISTS = "internal:index/shard/exists"; private static final EnumSet ACTIVE_STATES = EnumSet.of(IndexShardState.STARTED, IndexShardState.RELOCATED); private final IndicesService indicesService; diff --git a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java index 0f9c9d425e1c..b73fce540dc3 100644 --- a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java +++ b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java @@ -38,7 +38,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -70,7 +70,7 @@ public class IndicesTTLService extends AbstractLifecycleComponent INDICES_TTL_INTERVAL_SETTING = 
Setting.positiveTimeSetting("indices.ttl.interval", TimeValue.timeValueSeconds(60), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); private final ClusterService clusterService; private final IndicesService indicesService; diff --git a/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java b/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java index 71a9743f78a7..0287d5c522c6 100644 --- a/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java +++ b/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java @@ -21,7 +21,7 @@ package org.elasticsearch.monitor.fs; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.SingleObjectCache; @@ -39,7 +39,7 @@ public class FsService extends AbstractComponent { public final static Setting REFRESH_INTERVAL_SETTING = Setting.timeSetting("monitor.fs.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), - SettingsProperty.ClusterScope); + Property.NodeScope); public FsService(Settings settings, NodeEnvironment nodeEnvironment) throws IOException { super(settings); diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java index 301b86674c61..5a2d591c7dc4 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java @@ -21,7 +21,7 @@ package org.elasticsearch.monitor.jvm; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.settings.Setting; -import 
org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.FutureUtils; @@ -48,13 +48,13 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent ENABLED_SETTING = - Setting.boolSetting("monitor.jvm.gc.enabled", true, SettingsProperty.ClusterScope); + Setting.boolSetting("monitor.jvm.gc.enabled", true, Property.NodeScope); public final static Setting REFRESH_INTERVAL_SETTING = Setting.timeSetting("monitor.jvm.gc.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), - SettingsProperty.ClusterScope); + Property.NodeScope); private static String GC_COLLECTOR_PREFIX = "monitor.jvm.gc.collector."; - public final static Setting GC_SETTING = Setting.groupSetting(GC_COLLECTOR_PREFIX, SettingsProperty.ClusterScope); + public final static Setting GC_SETTING = Setting.groupSetting(GC_COLLECTOR_PREFIX, Property.NodeScope); static class GcThreshold { public final String name; diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java index 5e03ab3e31c9..e91c05e75ac0 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java @@ -21,7 +21,7 @@ package org.elasticsearch.monitor.jvm; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -38,7 +38,7 @@ public class JvmService extends AbstractComponent { public final static Setting REFRESH_INTERVAL_SETTING = 
Setting.timeSetting("monitor.jvm.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), - SettingsProperty.ClusterScope); + Property.NodeScope); public JvmService(Settings settings) { super(settings); diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsService.java b/core/src/main/java/org/elasticsearch/monitor/os/OsService.java index df750c7247cb..d452094d7b00 100644 --- a/core/src/main/java/org/elasticsearch/monitor/os/OsService.java +++ b/core/src/main/java/org/elasticsearch/monitor/os/OsService.java @@ -21,7 +21,7 @@ package org.elasticsearch.monitor.os; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.SingleObjectCache; @@ -40,7 +40,7 @@ public class OsService extends AbstractComponent { public final static Setting REFRESH_INTERVAL_SETTING = Setting.timeSetting("monitor.os.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), - SettingsProperty.ClusterScope); + Property.NodeScope); public OsService(Settings settings) { super(settings); diff --git a/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java b/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java index 0370011e7c06..30c24f34c668 100644 --- a/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java +++ b/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java @@ -21,7 +21,7 @@ package org.elasticsearch.monitor.process; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import 
org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.SingleObjectCache; @@ -37,7 +37,7 @@ public final class ProcessService extends AbstractComponent { public final static Setting REFRESH_INTERVAL_SETTING = Setting.timeSetting("monitor.process.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), - SettingsProperty.ClusterScope); + Property.NodeScope); public ProcessService(Settings settings) { super(settings); diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index 7169be7d1f14..17b0b0d1e057 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -53,7 +53,7 @@ import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.transport.BoundTransportAddress; @@ -131,22 +131,22 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; public class Node implements Closeable { public static final Setting WRITE_PORTS_FIELD_SETTING = - Setting.boolSetting("node.portsfile", false, SettingsProperty.ClusterScope); + Setting.boolSetting("node.portsfile", false, Property.NodeScope); public static final Setting NODE_CLIENT_SETTING = - Setting.boolSetting("node.client", false, SettingsProperty.ClusterScope); - public static final Setting NODE_DATA_SETTING = Setting.boolSetting("node.data", true, SettingsProperty.ClusterScope); + 
Setting.boolSetting("node.client", false, Property.NodeScope); + public static final Setting NODE_DATA_SETTING = Setting.boolSetting("node.data", true, Property.NodeScope); public static final Setting NODE_MASTER_SETTING = - Setting.boolSetting("node.master", true, SettingsProperty.ClusterScope); + Setting.boolSetting("node.master", true, Property.NodeScope); public static final Setting NODE_LOCAL_SETTING = - Setting.boolSetting("node.local", false, SettingsProperty.ClusterScope); + Setting.boolSetting("node.local", false, Property.NodeScope); public static final Setting NODE_MODE_SETTING = - new Setting<>("node.mode", "network", Function.identity(), SettingsProperty.ClusterScope); + new Setting<>("node.mode", "network", Function.identity(), Property.NodeScope); public static final Setting NODE_INGEST_SETTING = - Setting.boolSetting("node.ingest", true, SettingsProperty.ClusterScope); - public static final Setting NODE_NAME_SETTING = Setting.simpleString("node.name", SettingsProperty.ClusterScope); + Setting.boolSetting("node.ingest", true, Property.NodeScope); + public static final Setting NODE_NAME_SETTING = Setting.simpleString("node.name", Property.NodeScope); // this sucks that folks can mistype client etc and get away with it. // TODO: we should move this to node.attribute.${name} = ${value} instead. 
- public static final Setting NODE_ATTRIBUTES = Setting.groupSetting("node.", SettingsProperty.ClusterScope); + public static final Setting NODE_ATTRIBUTES = Setting.groupSetting("node.", Property.NodeScope); private static final String CLIENT_TYPE = "node"; diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java index 5f8107ba7585..218016814375 100644 --- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.cli.Terminal; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.env.Environment; @@ -59,7 +59,7 @@ public class InternalSettingsPreparer { public static final String SECRET_PROMPT_VALUE = "${prompt.secret}"; public static final String TEXT_PROMPT_VALUE = "${prompt.text}"; public static final Setting IGNORE_SYSTEM_PROPERTIES_SETTING = - Setting.boolSetting("config.ignore_system_properties", false, SettingsProperty.ClusterScope); + Setting.boolSetting("config.ignore_system_properties", false, Property.NodeScope); /** * Prepares the settings by gathering all elasticsearch system properties and setting defaults. 
diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java index 1aafcd0f6fb0..cf953cd15299 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -37,7 +37,7 @@ import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; @@ -73,7 +73,7 @@ public class PluginsService extends AbstractComponent { private final List> plugins; private final PluginsAndModules info; public static final Setting> MANDATORY_SETTING = - Setting.listSetting("plugin.mandatory", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); + Setting.listSetting("plugin.mandatory", Collections.emptyList(), Function.identity(), Property.NodeScope); private final Map> onModuleReferences; diff --git a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java index 8ac297e072f2..56e7e08c2c3f 100644 --- a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java @@ -24,7 +24,7 @@ import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.fs.FsBlobStore; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.ByteSizeValue; import 
org.elasticsearch.env.Environment; import org.elasticsearch.index.snapshots.IndexShardRepository; @@ -53,16 +53,16 @@ public class FsRepository extends BlobStoreRepository { public final static String TYPE = "fs"; public static final Setting LOCATION_SETTING = - new Setting<>("location", "", Function.identity(), SettingsProperty.ClusterScope); + new Setting<>("location", "", Function.identity(), Property.NodeScope); public static final Setting REPOSITORIES_LOCATION_SETTING = - new Setting<>("repositories.fs.location", LOCATION_SETTING, Function.identity(), SettingsProperty.ClusterScope); + new Setting<>("repositories.fs.location", LOCATION_SETTING, Function.identity(), Property.NodeScope); public static final Setting CHUNK_SIZE_SETTING = - Setting.byteSizeSetting("chunk_size", "-1", SettingsProperty.ClusterScope); + Setting.byteSizeSetting("chunk_size", "-1", Property.NodeScope); public static final Setting REPOSITORIES_CHUNK_SIZE_SETTING = - Setting.byteSizeSetting("repositories.fs.chunk_size", "-1", SettingsProperty.ClusterScope); - public static final Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("repositories.fs.chunk_size", "-1", Property.NodeScope); + public static final Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, Property.NodeScope); public static final Setting REPOSITORIES_COMPRESS_SETTING = - Setting.boolSetting("repositories.fs.compress", false, SettingsProperty.ClusterScope); + Setting.boolSetting("repositories.fs.compress", false, Property.NodeScope); private final FsBlobStore blobStore; diff --git a/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java b/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java index 5086902d9fce..77d4f1cc816f 100644 --- a/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java @@ -25,7 +25,7 
@@ import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.url.URLBlobStore; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.util.URIPattern; import org.elasticsearch.env.Environment; import org.elasticsearch.index.snapshots.IndexShardRepository; @@ -58,20 +58,20 @@ public class URLRepository extends BlobStoreRepository { public static final Setting> SUPPORTED_PROTOCOLS_SETTING = Setting.listSetting("repositories.url.supported_protocols", Arrays.asList("http", "https", "ftp", "file", "jar"), - Function.identity(), SettingsProperty.ClusterScope); + Function.identity(), Property.NodeScope); public static final Setting> ALLOWED_URLS_SETTING = - Setting.listSetting("repositories.url.allowed_urls", Collections.emptyList(), URIPattern::new, SettingsProperty.ClusterScope); + Setting.listSetting("repositories.url.allowed_urls", Collections.emptyList(), URIPattern::new, Property.NodeScope); - public static final Setting URL_SETTING = new Setting<>("url", "http:", URLRepository::parseURL, SettingsProperty.ClusterScope); + public static final Setting URL_SETTING = new Setting<>("url", "http:", URLRepository::parseURL, Property.NodeScope); public static final Setting REPOSITORIES_URL_SETTING = new Setting<>("repositories.url.url", (s) -> s.get("repositories.uri.url", "http:"), URLRepository::parseURL, - SettingsProperty.ClusterScope); + Property.NodeScope); public static final Setting LIST_DIRECTORIES_SETTING = - Setting.boolSetting("list_directories", true, SettingsProperty.ClusterScope); + Setting.boolSetting("list_directories", true, Property.NodeScope); public static final Setting REPOSITORIES_LIST_DIRECTORIES_SETTING = - Setting.boolSetting("repositories.uri.list_directories", true, SettingsProperty.ClusterScope); + 
Setting.boolSetting("repositories.uri.list_directories", true, Property.NodeScope); private final List supportedProtocols; diff --git a/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index 5066b4884aff..b406dfca5450 100644 --- a/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -23,7 +23,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; /** @@ -36,7 +36,7 @@ import org.elasticsearch.common.settings.Settings; */ public abstract class BaseRestHandler extends AbstractComponent implements RestHandler { public static final Setting MULTI_ALLOW_EXPLICIT_INDEX = - Setting.boolSetting("rest.action.multi.allow_explicit_index", true, SettingsProperty.ClusterScope); + Setting.boolSetting("rest.action.multi.allow_explicit_index", true, Property.NodeScope); private final Client client; protected final ParseFieldMatcher parseFieldMatcher; diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java index c6cdd79e121f..d46a12fb4961 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java @@ -46,7 +46,7 @@ import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import 
org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -56,7 +56,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.TemplateQueryParser; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.watcher.FileChangesListener; import org.elasticsearch.watcher.FileWatcher; @@ -86,12 +85,12 @@ public class ScriptService extends AbstractComponent implements Closeable { static final String DISABLE_DYNAMIC_SCRIPTING_SETTING = "script.disable_dynamic"; public static final Setting SCRIPT_CACHE_SIZE_SETTING = - Setting.intSetting("script.cache.max_size", 100, 0, SettingsProperty.ClusterScope); + Setting.intSetting("script.cache.max_size", 100, 0, Property.NodeScope); public static final Setting SCRIPT_CACHE_EXPIRE_SETTING = - Setting.positiveTimeSetting("script.cache.expire", TimeValue.timeValueMillis(0), SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("script.cache.expire", TimeValue.timeValueMillis(0), Property.NodeScope); public static final String SCRIPT_INDEX = ".scripts"; public static final Setting SCRIPT_AUTO_RELOAD_ENABLED_SETTING = - Setting.boolSetting("script.auto_reload_enabled", true, SettingsProperty.ClusterScope); + Setting.boolSetting("script.auto_reload_enabled", true, Property.NodeScope); private final String defaultLang; diff --git a/core/src/main/java/org/elasticsearch/script/ScriptSettings.java b/core/src/main/java/org/elasticsearch/script/ScriptSettings.java index 26cb7eaa2780..1bf7fdfc8433 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptSettings.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptSettings.java @@ -21,7 +21,7 @@ 
package org.elasticsearch.script; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import java.util.ArrayList; @@ -45,7 +45,7 @@ public class ScriptSettings { ScriptModes.sourceKey(scriptType), scriptType.getDefaultScriptMode().getMode(), ScriptMode::parse, - SettingsProperty.ClusterScope)); + Property.NodeScope)); } SCRIPT_TYPE_SETTING_MAP = Collections.unmodifiableMap(scriptTypeSettingMap); } @@ -66,7 +66,7 @@ public class ScriptSettings { throw new IllegalArgumentException("unregistered default language [" + setting + "]"); } return setting; - }, SettingsProperty.ClusterScope); + }, Property.NodeScope); } private static Map> contextSettings(ScriptContextRegistry scriptContextRegistry) { @@ -76,7 +76,7 @@ public class ScriptSettings { ScriptModes.operationKey(scriptContext), ScriptMode.OFF.getMode(), ScriptMode::parse, - SettingsProperty.ClusterScope + Property.NodeScope )); } return scriptContextSettingMap; @@ -136,7 +136,7 @@ public class ScriptSettings { ScriptModes.getKey(language, scriptType, scriptContext), defaultSetting, ScriptMode::parse, - SettingsProperty.ClusterScope); + Property.NodeScope); scriptModeSettings.add(setting); } } diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 856885c3ae48..d94d83b3070b 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -34,7 +34,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; 
+import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; @@ -55,7 +55,6 @@ import org.elasticsearch.index.search.stats.StatsGroupsParseElement; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.indices.IndicesRequestCache; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; @@ -113,13 +112,13 @@ public class SearchService extends AbstractLifecycleComponent imp // we can have 5 minutes here, since we make sure to clean with search requests and when shard/index closes public static final Setting DEFAULT_KEEPALIVE_SETTING = - Setting.positiveTimeSetting("search.default_keep_alive", timeValueMinutes(5), SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("search.default_keep_alive", timeValueMinutes(5), Property.NodeScope); public static final Setting KEEPALIVE_INTERVAL_SETTING = - Setting.positiveTimeSetting("search.keep_alive_interval", timeValueMinutes(1), SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("search.keep_alive_interval", timeValueMinutes(1), Property.NodeScope); public static final TimeValue NO_TIMEOUT = timeValueMillis(-1); public static final Setting DEFAULT_SEARCH_TIMEOUT_SETTING = - Setting.timeSetting("search.default_search_timeout", NO_TIMEOUT, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.timeSetting("search.default_search_timeout", NO_TIMEOUT, Property.Dynamic, Property.NodeScope); private final ThreadPool threadPool; diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 150325ff2423..ebff17bfb760 100644 --- 
a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -28,7 +28,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.SizeValue; @@ -190,7 +190,7 @@ public class ThreadPool extends AbstractComponent implements Closeable { } public static final Setting THREADPOOL_GROUP_SETTING = - Setting.groupSetting("threadpool.", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.groupSetting("threadpool.", Property.Dynamic, Property.NodeScope); private volatile Map executors; diff --git a/core/src/main/java/org/elasticsearch/transport/Transport.java b/core/src/main/java/org/elasticsearch/transport/Transport.java index d6dee1953a46..532c9d99ace4 100644 --- a/core/src/main/java/org/elasticsearch/transport/Transport.java +++ b/core/src/main/java/org/elasticsearch/transport/Transport.java @@ -22,7 +22,7 @@ package org.elasticsearch.transport; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; @@ -36,7 +36,7 @@ import java.util.Map; public interface Transport extends LifecycleComponent { - Setting TRANSPORT_TCP_COMPRESS = Setting.boolSetting("transport.tcp.compress", false, 
SettingsProperty.ClusterScope); + Setting TRANSPORT_TCP_COMPRESS = Setting.boolSetting("transport.tcp.compress", false, Property.NodeScope); void transportServiceAdapter(TransportServiceAdapter service); diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index daa3409b40a2..735a04455fb4 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -33,7 +33,7 @@ import org.elasticsearch.common.metrics.MeanMetric; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; @@ -99,10 +99,10 @@ public class TransportService extends AbstractLifecycleComponent> TRACE_LOG_INCLUDE_SETTING = - listSetting("transport.tracer.include", emptyList(), Function.identity(), SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + listSetting("transport.tracer.include", emptyList(), Function.identity(), Property.Dynamic, Property.NodeScope); public static final Setting> TRACE_LOG_EXCLUDE_SETTING = listSetting("transport.tracer.exclude", Arrays.asList("internal:discovery/zen/fd*", TransportLivenessAction.NAME), - Function.identity(), SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Function.identity(), Property.Dynamic, Property.NodeScope); private final ESLogger tracerLog; diff --git a/core/src/main/java/org/elasticsearch/transport/TransportSettings.java b/core/src/main/java/org/elasticsearch/transport/TransportSettings.java index b52a54509bd0..e36d4e403c4f 100644 --- 
a/core/src/main/java/org/elasticsearch/transport/TransportSettings.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportSettings.java @@ -19,7 +19,7 @@ package org.elasticsearch.transport; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import java.util.List; @@ -37,18 +37,18 @@ import static org.elasticsearch.common.settings.Setting.listSetting; final public class TransportSettings { public static final Setting> HOST = - listSetting("transport.host", emptyList(), Function.identity(), SettingsProperty.ClusterScope); + listSetting("transport.host", emptyList(), Function.identity(), Property.NodeScope); public static final Setting> PUBLISH_HOST = - listSetting("transport.publish_host", HOST, Function.identity(), SettingsProperty.ClusterScope); + listSetting("transport.publish_host", HOST, Function.identity(), Property.NodeScope); public static final Setting> BIND_HOST = - listSetting("transport.bind_host", HOST, Function.identity(), SettingsProperty.ClusterScope); + listSetting("transport.bind_host", HOST, Function.identity(), Property.NodeScope); public static final Setting PORT = - new Setting<>("transport.tcp.port", "9300-9400", Function.identity(), SettingsProperty.ClusterScope); + new Setting<>("transport.tcp.port", "9300-9400", Function.identity(), Property.NodeScope); public static final Setting PUBLISH_PORT = - intSetting("transport.publish_port", -1, -1, SettingsProperty.ClusterScope); + intSetting("transport.publish_port", -1, -1, Property.NodeScope); public static final String DEFAULT_PROFILE = "default"; public static final Setting TRANSPORT_PROFILES_SETTING = - groupSetting("transport.profiles.", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + groupSetting("transport.profiles.", Property.Dynamic, Property.NodeScope); private TransportSettings() { diff 
--git a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java index 29c5fe05ee77..d844bd556589 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java @@ -44,7 +44,7 @@ import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.network.NetworkService.TcpSettings; import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.InetSocketTransportAddress; @@ -152,42 +152,42 @@ public class NettyTransport extends AbstractLifecycleComponent implem public static final Setting WORKER_COUNT = new Setting<>("transport.netty.worker_count", (s) -> Integer.toString(EsExecutors.boundedNumberOfProcessors(s) * 2), - (s) -> Setting.parseInt(s, 1, "transport.netty.worker_count"), SettingsProperty.ClusterScope); + (s) -> Setting.parseInt(s, 1, "transport.netty.worker_count"), Property.NodeScope); public static final Setting CONNECTIONS_PER_NODE_RECOVERY = - intSetting("transport.connections_per_node.recovery", 2, 1, SettingsProperty.ClusterScope); + intSetting("transport.connections_per_node.recovery", 2, 1, Property.NodeScope); public static final Setting CONNECTIONS_PER_NODE_BULK = - intSetting("transport.connections_per_node.bulk", 3, 1, SettingsProperty.ClusterScope); + intSetting("transport.connections_per_node.bulk", 3, 1, Property.NodeScope); public static final Setting CONNECTIONS_PER_NODE_REG = - intSetting("transport.connections_per_node.reg", 6, 1, SettingsProperty.ClusterScope); + 
intSetting("transport.connections_per_node.reg", 6, 1, Property.NodeScope); public static final Setting CONNECTIONS_PER_NODE_STATE = - intSetting("transport.connections_per_node.state", 1, 1, SettingsProperty.ClusterScope); + intSetting("transport.connections_per_node.state", 1, 1, Property.NodeScope); public static final Setting CONNECTIONS_PER_NODE_PING = - intSetting("transport.connections_per_node.ping", 1, 1, SettingsProperty.ClusterScope); + intSetting("transport.connections_per_node.ping", 1, 1, Property.NodeScope); // the scheduled internal ping interval setting, defaults to disabled (-1) public static final Setting PING_SCHEDULE = - timeSetting("transport.ping_schedule", TimeValue.timeValueSeconds(-1), SettingsProperty.ClusterScope); + timeSetting("transport.ping_schedule", TimeValue.timeValueSeconds(-1), Property.NodeScope); public static final Setting TCP_BLOCKING_CLIENT = - boolSetting("transport.tcp.blocking_client", TcpSettings.TCP_BLOCKING_CLIENT, SettingsProperty.ClusterScope); + boolSetting("transport.tcp.blocking_client", TcpSettings.TCP_BLOCKING_CLIENT, Property.NodeScope); public static final Setting TCP_CONNECT_TIMEOUT = - timeSetting("transport.tcp.connect_timeout", TcpSettings.TCP_CONNECT_TIMEOUT, SettingsProperty.ClusterScope); + timeSetting("transport.tcp.connect_timeout", TcpSettings.TCP_CONNECT_TIMEOUT, Property.NodeScope); public static final Setting TCP_NO_DELAY = - boolSetting("transport.tcp_no_delay", TcpSettings.TCP_NO_DELAY, SettingsProperty.ClusterScope); + boolSetting("transport.tcp_no_delay", TcpSettings.TCP_NO_DELAY, Property.NodeScope); public static final Setting TCP_KEEP_ALIVE = - boolSetting("transport.tcp.keep_alive", TcpSettings.TCP_KEEP_ALIVE, SettingsProperty.ClusterScope); + boolSetting("transport.tcp.keep_alive", TcpSettings.TCP_KEEP_ALIVE, Property.NodeScope); public static final Setting TCP_BLOCKING_SERVER = - boolSetting("transport.tcp.blocking_server", TcpSettings.TCP_BLOCKING_SERVER, 
SettingsProperty.ClusterScope); + boolSetting("transport.tcp.blocking_server", TcpSettings.TCP_BLOCKING_SERVER, Property.NodeScope); public static final Setting TCP_REUSE_ADDRESS = - boolSetting("transport.tcp.reuse_address", TcpSettings.TCP_REUSE_ADDRESS, SettingsProperty.ClusterScope); + boolSetting("transport.tcp.reuse_address", TcpSettings.TCP_REUSE_ADDRESS, Property.NodeScope); public static final Setting TCP_SEND_BUFFER_SIZE = - Setting.byteSizeSetting("transport.tcp.send_buffer_size", TcpSettings.TCP_SEND_BUFFER_SIZE, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("transport.tcp.send_buffer_size", TcpSettings.TCP_SEND_BUFFER_SIZE, Property.NodeScope); public static final Setting TCP_RECEIVE_BUFFER_SIZE = - Setting.byteSizeSetting("transport.tcp.receive_buffer_size", TcpSettings.TCP_RECEIVE_BUFFER_SIZE, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("transport.tcp.receive_buffer_size", TcpSettings.TCP_RECEIVE_BUFFER_SIZE, Property.NodeScope); public static final Setting NETTY_MAX_CUMULATION_BUFFER_CAPACITY = - Setting.byteSizeSetting("transport.netty.max_cumulation_buffer_capacity", new ByteSizeValue(-1), SettingsProperty.ClusterScope); + Setting.byteSizeSetting("transport.netty.max_cumulation_buffer_capacity", new ByteSizeValue(-1), Property.NodeScope); public static final Setting NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = - Setting.intSetting("transport.netty.max_composite_buffer_components", -1, -1, SettingsProperty.ClusterScope); + Setting.intSetting("transport.netty.max_composite_buffer_components", -1, -1, Property.NodeScope); // See AdaptiveReceiveBufferSizePredictor#DEFAULT_XXX for default values in netty..., we can use higher ones for us, even fixed one public static final Setting NETTY_RECEIVE_PREDICTOR_SIZE = Setting.byteSizeSetting( @@ -200,13 +200,13 @@ public class NettyTransport extends AbstractLifecycleComponent implem defaultReceiverPredictor = Math.min(defaultReceiverPredictor, Math.max(l, 64 * 1024)); } return new 
ByteSizeValue(defaultReceiverPredictor).toString(); - }, SettingsProperty.ClusterScope); + }, Property.NodeScope); public static final Setting NETTY_RECEIVE_PREDICTOR_MIN = - byteSizeSetting("transport.netty.receive_predictor_min", NETTY_RECEIVE_PREDICTOR_SIZE, SettingsProperty.ClusterScope); + byteSizeSetting("transport.netty.receive_predictor_min", NETTY_RECEIVE_PREDICTOR_SIZE, Property.NodeScope); public static final Setting NETTY_RECEIVE_PREDICTOR_MAX = - byteSizeSetting("transport.netty.receive_predictor_max", NETTY_RECEIVE_PREDICTOR_SIZE, SettingsProperty.ClusterScope); + byteSizeSetting("transport.netty.receive_predictor_max", NETTY_RECEIVE_PREDICTOR_SIZE, Property.NodeScope); public static final Setting NETTY_BOSS_COUNT = - intSetting("transport.netty.boss_count", 1, 1, SettingsProperty.ClusterScope); + intSetting("transport.netty.boss_count", 1, 1, Property.NodeScope); protected final NetworkService networkService; protected final Version version; diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index 8872bcdd4067..4f316f568ab6 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -43,7 +43,7 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.set.Sets; @@ -120,7 +120,7 @@ public class TribeService extends AbstractLifecycleComponent { } // internal settings only - public static final Setting TRIBE_NAME_SETTING = 
Setting.simpleString("tribe.name", SettingsProperty.ClusterScope); + public static final Setting TRIBE_NAME_SETTING = Setting.simpleString("tribe.name", Property.NodeScope); private final ClusterService clusterService; private final String[] blockIndicesWrite; private final String[] blockIndicesRead; @@ -139,18 +139,18 @@ public class TribeService extends AbstractLifecycleComponent { throw new IllegalArgumentException( "Invalid value for [tribe.on_conflict] must be either [any, drop or start with prefer_] but was: [" + s + "]"); } - }, SettingsProperty.ClusterScope); + }, Property.NodeScope); public static final Setting BLOCKS_METADATA_SETTING = - Setting.boolSetting("tribe.blocks.metadata", false, SettingsProperty.ClusterScope); + Setting.boolSetting("tribe.blocks.metadata", false, Property.NodeScope); public static final Setting BLOCKS_WRITE_SETTING = - Setting.boolSetting("tribe.blocks.write", false, SettingsProperty.ClusterScope); + Setting.boolSetting("tribe.blocks.write", false, Property.NodeScope); public static final Setting> BLOCKS_WRITE_INDICES_SETTING = - Setting.listSetting("tribe.blocks.write.indices", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); + Setting.listSetting("tribe.blocks.write.indices", Collections.emptyList(), Function.identity(), Property.NodeScope); public static final Setting> BLOCKS_READ_INDICES_SETTING = - Setting.listSetting("tribe.blocks.read.indices", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); + Setting.listSetting("tribe.blocks.read.indices", Collections.emptyList(), Function.identity(), Property.NodeScope); public static final Setting> BLOCKS_METADATA_INDICES_SETTING = - Setting.listSetting("tribe.blocks.metadata.indices", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); + Setting.listSetting("tribe.blocks.metadata.indices", Collections.emptyList(), Function.identity(), Property.NodeScope); public static final Set TRIBE_SETTING_KEYS = 
Sets.newHashSet(TRIBE_NAME_SETTING.getKey(), ON_CONFLICT_SETTING.getKey(), BLOCKS_METADATA_INDICES_SETTING.getKey(), BLOCKS_METADATA_SETTING.getKey(), BLOCKS_READ_INDICES_SETTING.getKey(), BLOCKS_WRITE_INDICES_SETTING.getKey(), BLOCKS_WRITE_SETTING.getKey()); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/repositories/RepositoryBlocksIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/repositories/RepositoryBlocksIT.java index 9c554da781ab..503db65e8108 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/repositories/RepositoryBlocksIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/repositories/RepositoryBlocksIT.java @@ -34,7 +34,7 @@ import static org.hamcrest.Matchers.hasSize; /** * This class tests that repository operations (Put, Delete, Verify) are blocked when the cluster is read-only. * - * The @ClusterScope TEST is needed because this class updates the cluster setting "cluster.blocks.read_only". + * The @ClusterScope TEST is needed because this class updates the cluster setting "cluster.blocks.read_only". */ @ClusterScope(scope = ESIntegTestCase.Scope.TEST) public class RepositoryBlocksIT extends ESIntegTestCase { diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/SnapshotBlocksIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/SnapshotBlocksIT.java index f3a23be919da..82a2637d76b6 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/SnapshotBlocksIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/SnapshotBlocksIT.java @@ -41,7 +41,7 @@ import static org.hamcrest.Matchers.hasSize; /** * This class tests that snapshot operations (Create, Delete, Restore) are blocked when the cluster is read-only. * - * The @ClusterScope TEST is needed because this class updates the cluster setting "cluster.blocks.read_only". 
+ * The @ClusterScope TEST is needed because this class updates the cluster setting "cluster.blocks.read_only". */ @ClusterScope(scope = ESIntegTestCase.Scope.TEST) public class SnapshotBlocksIT extends ESIntegTestCase { diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java index cdf4185c94dd..c266943737c8 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java @@ -35,7 +35,7 @@ import org.elasticsearch.common.inject.ModuleTestCase; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; @@ -84,7 +84,7 @@ public class ClusterModuleTests extends ModuleTestCase { public void testRegisterClusterDynamicSetting() { SettingsModule module = new SettingsModule(Settings.EMPTY); - module.registerSetting(Setting.boolSetting("foo.bar", false, SettingsProperty.Dynamic, SettingsProperty.ClusterScope)); + module.registerSetting(Setting.boolSetting("foo.bar", false, Property.Dynamic, Property.NodeScope)); assertInstanceBinding(module, ClusterSettings.class, service -> service.hasDynamicSetting("foo.bar")); } @@ -99,7 +99,7 @@ public class ClusterModuleTests extends ModuleTestCase { public void testRegisterIndexDynamicSetting() { SettingsModule module = new SettingsModule(Settings.EMPTY); - module.registerSetting(Setting.boolSetting("foo.bar", false, SettingsProperty.Dynamic, SettingsProperty.IndexScope)); + module.registerSetting(Setting.boolSetting("foo.bar", false, Property.Dynamic, Property.IndexScope)); assertInstanceBinding(module, 
IndexScopedSettings.class, service -> service.hasDynamicSetting("foo.bar")); } diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java index 811ddc7ae5a4..e61bbc5f7191 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.plugins.Plugin; @@ -47,9 +47,9 @@ public class SettingsFilteringIT extends ESIntegTestCase { public static class SettingsFilteringPlugin extends Plugin { public static final Setting SOME_NODE_SETTING = - Setting.boolSetting("some.node.setting", false, SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.boolSetting("some.node.setting", false, Property.NodeScope, Property.Filtered); public static final Setting SOME_OTHER_NODE_SETTING = - Setting.boolSetting("some.other.node.setting", false, SettingsProperty.ClusterScope); + Setting.boolSetting("some.other.node.setting", false, Property.NodeScope); /** * The name of the plugin. 
@@ -75,7 +75,7 @@ public class SettingsFilteringIT extends ESIntegTestCase { public void onModule(SettingsModule module) { module.registerSetting(SOME_NODE_SETTING); module.registerSetting(SOME_OTHER_NODE_SETTING); - module.registerSetting(Setting.groupSetting("index.filter_test.", SettingsProperty.IndexScope)); + module.registerSetting(Setting.groupSetting("index.filter_test.", Property.IndexScope)); module.registerSettingsFilter("index.filter_test.foo"); module.registerSettingsFilter("index.filter_test.bar*"); } diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 84adee21b320..48a49616b7bb 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; import org.elasticsearch.common.logging.ESLoggerFactory; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.index.IndexModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; @@ -42,8 +42,8 @@ import java.util.function.Function; public class ScopedSettingsTests extends ESTestCase { public void testAddConsumer() { - Setting testSetting = Setting.intSetting("foo.bar", 1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); - Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting testSetting = Setting.intSetting("foo.bar", 1, Property.Dynamic, Property.NodeScope); + Setting testSetting2 = 
Setting.intSetting("foo.bar.baz", 1, Property.Dynamic, Property.NodeScope); AbstractScopedSettings service = new ClusterSettings(Settings.EMPTY, Collections.singleton(testSetting)); AtomicInteger consumer = new AtomicInteger(); @@ -70,8 +70,8 @@ public class ScopedSettingsTests extends ESTestCase { } public void testApply() { - Setting testSetting = Setting.intSetting("foo.bar", 1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); - Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting testSetting = Setting.intSetting("foo.bar", 1, Property.Dynamic, Property.NodeScope); + Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, Property.Dynamic, Property.NodeScope); AbstractScopedSettings service = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(testSetting, testSetting2))); AtomicInteger consumer = new AtomicInteger(); @@ -142,8 +142,8 @@ public class ScopedSettingsTests extends ESTestCase { public void testIsDynamic(){ ClusterSettings settings = new ClusterSettings(Settings.EMPTY, - new HashSet<>(Arrays.asList(Setting.intSetting("foo.bar", 1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope), - Setting.intSetting("foo.bar.baz", 1, SettingsProperty.ClusterScope)))); + new HashSet<>(Arrays.asList(Setting.intSetting("foo.bar", 1, Property.Dynamic, Property.NodeScope), + Setting.intSetting("foo.bar.baz", 1, Property.NodeScope)))); assertFalse(settings.hasDynamicSetting("foo.bar.baz")); assertTrue(settings.hasDynamicSetting("foo.bar")); assertNotNull(settings.get("foo.bar.baz")); @@ -154,8 +154,8 @@ public class ScopedSettingsTests extends ESTestCase { } public void testDiff() throws IOException { - Setting foobarbaz = Setting.intSetting("foo.bar.baz", 1, SettingsProperty.ClusterScope); - Setting foobar = Setting.intSetting("foo.bar", 1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting foobarbaz = Setting.intSetting("foo.bar.baz", 1, 
Property.NodeScope); + Setting foobar = Setting.intSetting("foo.bar", 1, Property.Dynamic, Property.NodeScope); ClusterSettings settings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(foobar, foobarbaz))); Settings diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.EMPTY); assertEquals(diff.getAsMap().size(), 1); @@ -244,22 +244,22 @@ public class ScopedSettingsTests extends ESTestCase { try { new IndexScopedSettings( - Settings.EMPTY, Collections.singleton(Setting.groupSetting("boo .", SettingsProperty.IndexScope))); + Settings.EMPTY, Collections.singleton(Setting.groupSetting("boo .", Property.IndexScope))); fail(); } catch (IllegalArgumentException e) { assertEquals("illegal settings key: [boo .]", e.getMessage()); } new IndexScopedSettings( - Settings.EMPTY, Collections.singleton(Setting.groupSetting("boo.", SettingsProperty.IndexScope))); + Settings.EMPTY, Collections.singleton(Setting.groupSetting("boo.", Property.IndexScope))); try { new IndexScopedSettings( - Settings.EMPTY, Collections.singleton(Setting.boolSetting("boo.", true, SettingsProperty.IndexScope))); + Settings.EMPTY, Collections.singleton(Setting.boolSetting("boo.", true, Property.IndexScope))); fail(); } catch (IllegalArgumentException e) { assertEquals("illegal settings key: [boo.]", e.getMessage()); } new IndexScopedSettings( - Settings.EMPTY, Collections.singleton(Setting.boolSetting("boo", true, SettingsProperty.IndexScope))); + Settings.EMPTY, Collections.singleton(Setting.boolSetting("boo", true, Property.IndexScope))); } public void testLoggingUpdates() { @@ -310,9 +310,9 @@ public class ScopedSettingsTests extends ESTestCase { public void testOverlappingComplexMatchSettings() { Set> settings = new LinkedHashSet<>(2); final boolean groupFirst = randomBoolean(); - final Setting groupSetting = Setting.groupSetting("foo.", SettingsProperty.ClusterScope); + final Setting groupSetting = Setting.groupSetting("foo.", Property.NodeScope); final Setting 
listSetting = - Setting.listSetting("foo.bar", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); + Setting.listSetting("foo.bar", Collections.emptyList(), Function.identity(), Property.NodeScope); settings.add(groupFirst ? groupSetting : listSetting); settings.add(groupFirst ? listSetting : groupSetting); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 841126d1415b..e8f754e15c53 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.common.settings; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; @@ -38,7 +38,7 @@ public class SettingTests extends ESTestCase { public void testGet() { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, Property.Dynamic, Property.NodeScope); assertFalse(booleanSetting.get(Settings.EMPTY)); assertFalse(booleanSetting.get(Settings.builder().put("foo.bar", false).build())); assertTrue(booleanSetting.get(Settings.builder().put("foo.bar", true).build())); @@ -46,12 +46,12 @@ public class SettingTests extends ESTestCase { public void testByteSize() { Setting byteSizeValueSetting = - Setting.byteSizeSetting("a.byte.size", new ByteSizeValue(1024), SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("a.byte.size", new ByteSizeValue(1024), Property.Dynamic, Property.NodeScope); 
assertFalse(byteSizeValueSetting.isGroupSetting()); ByteSizeValue byteSizeValue = byteSizeValueSetting.get(Settings.EMPTY); assertEquals(byteSizeValue.bytes(), 1024); - byteSizeValueSetting = Setting.byteSizeSetting("a.byte.size", s -> "2048b", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + byteSizeValueSetting = Setting.byteSizeSetting("a.byte.size", s -> "2048b", Property.Dynamic, Property.NodeScope); byteSizeValue = byteSizeValueSetting.get(Settings.EMPTY); assertEquals(byteSizeValue.bytes(), 2048); @@ -70,7 +70,7 @@ public class SettingTests extends ESTestCase { } public void testSimpleUpdate() { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, Property.Dynamic, Property.NodeScope); AtomicReference atomicBoolean = new AtomicReference<>(null); ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(atomicBoolean::set, logger); Settings build = Settings.builder().put("foo.bar", false).build(); @@ -91,7 +91,7 @@ public class SettingTests extends ESTestCase { } public void testUpdateNotDynamic() { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, SettingsProperty.ClusterScope); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, Property.NodeScope); assertFalse(booleanSetting.isGroupSetting()); AtomicReference atomicBoolean = new AtomicReference<>(null); try { @@ -103,7 +103,7 @@ public class SettingTests extends ESTestCase { } public void testUpdaterIsIsolated() { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, Property.Dynamic, Property.NodeScope); AtomicReference ab1 = new AtomicReference<>(null); AtomicReference ab2 = new AtomicReference<>(null); ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(ab1::set, 
logger); @@ -115,7 +115,7 @@ public class SettingTests extends ESTestCase { public void testDefault() { TimeValue defautlValue = TimeValue.timeValueMillis(randomIntBetween(0, 1000000)); Setting setting = - Setting.positiveTimeSetting("my.time.value", defautlValue, SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("my.time.value", defautlValue, Property.NodeScope); assertFalse(setting.isGroupSetting()); String aDefault = setting.getDefaultRaw(Settings.EMPTY); assertEquals(defautlValue.millis() + "ms", aDefault); @@ -123,11 +123,11 @@ public class SettingTests extends ESTestCase { assertEquals(defautlValue, setting.getDefault(Settings.EMPTY)); Setting secondaryDefault = - new Setting<>("foo.bar", (s) -> s.get("old.foo.bar", "some_default"), Function.identity(), SettingsProperty.ClusterScope); + new Setting<>("foo.bar", (s) -> s.get("old.foo.bar", "some_default"), Function.identity(), Property.NodeScope); assertEquals("some_default", secondaryDefault.get(Settings.EMPTY)); assertEquals("42", secondaryDefault.get(Settings.builder().put("old.foo.bar", 42).build())); Setting secondaryDefaultViaSettings = - new Setting<>("foo.bar", secondaryDefault, Function.identity(), SettingsProperty.ClusterScope); + new Setting<>("foo.bar", secondaryDefault, Function.identity(), Property.NodeScope); assertEquals("some_default", secondaryDefaultViaSettings.get(Settings.EMPTY)); assertEquals("42", secondaryDefaultViaSettings.get(Settings.builder().put("old.foo.bar", 42).build())); } @@ -135,7 +135,7 @@ public class SettingTests extends ESTestCase { public void testComplexType() { AtomicReference ref = new AtomicReference<>(null); Setting setting = new Setting<>("foo.bar", (s) -> "", (s) -> new ComplexType(s), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); assertFalse(setting.isGroupSetting()); ref.set(setting.get(Settings.EMPTY)); ComplexType type = ref.get(); @@ -156,19 +156,17 @@ public class SettingTests extends 
ESTestCase { } public void testType() { - Setting integerSetting = Setting.intSetting("foo.int.bar", 1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); - assertThat(integerSetting.hasClusterScope(), is(true)); + Setting integerSetting = Setting.intSetting("foo.int.bar", 1, Property.Dynamic, Property.NodeScope); + assertThat(integerSetting.hasNodeScope(), is(true)); assertThat(integerSetting.hasIndexScope(), is(false)); - assertThat(integerSetting.hasNodeScope(), is(false)); - integerSetting = Setting.intSetting("foo.int.bar", 1, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + integerSetting = Setting.intSetting("foo.int.bar", 1, Property.Dynamic, Property.IndexScope); assertThat(integerSetting.hasIndexScope(), is(true)); - assertThat(integerSetting.hasClusterScope(), is(false)); assertThat(integerSetting.hasNodeScope(), is(false)); } public void testGroups() { AtomicReference ref = new AtomicReference<>(null); - Setting setting = Setting.groupSetting("foo.bar.", SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting setting = Setting.groupSetting("foo.bar.", Property.Dynamic, Property.NodeScope); assertTrue(setting.isGroupSetting()); ClusterSettings.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger); @@ -246,8 +244,8 @@ public class SettingTests extends ESTestCase { public void testComposite() { Composite c = new Composite(); - Setting a = Setting.intSetting("foo.int.bar.a", 1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); - Setting b = Setting.intSetting("foo.int.bar.b", 1, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Setting a = Setting.intSetting("foo.int.bar.a", 1, Property.Dynamic, Property.NodeScope); + Setting b = Setting.intSetting("foo.int.bar.b", 1, Property.Dynamic, Property.NodeScope); ClusterSettings.SettingUpdater> settingUpdater = Setting.compoundUpdater(c::set, a, b, logger); assertFalse(settingUpdater.apply(Settings.EMPTY, Settings.EMPTY)); assertNull(c.a); @@ -276,7 
+274,7 @@ public class SettingTests extends ESTestCase { public void testListSettings() { Setting> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); List value = listSetting.get(Settings.EMPTY); assertEquals(1, value.size()); assertEquals("foo,bar", value.get(0)); @@ -316,7 +314,7 @@ public class SettingTests extends ESTestCase { assertEquals("foo,bar", ref.get().get(0)); Setting> otherSettings = Setting.listSetting("foo.bar", Collections.emptyList(), Integer::parseInt, - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); List defaultValue = otherSettings.get(Settings.EMPTY); assertEquals(0, defaultValue.size()); List intValues = otherSettings.get(Settings.builder().put("foo.bar", "0,1,2,3").build()); @@ -326,7 +324,7 @@ public class SettingTests extends ESTestCase { } Setting> settingWithFallback = Setting.listSetting("foo.baz", listSetting, Function.identity(), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); value = settingWithFallback.get(Settings.EMPTY); assertEquals(1, value.size()); assertEquals("foo,bar", value.get(0)); @@ -349,7 +347,7 @@ public class SettingTests extends ESTestCase { public void testListSettingAcceptsNumberSyntax() { Setting> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(), - SettingsProperty.Dynamic, SettingsProperty.ClusterScope); + Property.Dynamic, Property.NodeScope); List input = Arrays.asList("test", "test1, test2", "test", ",,,,"); Settings.Builder builder = Settings.builder().putArray("foo.bar", input.toArray(new String[0])); // try to parse this really annoying format @@ -367,7 +365,7 @@ public class SettingTests extends ESTestCase { } public void testDynamicKeySetting() { - Setting setting = Setting.prefixKeySetting("foo.", "false", 
Boolean::parseBoolean, SettingsProperty.ClusterScope); + Setting setting = Setting.prefixKeySetting("foo.", "false", Boolean::parseBoolean, Property.NodeScope); assertTrue(setting.hasComplexMatcher()); assertTrue(setting.match("foo.bar")); assertFalse(setting.match("foo")); @@ -385,7 +383,7 @@ public class SettingTests extends ESTestCase { public void testAdfixKeySetting() { Setting setting = - Setting.adfixKeySetting("foo", "enable", "false", Boolean::parseBoolean, Setting.SettingsProperty.ClusterScope); + Setting.adfixKeySetting("foo", "enable", "false", Boolean::parseBoolean, Property.NodeScope); assertTrue(setting.hasComplexMatcher()); assertTrue(setting.match("foo.bar.enable")); assertTrue(setting.match("foo.baz.enable")); @@ -406,7 +404,7 @@ public class SettingTests extends ESTestCase { } public void testMinMaxInt() { - Setting integerSetting = Setting.intSetting("foo.bar", 1, 0, 10, SettingsProperty.ClusterScope); + Setting integerSetting = Setting.intSetting("foo.bar", 1, 0, 10, Property.NodeScope); try { integerSetting.get(Settings.builder().put("foo.bar", 11).build()); fail(); @@ -430,46 +428,21 @@ public class SettingTests extends ESTestCase { */ public void testMutuallyExclusiveScopes() { // Those should pass - Setting setting = Setting.simpleString("foo.bar", SettingsProperty.ClusterScope); - assertThat(setting.hasClusterScope(), is(true)); - assertThat(setting.hasNodeScope(), is(false)); - assertThat(setting.hasIndexScope(), is(false)); - setting = Setting.simpleString("foo.bar", SettingsProperty.NodeScope); + Setting setting = Setting.simpleString("foo.bar", Property.NodeScope); assertThat(setting.hasNodeScope(), is(true)); assertThat(setting.hasIndexScope(), is(false)); - assertThat(setting.hasClusterScope(), is(false)); - setting = Setting.simpleString("foo.bar", SettingsProperty.IndexScope); + setting = Setting.simpleString("foo.bar", Property.IndexScope); assertThat(setting.hasIndexScope(), is(true)); assertThat(setting.hasNodeScope(), 
is(false)); - assertThat(setting.hasClusterScope(), is(false)); // We test the default scope setting = Setting.simpleString("foo.bar"); assertThat(setting.hasNodeScope(), is(true)); assertThat(setting.hasIndexScope(), is(false)); - assertThat(setting.hasClusterScope(), is(false)); // Those should fail try { - Setting.simpleString("foo.bar", SettingsProperty.IndexScope, SettingsProperty.ClusterScope); - fail("Multiple scopes should fail"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("More than one scope has been added to the setting")); - } - try { - Setting.simpleString("foo.bar", SettingsProperty.IndexScope, SettingsProperty.NodeScope); - fail("Multiple scopes should fail"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("More than one scope has been added to the setting")); - } - try { - Setting.simpleString("foo.bar", SettingsProperty.ClusterScope, SettingsProperty.NodeScope); - fail("Multiple scopes should fail"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("More than one scope has been added to the setting")); - } - try { - Setting.simpleString("foo.bar", SettingsProperty.IndexScope, SettingsProperty.ClusterScope, SettingsProperty.NodeScope); + Setting.simpleString("foo.bar", Property.IndexScope, Property.NodeScope); fail("Multiple scopes should fail"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("More than one scope has been added to the setting")); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java index f5fd760297ba..a8b5824f8ec9 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.common.settings; import 
org.elasticsearch.common.inject.ModuleTestCase; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; public class SettingsModuleTests extends ModuleTestCase { @@ -46,13 +46,13 @@ public class SettingsModuleTests extends ModuleTestCase { { Settings settings = Settings.builder().put("some.custom.setting", "2.0").build(); SettingsModule module = new SettingsModule(settings); - module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, SettingsProperty.ClusterScope)); + module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, Property.NodeScope)); assertInstanceBinding(module, Settings.class, (s) -> s == settings); } { Settings settings = Settings.builder().put("some.custom.setting", "false").build(); SettingsModule module = new SettingsModule(settings); - module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, SettingsProperty.ClusterScope)); + module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, Property.NodeScope)); try { assertInstanceBinding(module, Settings.class, (s) -> s == settings); fail(); @@ -132,9 +132,9 @@ public class SettingsModuleTests extends ModuleTestCase { public void testRegisterSettingsFilter() { Settings settings = Settings.builder().put("foo.bar", "false").put("bar.foo", false).put("bar.baz", false).build(); SettingsModule module = new SettingsModule(settings); - module.registerSetting(Setting.boolSetting("foo.bar", true, SettingsProperty.ClusterScope)); - module.registerSetting(Setting.boolSetting("bar.foo", true, SettingsProperty.ClusterScope, SettingsProperty.Filtered)); - module.registerSetting(Setting.boolSetting("bar.baz", true, SettingsProperty.ClusterScope)); + module.registerSetting(Setting.boolSetting("foo.bar", true, Property.NodeScope)); + module.registerSetting(Setting.boolSetting("bar.foo", true, Property.NodeScope, Property.Filtered)); + module.registerSetting(Setting.boolSetting("bar.baz", 
true, Property.NodeScope)); module.registerSettingsFilter("foo.*"); try { diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 7da2b0aaa2d5..aa4fa81bc5bb 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -34,7 +34,7 @@ import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.env.Environment; @@ -195,9 +195,9 @@ public class IndexModuleTests extends ESTestCase { public void testListener() throws IOException { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, Property.Dynamic, Property.IndexScope); IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings, booleanSetting), null, new AnalysisRegistry(null, environment)); - Setting booleanSetting2 = Setting.boolSetting("foo.bar.baz", false, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting booleanSetting2 = Setting.boolSetting("foo.bar.baz", false, Property.Dynamic, Property.IndexScope); AtomicBoolean atomicBoolean = new AtomicBoolean(false); module.addSettingsUpdateConsumer(booleanSetting, atomicBoolean::set); diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index 18af7e13f7d3..46d99e3b4bcb 100644 --- 
a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -46,7 +46,7 @@ public class IndexSettingsTests extends ESTestCase { Settings theSettings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build(); final AtomicInteger integer = new AtomicInteger(0); Setting integerSetting = Setting.intSetting("index.test.setting.int", -1, - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); IndexMetaData metaData = newIndexMeta("index", theSettings); IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), Settings.EMPTY, integerSetting); settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, integer::set); @@ -68,9 +68,9 @@ public class IndexSettingsTests extends ESTestCase { final AtomicInteger integer = new AtomicInteger(0); final StringBuilder builder = new StringBuilder(); Setting integerSetting = Setting.intSetting("index.test.setting.int", -1, - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); Setting notUpdated = new Setting<>("index.not.updated", "", Function.identity(), - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), Settings.EMPTY, 
integerSetting, notUpdated); settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, integer::set); @@ -132,7 +132,7 @@ public class IndexSettingsTests extends ESTestCase { Settings nodeSettings = Settings.settingsBuilder().put("index.foo.bar", 43).build(); final AtomicInteger indexValue = new AtomicInteger(0); - Setting integerSetting = Setting.intSetting("index.foo.bar", -1, SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Setting integerSetting = Setting.intSetting("index.foo.bar", -1, Property.Dynamic, Property.IndexScope); IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), nodeSettings, integerSetting); settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, indexValue::set); assertEquals(numReplicas, settings.getNumberOfReplicas()); diff --git a/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java b/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java index 666994e823a8..e9e8dcfc0073 100644 --- a/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java +++ b/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java @@ -21,7 +21,7 @@ package org.elasticsearch.index; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.plugins.Plugin; @@ -45,7 +45,7 @@ public class SettingsListenerIT extends ESIntegTestCase { public static class SettingsListenerPlugin extends Plugin { private final SettingsTestingService service = new SettingsTestingService(); private static final Setting SETTING = Setting.intSetting("index.test.new.setting", 0, - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + 
Property.Dynamic, Property.IndexScope); /** * The name of the plugin. */ @@ -96,7 +96,7 @@ public class SettingsListenerIT extends ESIntegTestCase { public static class SettingsTestingService { public volatile int value; public static Setting VALUE = Setting.intSetting("index.test.new.setting", -1, -1, - SettingsProperty.Dynamic, SettingsProperty.IndexScope); + Property.Dynamic, Property.IndexScope); public void setValue(int value) { this.value = value; diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java index 9f9a075ea28a..aaac9c0cca4b 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java @@ -47,7 +47,7 @@ import org.elasticsearch.action.suggest.SuggestRequestBuilder; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.index.IndexNotFoundException; @@ -644,11 +644,11 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { } private static final Setting INDEX_A = - new Setting<>("index.a", "", Function.identity(), SettingsProperty.Dynamic, SettingsProperty.IndexScope); + new Setting<>("index.a", "", Function.identity(), Property.Dynamic, Property.IndexScope); private static final Setting INDEX_C = - new Setting<>("index.c", "", Function.identity(), SettingsProperty.Dynamic, SettingsProperty.IndexScope); + new Setting<>("index.c", "", Function.identity(), Property.Dynamic, Property.IndexScope); private static final Setting INDEX_E = - new Setting<>("index.e", "", 
Function.identity(), SettingsProperty.IndexScope); + new Setting<>("index.e", "", Function.identity(), Property.IndexScope); public void onModule(SettingsModule module) { diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index e025246a2f0a..6bea3217894c 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -31,7 +31,7 @@ import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.client.Requests; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.unit.TimeValue; @@ -199,9 +199,9 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase { public static class RandomExceptionDirectoryReaderWrapper extends MockEngineSupport.DirectoryReaderWrapper { public static final Setting EXCEPTION_TOP_LEVEL_RATIO_SETTING = - Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, SettingsProperty.IndexScope); + Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, Property.IndexScope); public static final Setting EXCEPTION_LOW_LEVEL_RATIO_SETTING = - Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, SettingsProperty.IndexScope); + Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, Property.IndexScope); public static class TestPlugin extends Plugin { @Override public String name() { diff --git 
a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java index f53ca39941a4..489a604292d9 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java @@ -29,7 +29,7 @@ import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; import org.elasticsearch.common.settings.SettingsModule; @@ -154,9 +154,9 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase { public static class TestPlugin extends Plugin { public static final Setting EXCEPTION_TOP_LEVEL_RATIO_SETTING = - Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, SettingsProperty.IndexScope); + Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, Property.IndexScope); public static final Setting EXCEPTION_LOW_LEVEL_RATIO_SETTING = - Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, SettingsProperty.IndexScope); + Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, Property.IndexScope); @Override public String name() { return "random-exception-reader-wrapper"; diff --git a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java index 8669a38087ee..0dcc345f01b8 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java +++ 
b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java @@ -31,7 +31,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.env.Environment; @@ -60,9 +60,9 @@ public class MockRepository extends FsRepository { public static class Plugin extends org.elasticsearch.plugins.Plugin { - public static final Setting USERNAME_SETTING = Setting.simpleString("secret.mock.username", SettingsProperty.ClusterScope); + public static final Setting USERNAME_SETTING = Setting.simpleString("secret.mock.username", Property.NodeScope); public static final Setting PASSWORD_SETTING = - Setting.simpleString("secret.mock.password", SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("secret.mock.password", Property.NodeScope, Property.Filtered); @Override public String name() { diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java index a59510b8873c..acc1e76bde40 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java @@ -22,7 +22,7 @@ package org.elasticsearch.cloud.azure.management; import com.microsoft.windowsazure.core.utils.KeyStoreType; import com.microsoft.windowsazure.management.compute.models.HostedServiceGetDetailedResponse; import org.elasticsearch.common.settings.Setting; -import 
org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.azure.AzureUnicastHostsProvider; @@ -30,28 +30,28 @@ public interface AzureComputeService { final class Management { public static final Setting SUBSCRIPTION_ID_SETTING = - Setting.simpleString("cloud.azure.management.subscription.id", SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("cloud.azure.management.subscription.id", Property.NodeScope, Property.Filtered); public static final Setting SERVICE_NAME_SETTING = - Setting.simpleString("cloud.azure.management.cloud.service.name", SettingsProperty.ClusterScope); + Setting.simpleString("cloud.azure.management.cloud.service.name", Property.NodeScope); // Keystore settings public static final Setting KEYSTORE_PATH_SETTING = - Setting.simpleString("cloud.azure.management.keystore.path", SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("cloud.azure.management.keystore.path", Property.NodeScope, Property.Filtered); public static final Setting KEYSTORE_PASSWORD_SETTING = - Setting.simpleString("cloud.azure.management.keystore.password", SettingsProperty.ClusterScope, - SettingsProperty.Filtered); + Setting.simpleString("cloud.azure.management.keystore.password", Property.NodeScope, + Property.Filtered); public static final Setting KEYSTORE_TYPE_SETTING = new Setting<>("cloud.azure.management.keystore.type", KeyStoreType.pkcs12.name(), KeyStoreType::fromString, - SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Property.NodeScope, Property.Filtered); } final class Discovery { public static final Setting REFRESH_SETTING = - Setting.positiveTimeSetting("discovery.azure.refresh_interval", TimeValue.timeValueSeconds(0), SettingsProperty.ClusterScope); + Setting.positiveTimeSetting("discovery.azure.refresh_interval", TimeValue.timeValueSeconds(0), 
Property.NodeScope); public static final Setting HOST_TYPE_SETTING = new Setting<>("discovery.azure.host.type", AzureUnicastHostsProvider.HostType.PRIVATE_IP.name(), - AzureUnicastHostsProvider.HostType::fromString, SettingsProperty.ClusterScope); + AzureUnicastHostsProvider.HostType::fromString, Property.NodeScope); public static final String ENDPOINT_NAME = "discovery.azure.endpoint.name"; public static final String DEPLOYMENT_NAME = "discovery.azure.deployment.name"; diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java index 2f2c423afb78..8cfe6c431085 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java @@ -22,7 +22,7 @@ package org.elasticsearch.cloud.aws; import com.amazonaws.Protocol; import com.amazonaws.services.ec2.AmazonEC2; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -33,7 +33,7 @@ import java.util.Locale; import java.util.function.Function; public interface AwsEc2Service { - Setting AUTO_ATTRIBUTE_SETTING = Setting.boolSetting("cloud.node.auto_attributes", false, SettingsProperty.ClusterScope); + Setting AUTO_ATTRIBUTE_SETTING = Setting.boolSetting("cloud.node.auto_attributes", false, Property.NodeScope); // Global AWS settings (shared between discovery-ec2 and repository-s3) // Each setting starting with `cloud.aws` also exists in repository-s3 project. Don't forget to update @@ -42,43 +42,43 @@ public interface AwsEc2Service { * cloud.aws.access_key: AWS Access key. 
Shared with repository-s3 plugin */ Setting KEY_SETTING = - Setting.simpleString("cloud.aws.access_key", SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("cloud.aws.access_key", Property.NodeScope, Property.Filtered); /** * cloud.aws.secret_key: AWS Secret key. Shared with repository-s3 plugin */ Setting SECRET_SETTING = - Setting.simpleString("cloud.aws.secret_key", SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("cloud.aws.secret_key", Property.NodeScope, Property.Filtered); /** * cloud.aws.protocol: Protocol for AWS API: http or https. Defaults to https. Shared with repository-s3 plugin */ Setting PROTOCOL_SETTING = new Setting<>("cloud.aws.protocol", "https", s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), - SettingsProperty.ClusterScope); + Property.NodeScope); /** * cloud.aws.proxy.host: In case of proxy, define its hostname/IP. Shared with repository-s3 plugin */ - Setting PROXY_HOST_SETTING = Setting.simpleString("cloud.aws.proxy.host", SettingsProperty.ClusterScope); + Setting PROXY_HOST_SETTING = Setting.simpleString("cloud.aws.proxy.host", Property.NodeScope); /** * cloud.aws.proxy.port: In case of proxy, define its port. Defaults to 80. Shared with repository-s3 plugin */ - Setting PROXY_PORT_SETTING = Setting.intSetting("cloud.aws.proxy.port", 80, 0, 1<<16, SettingsProperty.ClusterScope); + Setting PROXY_PORT_SETTING = Setting.intSetting("cloud.aws.proxy.port", 80, 0, 1<<16, Property.NodeScope); /** * cloud.aws.proxy.username: In case of proxy with auth, define the username. Shared with repository-s3 plugin */ - Setting PROXY_USERNAME_SETTING = Setting.simpleString("cloud.aws.proxy.username", SettingsProperty.ClusterScope); + Setting PROXY_USERNAME_SETTING = Setting.simpleString("cloud.aws.proxy.username", Property.NodeScope); /** * cloud.aws.proxy.password: In case of proxy with auth, define the password. 
Shared with repository-s3 plugin */ Setting PROXY_PASSWORD_SETTING = - Setting.simpleString("cloud.aws.proxy.password", SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("cloud.aws.proxy.password", Property.NodeScope, Property.Filtered); /** * cloud.aws.signer: If you are using an old AWS API version, you can define a Signer. Shared with repository-s3 plugin */ - Setting SIGNER_SETTING = Setting.simpleString("cloud.aws.signer", SettingsProperty.ClusterScope); + Setting SIGNER_SETTING = Setting.simpleString("cloud.aws.signer", Property.NodeScope); /** * cloud.aws.region: Region. Shared with repository-s3 plugin */ Setting REGION_SETTING = - new Setting<>("cloud.aws.region", "", s -> s.toLowerCase(Locale.ROOT), SettingsProperty.ClusterScope); + new Setting<>("cloud.aws.region", "", s -> s.toLowerCase(Locale.ROOT), Property.NodeScope); /** * Defines specific ec2 settings starting with cloud.aws.ec2. @@ -89,62 +89,62 @@ public interface AwsEc2Service { * @see AwsEc2Service#KEY_SETTING */ Setting KEY_SETTING = new Setting<>("cloud.aws.ec2.access_key", AwsEc2Service.KEY_SETTING, Function.identity(), - SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Property.NodeScope, Property.Filtered); /** * cloud.aws.ec2.secret_key: AWS Secret key specific for EC2 API calls. Defaults to cloud.aws.secret_key. * @see AwsEc2Service#SECRET_SETTING */ Setting SECRET_SETTING = new Setting<>("cloud.aws.ec2.secret_key", AwsEc2Service.SECRET_SETTING, Function.identity(), - SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Property.NodeScope, Property.Filtered); /** * cloud.aws.ec2.protocol: Protocol for AWS API specific for EC2 API calls: http or https. Defaults to cloud.aws.protocol. 
* @see AwsEc2Service#PROTOCOL_SETTING */ Setting PROTOCOL_SETTING = new Setting<>("cloud.aws.ec2.protocol", AwsEc2Service.PROTOCOL_SETTING, - s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), SettingsProperty.ClusterScope); + s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope); /** * cloud.aws.ec2.proxy.host: In case of proxy, define its hostname/IP specific for EC2 API calls. Defaults to cloud.aws.proxy.host. * @see AwsEc2Service#PROXY_HOST_SETTING */ Setting PROXY_HOST_SETTING = new Setting<>("cloud.aws.ec2.proxy.host", AwsEc2Service.PROXY_HOST_SETTING, - Function.identity(), SettingsProperty.ClusterScope); + Function.identity(), Property.NodeScope); /** * cloud.aws.ec2.proxy.port: In case of proxy, define its port specific for EC2 API calls. Defaults to cloud.aws.proxy.port. * @see AwsEc2Service#PROXY_PORT_SETTING */ Setting PROXY_PORT_SETTING = new Setting<>("cloud.aws.ec2.proxy.port", AwsEc2Service.PROXY_PORT_SETTING, - s -> Setting.parseInt(s, 0, 1<<16, "cloud.aws.ec2.proxy.port"), SettingsProperty.ClusterScope); + s -> Setting.parseInt(s, 0, 1<<16, "cloud.aws.ec2.proxy.port"), Property.NodeScope); /** * cloud.aws.ec2.proxy.username: In case of proxy with auth, define the username specific for EC2 API calls. * Defaults to cloud.aws.proxy.username. * @see AwsEc2Service#PROXY_USERNAME_SETTING */ Setting PROXY_USERNAME_SETTING = new Setting<>("cloud.aws.ec2.proxy.username", AwsEc2Service.PROXY_USERNAME_SETTING, - Function.identity(), SettingsProperty.ClusterScope); + Function.identity(), Property.NodeScope); /** * cloud.aws.ec2.proxy.password: In case of proxy with auth, define the password specific for EC2 API calls. * Defaults to cloud.aws.proxy.password. 
* @see AwsEc2Service#PROXY_PASSWORD_SETTING */ Setting PROXY_PASSWORD_SETTING = new Setting<>("cloud.aws.ec2.proxy.password", AwsEc2Service.PROXY_PASSWORD_SETTING, - Function.identity(), SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Function.identity(), Property.NodeScope, Property.Filtered); /** * cloud.aws.ec2.signer: If you are using an old AWS API version, you can define a Signer. Specific for EC2 API calls. * Defaults to cloud.aws.signer. * @see AwsEc2Service#SIGNER_SETTING */ Setting SIGNER_SETTING = new Setting<>("cloud.aws.ec2.signer", AwsEc2Service.SIGNER_SETTING, Function.identity(), - SettingsProperty.ClusterScope); + Property.NodeScope); /** * cloud.aws.ec2.region: Region specific for EC2 API calls. Defaults to cloud.aws.region. * @see AwsEc2Service#REGION_SETTING */ Setting REGION_SETTING = new Setting<>("cloud.aws.ec2.region", AwsEc2Service.REGION_SETTING, - s -> s.toLowerCase(Locale.ROOT), SettingsProperty.ClusterScope); + s -> s.toLowerCase(Locale.ROOT), Property.NodeScope); /** * cloud.aws.ec2.endpoint: Endpoint. If not set, endpoint will be guessed based on region setting. */ - Setting ENDPOINT_SETTING = Setting.simpleString("cloud.aws.ec2.endpoint", SettingsProperty.ClusterScope); + Setting ENDPOINT_SETTING = Setting.simpleString("cloud.aws.ec2.endpoint", Property.NodeScope); } /** @@ -164,31 +164,31 @@ public interface AwsEc2Service { */ Setting HOST_TYPE_SETTING = new Setting<>("discovery.ec2.host_type", HostType.PRIVATE_IP.name(), s -> HostType.valueOf(s.toUpperCase(Locale.ROOT)), - SettingsProperty.ClusterScope); + Property.NodeScope); /** * discovery.ec2.any_group: If set to false, will require all security groups to be present for the instance to be used for the * discovery. Defaults to true. 
*/ Setting ANY_GROUP_SETTING = - Setting.boolSetting("discovery.ec2.any_group", true, SettingsProperty.ClusterScope); + Setting.boolSetting("discovery.ec2.any_group", true, Property.NodeScope); /** * discovery.ec2.groups: Either a comma separated list or array based list of (security) groups. Only instances with the provided * security groups will be used in the cluster discovery. (NOTE: You could provide either group NAME or group ID.) */ Setting> GROUPS_SETTING = - Setting.listSetting("discovery.ec2.groups", new ArrayList<>(), s -> s.toString(), SettingsProperty.ClusterScope); + Setting.listSetting("discovery.ec2.groups", new ArrayList<>(), s -> s.toString(), Property.NodeScope); /** * discovery.ec2.availability_zones: Either a comma separated list or array based list of availability zones. Only instances within * the provided availability zones will be used in the cluster discovery. */ Setting> AVAILABILITY_ZONES_SETTING = Setting.listSetting("discovery.ec2.availability_zones", Collections.emptyList(), s -> s.toString(), - SettingsProperty.ClusterScope); + Property.NodeScope); /** * discovery.ec2.node_cache_time: How long the list of hosts is cached to prevent further requests to the AWS API. Defaults to 10s. */ Setting NODE_CACHE_TIME_SETTING = - Setting.timeSetting("discovery.ec2.node_cache_time", TimeValue.timeValueSeconds(10), SettingsProperty.ClusterScope); + Setting.timeSetting("discovery.ec2.node_cache_time", TimeValue.timeValueSeconds(10), Property.NodeScope); /** * discovery.ec2.tag.*: The ec2 discovery can filter machines to include in the cluster based on tags (and not just groups). @@ -196,7 +196,7 @@ public interface AwsEc2Service { * instances with a tag key set to stage, and a value of dev. Several tags set will require all of those tags to be set for the * instance to be included. 
*/ - Setting TAG_SETTING = Setting.groupSetting("discovery.ec2.tag.", SettingsProperty.ClusterScope); + Setting TAG_SETTING = Setting.groupSetting("discovery.ec2.tag.", Property.NodeScope); } AmazonEC2 client(); diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java index 7a46768e5a04..a6faa390e5d9 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java @@ -22,7 +22,7 @@ package org.elasticsearch.cloud.gce; import com.google.api.services.compute.model.Instance; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.TimeValue; import java.io.IOException; @@ -43,25 +43,25 @@ public interface GceComputeService extends LifecycleComponent /** * cloud.gce.project_id: Google project id */ - Setting PROJECT_SETTING = Setting.simpleString("cloud.gce.project_id", SettingsProperty.ClusterScope); + Setting PROJECT_SETTING = Setting.simpleString("cloud.gce.project_id", Property.NodeScope); /** * cloud.gce.zone: Google Compute Engine zones */ Setting> ZONE_SETTING = - Setting.listSetting("cloud.gce.zone", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); + Setting.listSetting("cloud.gce.zone", Collections.emptyList(), Function.identity(), Property.NodeScope); /** * cloud.gce.refresh_interval: How long the list of hosts is cached to prevent further requests to the AWS API. 0 disables caching. * A negative value will cause infinite caching. Defaults to 0s. 
*/ Setting REFRESH_SETTING = - Setting.timeSetting("cloud.gce.refresh_interval", TimeValue.timeValueSeconds(0), SettingsProperty.ClusterScope); + Setting.timeSetting("cloud.gce.refresh_interval", TimeValue.timeValueSeconds(0), Property.NodeScope); /** * cloud.gce.retry: Should we retry calling GCE API in case of error? Defaults to true. */ - Setting RETRY_SETTING = Setting.boolSetting("cloud.gce.retry", true, SettingsProperty.ClusterScope); + Setting RETRY_SETTING = Setting.boolSetting("cloud.gce.retry", true, Property.NodeScope); /** * cloud.gce.max_wait: How long exponential backoff should retry before definitely failing. @@ -69,7 +69,7 @@ public interface GceComputeService extends LifecycleComponent * A negative value will retry indefinitely. Defaults to `-1s` (retry indefinitely). */ Setting MAX_WAIT_SETTING = - Setting.timeSetting("cloud.gce.max_wait", TimeValue.timeValueSeconds(-1), SettingsProperty.ClusterScope); + Setting.timeSetting("cloud.gce.max_wait", TimeValue.timeValueSeconds(-1), Property.NodeScope); /** * Return a collection of running instances within the same GCE project diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java index b0cfeb16c516..85e0910736f3 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java @@ -38,14 +38,13 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.gce.RetryHttpInitializerWrapper; import java.io.IOException; import java.net.URL; -import java.nio.file.Files; import java.security.AccessController; import java.security.GeneralSecurityException; import java.security.PrivilegedAction; @@ -62,11 +61,11 @@ public class GceComputeServiceImpl extends AbstractLifecycleComponent GCE_VALIDATE_CERTIFICATES = - Setting.boolSetting("cloud.gce.validate_certificates", true, SettingsProperty.ClusterScope); + Setting.boolSetting("cloud.gce.validate_certificates", true, Property.NodeScope); public static final Setting GCE_HOST = - new Setting<>("cloud.gce.host", "http://metadata.google.internal", Function.identity(), SettingsProperty.ClusterScope); + new Setting<>("cloud.gce.host", "http://metadata.google.internal", Function.identity(), Property.NodeScope); public static final Setting GCE_ROOT_URL = - new Setting<>("cloud.gce.root_url", "https://www.googleapis.com", Function.identity(), SettingsProperty.ClusterScope); + new Setting<>("cloud.gce.root_url", "https://www.googleapis.com", Function.identity(), Property.NodeScope); private final String project; private final List zones; diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java index d162ad8894c3..85f3e3a9585b 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java @@ -31,7 +31,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import 
org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; @@ -55,7 +55,7 @@ public class GceUnicastHostsProvider extends AbstractComponent implements Unicas * discovery.gce.tags: The gce discovery can filter machines to include in the cluster based on tags. */ public static final Setting> TAGS_SETTING = - Setting.listSetting("discovery.gce.tags", Collections.emptyList(), Function.identity(), SettingsProperty.ClusterScope); + Setting.listSetting("discovery.gce.tags", Collections.emptyList(), Function.identity(), Property.NodeScope); static final class Status { private static final String TERMINATED = "TERMINATED"; diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java index dd721f77ea02..cf5a0cf41d73 100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java @@ -28,7 +28,7 @@ import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -73,11 +73,11 @@ public class AttachmentMapper extends FieldMapper { private static ESLogger logger = ESLoggerFactory.getLogger("mapper.attachment"); public static final Setting INDEX_ATTACHMENT_IGNORE_ERRORS_SETTING = - 
Setting.boolSetting("index.mapping.attachment.ignore_errors", true, SettingsProperty.IndexScope); + Setting.boolSetting("index.mapping.attachment.ignore_errors", true, Property.IndexScope); public static final Setting INDEX_ATTACHMENT_DETECT_LANGUAGE_SETTING = - Setting.boolSetting("index.mapping.attachment.detect_language", false, SettingsProperty.IndexScope); + Setting.boolSetting("index.mapping.attachment.detect_language", false, Property.IndexScope); public static final Setting INDEX_ATTACHMENT_INDEXED_CHARS_SETTING = - Setting.intSetting("index.mapping.attachment.indexed_chars", 100000, SettingsProperty.IndexScope); + Setting.intSetting("index.mapping.attachment.indexed_chars", 100000, Property.IndexScope); public static final String CONTENT_TYPE = "attachment"; diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java index f9509bea57a4..01d66c177a26 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java @@ -24,7 +24,7 @@ import com.microsoft.azure.storage.StorageException; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -42,19 +42,19 @@ public interface AzureStorageService { final class Storage { public static final String PREFIX = "cloud.azure.storage."; public static final Setting TIMEOUT_SETTING = - Setting.timeSetting("cloud.azure.storage.timeout", TimeValue.timeValueMinutes(-1), SettingsProperty.ClusterScope); + 
Setting.timeSetting("cloud.azure.storage.timeout", TimeValue.timeValueMinutes(-1), Property.NodeScope); public static final Setting ACCOUNT_SETTING = - Setting.simpleString("repositories.azure.account", SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("repositories.azure.account", Property.NodeScope, Property.Filtered); public static final Setting CONTAINER_SETTING = - Setting.simpleString("repositories.azure.container", SettingsProperty.ClusterScope); + Setting.simpleString("repositories.azure.container", Property.NodeScope); public static final Setting BASE_PATH_SETTING = - Setting.simpleString("repositories.azure.base_path", SettingsProperty.ClusterScope); + Setting.simpleString("repositories.azure.base_path", Property.NodeScope); public static final Setting LOCATION_MODE_SETTING = - Setting.simpleString("repositories.azure.location_mode", SettingsProperty.ClusterScope); + Setting.simpleString("repositories.azure.location_mode", Property.NodeScope); public static final Setting CHUNK_SIZE_SETTING = - Setting.byteSizeSetting("repositories.azure.chunk_size", new ByteSizeValue(-1), SettingsProperty.ClusterScope); + Setting.byteSizeSetting("repositories.azure.chunk_size", new ByteSizeValue(-1), Property.NodeScope); public static final Setting COMPRESS_SETTING = - Setting.boolSetting("repositories.azure.compress", false, SettingsProperty.ClusterScope); + Setting.boolSetting("repositories.azure.compress", false, Property.NodeScope); } boolean doesContainerExist(String account, LocationMode mode, String container); diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java index 06185845ffb9..281ef79cb271 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java +++ 
b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java @@ -46,13 +46,13 @@ public final class AzureStorageSettings { TIMEOUT_KEY, (s) -> Storage.TIMEOUT_SETTING.get(s).toString(), (s) -> Setting.parseTimeValue(s, TimeValue.timeValueSeconds(-1), TIMEOUT_KEY.toString()), - Setting.SettingsProperty.ClusterScope); + Setting.Property.NodeScope); private static final Setting ACCOUNT_SETTING = - Setting.adfixKeySetting(Storage.PREFIX, ACCOUNT_SUFFIX, "", Function.identity(), Setting.SettingsProperty.ClusterScope); + Setting.adfixKeySetting(Storage.PREFIX, ACCOUNT_SUFFIX, "", Function.identity(), Setting.Property.NodeScope); private static final Setting KEY_SETTING = - Setting.adfixKeySetting(Storage.PREFIX, KEY_SUFFIX, "", Function.identity(), Setting.SettingsProperty.ClusterScope); + Setting.adfixKeySetting(Storage.PREFIX, KEY_SUFFIX, "", Function.identity(), Setting.Property.NodeScope); private static final Setting DEFAULT_SETTING = - Setting.adfixKeySetting(Storage.PREFIX, DEFAULT_SUFFIX, "false", Boolean::valueOf, Setting.SettingsProperty.ClusterScope); + Setting.adfixKeySetting(Storage.PREFIX, DEFAULT_SUFFIX, "false", Boolean::valueOf, Setting.Property.NodeScope); private final String name; @@ -112,7 +112,7 @@ public final class AzureStorageSettings { } private static List createStorageSettings(Settings settings) { - Setting storageGroupSetting = Setting.groupSetting(Storage.PREFIX, Setting.SettingsProperty.ClusterScope); + Setting storageGroupSetting = Setting.groupSetting(Storage.PREFIX, Setting.Property.NodeScope); // ignore global timeout which has the same prefix but does not belong to any group Settings groups = storageGroupSetting.get(settings.filter((k) -> k.equals(Storage.TIMEOUT_SETTING.getKey()) == false)); List storageSettings = new ArrayList<>(); diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java 
b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index 56b2d9fc2536..66db57fdd92d 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -30,7 +30,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -68,14 +68,14 @@ public class AzureRepository extends BlobStoreRepository { public final static String TYPE = "azure"; public static final class Repository { - public static final Setting ACCOUNT_SETTING = Setting.simpleString("account", SettingsProperty.ClusterScope); + public static final Setting ACCOUNT_SETTING = Setting.simpleString("account", Property.NodeScope); public static final Setting CONTAINER_SETTING = - new Setting<>("container", "elasticsearch-snapshots", Function.identity(), SettingsProperty.ClusterScope); - public static final Setting BASE_PATH_SETTING = Setting.simpleString("base_path", SettingsProperty.ClusterScope); - public static final Setting LOCATION_MODE_SETTING = Setting.simpleString("location_mode", SettingsProperty.ClusterScope); + new Setting<>("container", "elasticsearch-snapshots", Function.identity(), Property.NodeScope); + public static final Setting BASE_PATH_SETTING = Setting.simpleString("base_path", Property.NodeScope); + public static final Setting LOCATION_MODE_SETTING = Setting.simpleString("location_mode", Property.NodeScope); public static final Setting CHUNK_SIZE_SETTING = - 
Setting.byteSizeSetting("chunk_size", MAX_CHUNK_SIZE, SettingsProperty.ClusterScope); - public static final Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("chunk_size", MAX_CHUNK_SIZE, Property.NodeScope); + public static final Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, Property.NodeScope); } private final AzureBlobStore blobStore; diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java index 6f18bd3e6fd3..427c454fa280 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java @@ -23,7 +23,7 @@ import com.amazonaws.Protocol; import com.amazonaws.services.s3.AmazonS3; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import java.util.Locale; import java.util.function.Function; @@ -40,43 +40,43 @@ public interface AwsS3Service extends LifecycleComponent { * cloud.aws.access_key: AWS Access key. Shared with discovery-ec2 plugin */ Setting KEY_SETTING = - Setting.simpleString("cloud.aws.access_key", SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("cloud.aws.access_key", Property.NodeScope, Property.Filtered); /** * cloud.aws.secret_key: AWS Secret key. Shared with discovery-ec2 plugin */ Setting SECRET_SETTING = - Setting.simpleString("cloud.aws.secret_key", SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("cloud.aws.secret_key", Property.NodeScope, Property.Filtered); /** * cloud.aws.protocol: Protocol for AWS API: http or https. Defaults to https. 
Shared with discovery-ec2 plugin */ Setting PROTOCOL_SETTING = new Setting<>("cloud.aws.protocol", "https", s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), - SettingsProperty.ClusterScope); + Property.NodeScope); /** * cloud.aws.proxy.host: In case of proxy, define its hostname/IP. Shared with discovery-ec2 plugin */ - Setting PROXY_HOST_SETTING = Setting.simpleString("cloud.aws.proxy.host", SettingsProperty.ClusterScope); + Setting PROXY_HOST_SETTING = Setting.simpleString("cloud.aws.proxy.host", Property.NodeScope); /** * cloud.aws.proxy.port: In case of proxy, define its port. Defaults to 80. Shared with discovery-ec2 plugin */ - Setting PROXY_PORT_SETTING = Setting.intSetting("cloud.aws.proxy.port", 80, 0, 1<<16, SettingsProperty.ClusterScope); + Setting PROXY_PORT_SETTING = Setting.intSetting("cloud.aws.proxy.port", 80, 0, 1<<16, Property.NodeScope); /** * cloud.aws.proxy.username: In case of proxy with auth, define the username. Shared with discovery-ec2 plugin */ - Setting PROXY_USERNAME_SETTING = Setting.simpleString("cloud.aws.proxy.username", SettingsProperty.ClusterScope); + Setting PROXY_USERNAME_SETTING = Setting.simpleString("cloud.aws.proxy.username", Property.NodeScope); /** * cloud.aws.proxy.password: In case of proxy with auth, define the password. Shared with discovery-ec2 plugin */ Setting PROXY_PASSWORD_SETTING = - Setting.simpleString("cloud.aws.proxy.password", SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting.simpleString("cloud.aws.proxy.password", Property.NodeScope, Property.Filtered); /** * cloud.aws.signer: If you are using an old AWS API version, you can define a Signer. Shared with discovery-ec2 plugin */ - Setting SIGNER_SETTING = Setting.simpleString("cloud.aws.signer", SettingsProperty.ClusterScope); + Setting SIGNER_SETTING = Setting.simpleString("cloud.aws.signer", Property.NodeScope); /** * cloud.aws.region: Region. 
Shared with discovery-ec2 plugin */ Setting REGION_SETTING = - new Setting<>("cloud.aws.region", "", s -> s.toLowerCase(Locale.ROOT), SettingsProperty.ClusterScope); + new Setting<>("cloud.aws.region", "", s -> s.toLowerCase(Locale.ROOT), Property.NodeScope); /** * Defines specific s3 settings starting with cloud.aws.s3. @@ -88,35 +88,35 @@ public interface AwsS3Service extends LifecycleComponent { */ Setting KEY_SETTING = new Setting<>("cloud.aws.s3.access_key", AwsS3Service.KEY_SETTING, Function.identity(), - SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Property.NodeScope, Property.Filtered); /** * cloud.aws.s3.secret_key: AWS Secret key specific for S3 API calls. Defaults to cloud.aws.secret_key. * @see AwsS3Service#SECRET_SETTING */ Setting SECRET_SETTING = new Setting<>("cloud.aws.s3.secret_key", AwsS3Service.SECRET_SETTING, Function.identity(), - SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Property.NodeScope, Property.Filtered); /** * cloud.aws.s3.protocol: Protocol for AWS API specific for S3 API calls: http or https. Defaults to cloud.aws.protocol. * @see AwsS3Service#PROTOCOL_SETTING */ Setting PROTOCOL_SETTING = new Setting<>("cloud.aws.s3.protocol", AwsS3Service.PROTOCOL_SETTING, s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), - SettingsProperty.ClusterScope); + Property.NodeScope); /** * cloud.aws.s3.proxy.host: In case of proxy, define its hostname/IP specific for S3 API calls. Defaults to cloud.aws.proxy.host. * @see AwsS3Service#PROXY_HOST_SETTING */ Setting PROXY_HOST_SETTING = new Setting<>("cloud.aws.s3.proxy.host", AwsS3Service.PROXY_HOST_SETTING, Function.identity(), - SettingsProperty.ClusterScope); + Property.NodeScope); /** * cloud.aws.s3.proxy.port: In case of proxy, define its port specific for S3 API calls. Defaults to cloud.aws.proxy.port. 
* @see AwsS3Service#PROXY_PORT_SETTING */ Setting PROXY_PORT_SETTING = new Setting<>("cloud.aws.s3.proxy.port", AwsS3Service.PROXY_PORT_SETTING, - s -> Setting.parseInt(s, 0, 1<<16, "cloud.aws.s3.proxy.port"), SettingsProperty.ClusterScope); + s -> Setting.parseInt(s, 0, 1<<16, "cloud.aws.s3.proxy.port"), Property.NodeScope); /** * cloud.aws.s3.proxy.username: In case of proxy with auth, define the username specific for S3 API calls. * Defaults to cloud.aws.proxy.username. @@ -124,7 +124,7 @@ public interface AwsS3Service extends LifecycleComponent { */ Setting PROXY_USERNAME_SETTING = new Setting<>("cloud.aws.s3.proxy.username", AwsS3Service.PROXY_USERNAME_SETTING, Function.identity(), - SettingsProperty.ClusterScope); + Property.NodeScope); /** * cloud.aws.s3.proxy.password: In case of proxy with auth, define the password specific for S3 API calls. * Defaults to cloud.aws.proxy.password. @@ -132,25 +132,25 @@ public interface AwsS3Service extends LifecycleComponent { */ Setting PROXY_PASSWORD_SETTING = new Setting<>("cloud.aws.s3.proxy.password", AwsS3Service.PROXY_PASSWORD_SETTING, Function.identity(), - SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Property.NodeScope, Property.Filtered); /** * cloud.aws.s3.signer: If you are using an old AWS API version, you can define a Signer. Specific for S3 API calls. * Defaults to cloud.aws.signer. * @see AwsS3Service#SIGNER_SETTING */ Setting SIGNER_SETTING = - new Setting<>("cloud.aws.s3.signer", AwsS3Service.SIGNER_SETTING, Function.identity(), SettingsProperty.ClusterScope); + new Setting<>("cloud.aws.s3.signer", AwsS3Service.SIGNER_SETTING, Function.identity(), Property.NodeScope); /** * cloud.aws.s3.region: Region specific for S3 API calls. Defaults to cloud.aws.region. 
* @see AwsS3Service#REGION_SETTING */ Setting REGION_SETTING = new Setting<>("cloud.aws.s3.region", AwsS3Service.REGION_SETTING, s -> s.toLowerCase(Locale.ROOT), - SettingsProperty.ClusterScope); + Property.NodeScope); /** * cloud.aws.s3.endpoint: Endpoint. If not set, endpoint will be guessed based on region setting. */ - Setting ENDPOINT_SETTING = Setting.simpleString("cloud.aws.s3.endpoint", SettingsProperty.ClusterScope); + Setting ENDPOINT_SETTING = Setting.simpleString("cloud.aws.s3.endpoint", Property.NodeScope); } AmazonS3 client(String endpoint, Protocol protocol, String region, String account, String key, Integer maxRetries); diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index dc0915cd2769..fde774a6b92e 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -28,7 +28,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.snapshots.IndexShardRepository; @@ -67,41 +67,41 @@ public class S3Repository extends BlobStoreRepository { * @see CLOUD_S3#KEY_SETTING */ Setting KEY_SETTING = - new Setting<>("repositories.s3.access_key", CLOUD_S3.KEY_SETTING, Function.identity(), SettingsProperty.ClusterScope); + new Setting<>("repositories.s3.access_key", CLOUD_S3.KEY_SETTING, Function.identity(), Property.NodeScope); /** * repositories.s3.secret_key: AWS Secret key specific for 
all S3 Repositories API calls. Defaults to cloud.aws.s3.secret_key. * @see CLOUD_S3#SECRET_SETTING */ Setting SECRET_SETTING = - new Setting<>("repositories.s3.secret_key", CLOUD_S3.SECRET_SETTING, Function.identity(), SettingsProperty.ClusterScope); + new Setting<>("repositories.s3.secret_key", CLOUD_S3.SECRET_SETTING, Function.identity(), Property.NodeScope); /** * repositories.s3.region: Region specific for all S3 Repositories API calls. Defaults to cloud.aws.s3.region. * @see CLOUD_S3#REGION_SETTING */ Setting REGION_SETTING = - new Setting<>("repositories.s3.region", CLOUD_S3.REGION_SETTING, s -> s.toLowerCase(Locale.ROOT), SettingsProperty.ClusterScope); + new Setting<>("repositories.s3.region", CLOUD_S3.REGION_SETTING, s -> s.toLowerCase(Locale.ROOT), Property.NodeScope); /** * repositories.s3.endpoint: Endpoint specific for all S3 Repositories API calls. Defaults to cloud.aws.s3.endpoint. * @see CLOUD_S3#ENDPOINT_SETTING */ Setting ENDPOINT_SETTING = - new Setting<>("repositories.s3.endpoint", CLOUD_S3.ENDPOINT_SETTING, s -> s.toLowerCase(Locale.ROOT), SettingsProperty.ClusterScope); + new Setting<>("repositories.s3.endpoint", CLOUD_S3.ENDPOINT_SETTING, s -> s.toLowerCase(Locale.ROOT), Property.NodeScope); /** * repositories.s3.protocol: Protocol specific for all S3 Repositories API calls. Defaults to cloud.aws.s3.protocol. * @see CLOUD_S3#PROTOCOL_SETTING */ Setting PROTOCOL_SETTING = - new Setting<>("repositories.s3.protocol", CLOUD_S3.PROTOCOL_SETTING, s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), SettingsProperty.ClusterScope); + new Setting<>("repositories.s3.protocol", CLOUD_S3.PROTOCOL_SETTING, s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope); /** * repositories.s3.bucket: The name of the bucket to be used for snapshots. 
*/ - Setting BUCKET_SETTING = Setting.simpleString("repositories.s3.bucket", SettingsProperty.ClusterScope); + Setting BUCKET_SETTING = Setting.simpleString("repositories.s3.bucket", Property.NodeScope); /** * repositories.s3.server_side_encryption: When set to true files are encrypted on server side using AES256 algorithm. * Defaults to false. */ Setting SERVER_SIDE_ENCRYPTION_SETTING = - Setting.boolSetting("repositories.s3.server_side_encryption", false, SettingsProperty.ClusterScope); + Setting.boolSetting("repositories.s3.server_side_encryption", false, Property.NodeScope); /** * repositories.s3.buffer_size: Minimum threshold below which the chunk is uploaded using a single request. Beyond this threshold, * the S3 repository will use the AWS Multipart Upload API to split the chunk into several parts, each of buffer_size length, and @@ -109,35 +109,35 @@ public class S3Repository extends BlobStoreRepository { * use of the Multipart API and may result in upload errors. Defaults to 5mb. */ Setting BUFFER_SIZE_SETTING = - Setting.byteSizeSetting("repositories.s3.buffer_size", S3BlobStore.MIN_BUFFER_SIZE, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("repositories.s3.buffer_size", S3BlobStore.MIN_BUFFER_SIZE, Property.NodeScope); /** * repositories.s3.max_retries: Number of retries in case of S3 errors. Defaults to 3. */ - Setting MAX_RETRIES_SETTING = Setting.intSetting("repositories.s3.max_retries", 3, SettingsProperty.ClusterScope); + Setting MAX_RETRIES_SETTING = Setting.intSetting("repositories.s3.max_retries", 3, Property.NodeScope); /** * repositories.s3.chunk_size: Big files can be broken down into chunks during snapshotting if needed. Defaults to 100m. 
*/ Setting CHUNK_SIZE_SETTING = - Setting.byteSizeSetting("repositories.s3.chunk_size", new ByteSizeValue(100, ByteSizeUnit.MB), SettingsProperty.ClusterScope); + Setting.byteSizeSetting("repositories.s3.chunk_size", new ByteSizeValue(100, ByteSizeUnit.MB), Property.NodeScope); /** * repositories.s3.compress: When set to true metadata files are stored in compressed format. This setting doesn’t affect index * files that are already compressed by default. Defaults to false. */ - Setting COMPRESS_SETTING = Setting.boolSetting("repositories.s3.compress", false, SettingsProperty.ClusterScope); + Setting COMPRESS_SETTING = Setting.boolSetting("repositories.s3.compress", false, Property.NodeScope); /** * repositories.s3.storage_class: Sets the S3 storage class type for the backup files. Values may be standard, reduced_redundancy, * standard_ia. Defaults to standard. */ - Setting STORAGE_CLASS_SETTING = Setting.simpleString("repositories.s3.storage_class", SettingsProperty.ClusterScope); + Setting STORAGE_CLASS_SETTING = Setting.simpleString("repositories.s3.storage_class", Property.NodeScope); /** * repositories.s3.canned_acl: The S3 repository supports all S3 canned ACLs : private, public-read, public-read-write, * authenticated-read, log-delivery-write, bucket-owner-read, bucket-owner-full-control. Defaults to private. */ - Setting CANNED_ACL_SETTING = Setting.simpleString("repositories.s3.canned_acl", SettingsProperty.ClusterScope); + Setting CANNED_ACL_SETTING = Setting.simpleString("repositories.s3.canned_acl", Property.NodeScope); /** * repositories.s3.base_path: Specifies the path within bucket to repository data. Defaults to root directory. 
*/ - Setting BASE_PATH_SETTING = Setting.simpleString("repositories.s3.base_path", SettingsProperty.ClusterScope); + Setting BASE_PATH_SETTING = Setting.simpleString("repositories.s3.base_path", Property.NodeScope); } /** @@ -149,75 +149,75 @@ public class S3Repository extends BlobStoreRepository { * access_key * @see Repositories#KEY_SETTING */ - Setting KEY_SETTING = Setting.simpleString("access_key", SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting KEY_SETTING = Setting.simpleString("access_key", Property.NodeScope, Property.Filtered); /** * secret_key * @see Repositories#SECRET_SETTING */ - Setting SECRET_SETTING = Setting.simpleString("secret_key", SettingsProperty.ClusterScope, SettingsProperty.Filtered); + Setting SECRET_SETTING = Setting.simpleString("secret_key", Property.NodeScope, Property.Filtered); /** * bucket * @see Repositories#BUCKET_SETTING */ - Setting BUCKET_SETTING = Setting.simpleString("bucket", SettingsProperty.ClusterScope); + Setting BUCKET_SETTING = Setting.simpleString("bucket", Property.NodeScope); /** * endpoint * @see Repositories#ENDPOINT_SETTING */ - Setting ENDPOINT_SETTING = Setting.simpleString("endpoint", SettingsProperty.ClusterScope); + Setting ENDPOINT_SETTING = Setting.simpleString("endpoint", Property.NodeScope); /** * protocol * @see Repositories#PROTOCOL_SETTING */ Setting PROTOCOL_SETTING = - new Setting<>("protocol", "https", s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), SettingsProperty.ClusterScope); + new Setting<>("protocol", "https", s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope); /** * region * @see Repositories#REGION_SETTING */ - Setting REGION_SETTING = new Setting<>("region", "", s -> s.toLowerCase(Locale.ROOT), SettingsProperty.ClusterScope); + Setting REGION_SETTING = new Setting<>("region", "", s -> s.toLowerCase(Locale.ROOT), Property.NodeScope); /** * server_side_encryption * @see Repositories#SERVER_SIDE_ENCRYPTION_SETTING */ Setting 
SERVER_SIDE_ENCRYPTION_SETTING = - Setting.boolSetting("server_side_encryption", false, SettingsProperty.ClusterScope); + Setting.boolSetting("server_side_encryption", false, Property.NodeScope); /** * buffer_size * @see Repositories#BUFFER_SIZE_SETTING */ Setting BUFFER_SIZE_SETTING = - Setting.byteSizeSetting("buffer_size", S3BlobStore.MIN_BUFFER_SIZE, SettingsProperty.ClusterScope); + Setting.byteSizeSetting("buffer_size", S3BlobStore.MIN_BUFFER_SIZE, Property.NodeScope); /** * max_retries * @see Repositories#MAX_RETRIES_SETTING */ - Setting MAX_RETRIES_SETTING = Setting.intSetting("max_retries", 3, SettingsProperty.ClusterScope); + Setting MAX_RETRIES_SETTING = Setting.intSetting("max_retries", 3, Property.NodeScope); /** * chunk_size * @see Repositories#CHUNK_SIZE_SETTING */ - Setting CHUNK_SIZE_SETTING = Setting.byteSizeSetting("chunk_size", "-1", SettingsProperty.ClusterScope); + Setting CHUNK_SIZE_SETTING = Setting.byteSizeSetting("chunk_size", "-1", Property.NodeScope); /** * compress * @see Repositories#COMPRESS_SETTING */ - Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, SettingsProperty.ClusterScope); + Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, Property.NodeScope); /** * storage_class * @see Repositories#STORAGE_CLASS_SETTING */ - Setting STORAGE_CLASS_SETTING = Setting.simpleString("storage_class", SettingsProperty.ClusterScope); + Setting STORAGE_CLASS_SETTING = Setting.simpleString("storage_class", Property.NodeScope); /** * canned_acl * @see Repositories#CANNED_ACL_SETTING */ - Setting CANNED_ACL_SETTING = Setting.simpleString("canned_acl", SettingsProperty.ClusterScope); + Setting CANNED_ACL_SETTING = Setting.simpleString("canned_acl", Property.NodeScope); /** * base_path * @see Repositories#BASE_PATH_SETTING */ - Setting BASE_PATH_SETTING = Setting.simpleString("base_path", SettingsProperty.ClusterScope); + Setting BASE_PATH_SETTING = Setting.simpleString("base_path", Property.NodeScope); } private final 
S3BlobStore blobStore; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 22a06957d387..d92f450d689e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -77,7 +77,7 @@ import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.SettingsProperty; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.transport.InetSocketTransportAddress; @@ -197,7 +197,7 @@ import static org.hamcrest.Matchers.startsWith; * should be used, here is an example: *
        *
      - * {@literal @}ClusterScope(scope=Scope.TEST) public class SomeIT extends ESIntegTestCase {
      + * {@literal @}ClusterScope(scope=Scope.TEST) public class SomeIT extends ESIntegTestCase {
        * public void testMethod() {}
        * }
        * 
      @@ -208,7 +208,7 @@ import static org.hamcrest.Matchers.startsWith; * determined at random and can change across tests. The {@link ClusterScope} allows configuring the initial number of nodes * that are created before the tests start. *
      - * {@literal @}ClusterScope(scope=Scope.SUITE, numDataNodes=3)
      + * {@literal @}ClusterScope(scope=Scope.SUITE, numDataNodes=3)
        * public class SomeIT extends ESIntegTestCase {
        * public void testMethod() {}
        * }
      @@ -270,7 +270,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
            * It's set once per test via a generic index template.
            */
           public static final Setting INDEX_TEST_SEED_SETTING =
      -        Setting.longSetting("index.tests.seed", 0, Long.MIN_VALUE, SettingsProperty.IndexScope);
      +        Setting.longSetting("index.tests.seed", 0, Long.MIN_VALUE, Property.IndexScope);
       
           /**
            * A boolean value to enable or disable mock modules. This is useful to test the
      diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java
      index f10039391dbe..4a03389cb9c0 100644
      --- a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java
      +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java
      @@ -20,7 +20,7 @@ package org.elasticsearch.test;
       
       import org.elasticsearch.cluster.metadata.IndexMetaData;
       import org.elasticsearch.common.settings.Setting;
      -import org.elasticsearch.common.settings.Setting.SettingsProperty;
      +import org.elasticsearch.common.settings.Setting.Property;
       import org.elasticsearch.common.settings.SettingsModule;
       import org.elasticsearch.plugins.Plugin;
       
      @@ -36,11 +36,11 @@ public final class InternalSettingsPlugin extends Plugin {
           }
       
           public static final Setting VERSION_CREATED =
      -        Setting.intSetting("index.version.created", 0, SettingsProperty.IndexScope);
      +        Setting.intSetting("index.version.created", 0, Property.IndexScope);
           public static final Setting MERGE_ENABLED =
      -        Setting.boolSetting("index.merge.enabled", true, SettingsProperty.IndexScope);
      +        Setting.boolSetting("index.merge.enabled", true, Property.IndexScope);
           public static final Setting INDEX_CREATION_DATE_SETTING =
      -        Setting.longSetting(IndexMetaData.SETTING_CREATION_DATE, -1, -1, SettingsProperty.IndexScope);
      +        Setting.longSetting(IndexMetaData.SETTING_CREATION_DATE, -1, -1, Property.IndexScope);
       
           public void onModule(SettingsModule module) {
               module.registerSetting(VERSION_CREATED);
      diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java b/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java
      index d7bc9a7e0db5..f17fe024f14d 100644
      --- a/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java
      +++ b/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java
      @@ -22,7 +22,7 @@ import org.elasticsearch.cluster.routing.ShardRouting;
       import org.elasticsearch.common.Nullable;
       import org.elasticsearch.common.inject.Module;
       import org.elasticsearch.common.settings.Setting;
      -import org.elasticsearch.common.settings.Setting.SettingsProperty;
      +import org.elasticsearch.common.settings.Setting.Property;
       import org.elasticsearch.common.settings.Settings;
       import org.elasticsearch.common.settings.SettingsModule;
       import org.elasticsearch.index.Index;
      @@ -64,7 +64,7 @@ public final class MockIndexEventListener {
               /**
                * For tests to pass in to fail on listener invocation
                */
      -        public static final Setting INDEX_FAIL = Setting.boolSetting("index.fail", false, SettingsProperty.IndexScope);
      +        public static final Setting INDEX_FAIL = Setting.boolSetting("index.fail", false, Property.IndexScope);
               public void onModule(SettingsModule module) {
                   module.registerSetting(INDEX_FAIL);
               }
      diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java
      index 2fad1fc05e88..bf32b6b85750 100644
      --- a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java
      +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java
      @@ -31,7 +31,7 @@ import org.elasticsearch.ElasticsearchException;
       import org.elasticsearch.common.logging.ESLogger;
       import org.elasticsearch.common.logging.Loggers;
       import org.elasticsearch.common.settings.Setting;
      -import org.elasticsearch.common.settings.Setting.SettingsProperty;
      +import org.elasticsearch.common.settings.Setting.Property;
       import org.elasticsearch.common.settings.Settings;
       import org.elasticsearch.index.engine.Engine;
       import org.elasticsearch.index.engine.EngineConfig;
      @@ -57,12 +57,12 @@ public final class MockEngineSupport {
            * slow if {@link org.apache.lucene.index.AssertingDirectoryReader} is used.
            */
           public static final Setting WRAP_READER_RATIO =
      -        Setting.doubleSetting("index.engine.mock.random.wrap_reader_ratio", 0.0d, 0.0d, SettingsProperty.IndexScope);
      +        Setting.doubleSetting("index.engine.mock.random.wrap_reader_ratio", 0.0d, 0.0d, Property.IndexScope);
           /**
            * Allows tests to prevent an engine from being flushed on close ie. to test translog recovery...
            */
           public static final Setting DISABLE_FLUSH_ON_CLOSE =
      -        Setting.boolSetting("index.mock.disable_flush_on_close", false, SettingsProperty.IndexScope);
      +        Setting.boolSetting("index.mock.disable_flush_on_close", false, Property.IndexScope);
       
       
           private final AtomicBoolean closing = new AtomicBoolean(false);
      diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java
      index 2cb5367e642e..d267e9d7d51a 100644
      --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java
      +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java
      @@ -23,7 +23,6 @@ import com.carrotsearch.randomizedtesting.SeedUtils;
       import com.carrotsearch.randomizedtesting.generators.RandomPicks;
       
       import org.apache.lucene.index.CheckIndex;
      -import org.apache.lucene.index.IndexWriter;
       import org.apache.lucene.store.BaseDirectoryWrapper;
       import org.apache.lucene.store.Directory;
       import org.apache.lucene.store.LockFactory;
      @@ -32,14 +31,13 @@ import org.apache.lucene.store.MockDirectoryWrapper;
       import org.apache.lucene.store.StoreRateLimiting;
       import org.apache.lucene.util.LuceneTestCase;
       import org.apache.lucene.util.TestRuleMarkFailure;
      -import org.elasticsearch.cluster.metadata.AliasOrIndex;
       import org.elasticsearch.cluster.metadata.IndexMetaData;
       import org.elasticsearch.common.inject.Inject;
       import org.elasticsearch.common.io.stream.BytesStreamOutput;
       import org.elasticsearch.common.logging.ESLogger;
       import org.elasticsearch.common.lucene.Lucene;
       import org.elasticsearch.common.settings.Setting;
      -import org.elasticsearch.common.settings.Setting.SettingsProperty;
      +import org.elasticsearch.common.settings.Setting.Property;
       import org.elasticsearch.common.settings.Settings;
       import org.elasticsearch.index.IndexModule;
       import org.elasticsearch.index.IndexSettings;
      @@ -63,15 +61,15 @@ import java.util.Random;
       public class MockFSDirectoryService extends FsDirectoryService {
       
           public static final Setting RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING =
      -        Setting.doubleSetting("index.store.mock.random.io_exception_rate_on_open", 0.0d,  0.0d, SettingsProperty.IndexScope);
      +        Setting.doubleSetting("index.store.mock.random.io_exception_rate_on_open", 0.0d,  0.0d, Property.IndexScope);
           public static final Setting RANDOM_IO_EXCEPTION_RATE_SETTING =
      -        Setting.doubleSetting("index.store.mock.random.io_exception_rate", 0.0d,  0.0d, SettingsProperty.IndexScope);
      +        Setting.doubleSetting("index.store.mock.random.io_exception_rate", 0.0d,  0.0d, Property.IndexScope);
           public static final Setting RANDOM_PREVENT_DOUBLE_WRITE_SETTING =
      -        Setting.boolSetting("index.store.mock.random.prevent_double_write", true, SettingsProperty.IndexScope);// true is default in MDW
      +        Setting.boolSetting("index.store.mock.random.prevent_double_write", true, Property.IndexScope);// true is default in MDW
           public static final Setting RANDOM_NO_DELETE_OPEN_FILE_SETTING =
      -        Setting.boolSetting("index.store.mock.random.no_delete_open_file", true, SettingsProperty.IndexScope);// true is default in MDW
      +        Setting.boolSetting("index.store.mock.random.no_delete_open_file", true, Property.IndexScope);// true is default in MDW
           public static final Setting CRASH_INDEX_SETTING =
      -        Setting.boolSetting("index.store.mock.random.crash_index", true, SettingsProperty.IndexScope);// true is default in MDW
      +        Setting.boolSetting("index.store.mock.random.crash_index", true, Property.IndexScope);// true is default in MDW
       
           private final FsDirectoryService delegateService;
           private final Random random;
      diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java
      index 3d535d677993..44e3ad598ebb 100644
      --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java
      +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java
      @@ -23,7 +23,7 @@ import org.elasticsearch.common.Nullable;
       import org.elasticsearch.common.logging.ESLogger;
       import org.elasticsearch.common.logging.Loggers;
       import org.elasticsearch.common.settings.Setting;
      -import org.elasticsearch.common.settings.Setting.SettingsProperty;
      +import org.elasticsearch.common.settings.Setting.Property;
       import org.elasticsearch.common.settings.Settings;
       import org.elasticsearch.common.settings.SettingsModule;
       import org.elasticsearch.index.IndexModule;
      @@ -46,7 +46,7 @@ import java.util.Map;
       public class MockFSIndexStore extends IndexStore {
       
           public static final Setting INDEX_CHECK_INDEX_ON_CLOSE_SETTING =
      -        Setting.boolSetting("index.store.mock.check_index_on_close", true, SettingsProperty.IndexScope);
      +        Setting.boolSetting("index.store.mock.check_index_on_close", true, Property.IndexScope);
       
           public static class TestPlugin extends Plugin {
               @Override
      diff --git a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java
      index 6009929e38e7..fc090e151a36 100644
      --- a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java
      +++ b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java
      @@ -20,7 +20,7 @@
       package org.elasticsearch.test.tasks;
       
       import org.elasticsearch.common.settings.Setting;
      -import org.elasticsearch.common.settings.Setting.SettingsProperty;
      +import org.elasticsearch.common.settings.Setting.Property;
       import org.elasticsearch.common.settings.Settings;
       import org.elasticsearch.tasks.Task;
       import org.elasticsearch.tasks.TaskManager;
      @@ -35,7 +35,7 @@ import java.util.concurrent.CopyOnWriteArrayList;
       public class MockTaskManager extends TaskManager {
       
           public static final Setting USE_MOCK_TASK_MANAGER_SETTING =
      -        Setting.boolSetting("tests.mock.taskmanager.enabled", false, SettingsProperty.ClusterScope);
      +        Setting.boolSetting("tests.mock.taskmanager.enabled", false, Property.NodeScope);
       
           private final Collection listeners = new CopyOnWriteArrayList<>();
       
      diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java
      index 88cdd325448d..322882a7b3c5 100644
      --- a/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java
      +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java
      @@ -25,7 +25,7 @@ import org.elasticsearch.common.inject.Inject;
       import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
       import org.elasticsearch.common.network.NetworkModule;
       import org.elasticsearch.common.settings.Setting;
      -import org.elasticsearch.common.settings.Setting.SettingsProperty;
      +import org.elasticsearch.common.settings.Setting.Property;
       import org.elasticsearch.common.settings.Settings;
       import org.elasticsearch.common.settings.SettingsModule;
       import org.elasticsearch.plugins.Plugin;
      @@ -70,10 +70,10 @@ public class AssertingLocalTransport extends LocalTransport {
       
           public static final Setting ASSERTING_TRANSPORT_MIN_VERSION_KEY =
               new Setting<>("transport.asserting.version.min", Integer.toString(Version.CURRENT.minimumCompatibilityVersion().id),
      -            (s) -> Version.fromId(Integer.parseInt(s)), SettingsProperty.ClusterScope);
      +            (s) -> Version.fromId(Integer.parseInt(s)), Property.NodeScope);
           public static final Setting ASSERTING_TRANSPORT_MAX_VERSION_KEY =
               new Setting<>("transport.asserting.version.max", Integer.toString(Version.CURRENT.id),
      -            (s) -> Version.fromId(Integer.parseInt(s)), SettingsProperty.ClusterScope);
      +            (s) -> Version.fromId(Integer.parseInt(s)), Property.NodeScope);
           private final Random random;
           private final Version minVersion;
           private final Version maxVersion;
      
      From 5c2ca3c9f526ae2bba0ed022d62e925fb64c3900 Mon Sep 17 00:00:00 2001
      From: David Pilato 
      Date: Fri, 4 Mar 2016 17:05:31 +0100
      Subject: [PATCH 053/320] Check that we must have one and only one scope for a 
       Setting
      
      ---
       .../org/elasticsearch/common/settings/Setting.java  | 12 ++++--------
       .../elasticsearch/common/settings/SettingTests.java | 13 +++++++------
       2 files changed, 11 insertions(+), 14 deletions(-)
      
      diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
      index 7464e06c1797..606326dd2958 100644
      --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java
      +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
      @@ -119,12 +119,7 @@ public class Setting extends ToXContentToBytes {
               this.key = key;
               this.defaultValue = defaultValue;
               this.parser = parser;
      -        if (properties.length == 0) {
      -            this.properties = EnumSet.of(Property.NodeScope);
      -        } else {
      -            this.properties = EnumSet.copyOf(Arrays.asList(properties));
      -        }
      -        // We validate scope settings. They are mutually exclusive
      +        // We validate scope settings. We should have one and only one.
               int numScopes = 0;
               for (Property property : properties) {
                   if (property == Property.NodeScope ||
      @@ -132,9 +127,10 @@ public class Setting extends ToXContentToBytes {
                       numScopes++;
                   }
               }
      -        if (numScopes > 1) {
      -            throw new IllegalArgumentException("More than one scope has been added to the setting [" + key + "]");
      +        if (numScopes != 1) {
      +            throw new IllegalArgumentException("Zero or more than one scope has been added to the setting [" + key + "]");
               }
      +        this.properties = EnumSet.copyOf(Arrays.asList(properties));
           }
       
           /**
      diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
      index e8f754e15c53..b26540920679 100644
      --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
      +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
      @@ -435,17 +435,18 @@ public class SettingTests extends ESTestCase {
               assertThat(setting.hasIndexScope(), is(true));
               assertThat(setting.hasNodeScope(), is(false));
       
      -        // We test the default scope
      -        setting = Setting.simpleString("foo.bar");
      -        assertThat(setting.hasNodeScope(), is(true));
      -        assertThat(setting.hasIndexScope(), is(false));
      -
               // Those should fail
      +        try {
      +            Setting.simpleString("foo.bar");
      +            fail("Zero scope should fail");
      +        } catch (IllegalArgumentException e) {
      +            assertThat(e.getMessage(), containsString("Zero or more than one scope has been added to the setting"));
      +        }
               try {
                   Setting.simpleString("foo.bar", Property.IndexScope, Property.NodeScope);
                   fail("Multiple scopes should fail");
               } catch (IllegalArgumentException e) {
      -            assertThat(e.getMessage(), containsString("More than one scope has been added to the setting"));
      +            assertThat(e.getMessage(), containsString("Zero or more than one scope has been added to the setting"));
               }
           }
       }
      
      From d079830f10ca161dbf0280c845df33de5425be72 Mon Sep 17 00:00:00 2001
      From: Nik Everett 
      Date: Fri, 4 Mar 2016 14:34:30 -0500
      Subject: [PATCH 054/320] [docs] Fix bad link in reindex docs
      
      ---
       docs/reference/docs/reindex.asciidoc         | 2 +-
       docs/reference/docs/update-by-query.asciidoc | 2 +-
       2 files changed, 2 insertions(+), 2 deletions(-)
      
      diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc
      index c8d59cb2477f..1b5483d5ee12 100644
      --- a/docs/reference/docs/reindex.asciidoc
      +++ b/docs/reference/docs/reindex.asciidoc
      @@ -277,7 +277,7 @@ POST /_reindex
       --------------------------------------------------
       // AUTOSENSE
       
      -Reindex can also use the link:ingest.html[Ingest] feature by specifying a
      +Reindex can also use the <> feature by specifying a
       `pipeline` like this:
       
       [source,js]
      diff --git a/docs/reference/docs/update-by-query.asciidoc b/docs/reference/docs/update-by-query.asciidoc
      index 9aac11b2c863..c3f57deeaae7 100644
      --- a/docs/reference/docs/update-by-query.asciidoc
      +++ b/docs/reference/docs/update-by-query.asciidoc
      @@ -138,7 +138,7 @@ POST /twitter/_update_by_query?scroll_size=1000
       --------------------------------------------------
       // AUTOSENSE
       
      -`_update_by_query` can also use the link:ingest.html[Ingest] feature by
      +`_update_by_query` can also use the <> feature by
       specifying a `pipeline` like this:
       
       [source,js]
      
      From 8321d7c5c24444b82cba2f2b0468564a525688fb Mon Sep 17 00:00:00 2001
      From: Ryan Ernst 
      Date: Fri, 4 Mar 2016 12:11:25 -0800
      Subject: [PATCH 055/320] Catch option error during execution too, since
       OptionSet is passed there
      
      ---
       core/src/main/java/org/elasticsearch/cli/Command.java | 8 ++++++--
       1 file changed, 6 insertions(+), 2 deletions(-)
      
      diff --git a/core/src/main/java/org/elasticsearch/cli/Command.java b/core/src/main/java/org/elasticsearch/cli/Command.java
      index bc44a8eb6354..d688347099d8 100644
      --- a/core/src/main/java/org/elasticsearch/cli/Command.java
      +++ b/core/src/main/java/org/elasticsearch/cli/Command.java
      @@ -56,7 +56,7 @@ public abstract class Command {
                   options = parser.parse(args);
               } catch (OptionException e) {
                   printHelp(terminal);
      -            terminal.println(Terminal.Verbosity.SILENT, "ERROR: " + e.getMessage());
      +            terminal.println("ERROR: " + e.getMessage());
                   return ExitCodes.USAGE;
               }
       
      @@ -69,7 +69,7 @@ public abstract class Command {
                   if (options.has(verboseOption)) {
                       // mutually exclusive, we can remove this with jopt-simple 5.0, which natively supports it
                       printHelp(terminal);
      -                terminal.println(Terminal.Verbosity.SILENT, "ERROR: Cannot specify -s and -v together");
      +                terminal.println("ERROR: Cannot specify -s and -v together");
                       return ExitCodes.USAGE;
                   }
                   terminal.setVerbosity(Terminal.Verbosity.SILENT);
      @@ -81,6 +81,10 @@ public abstract class Command {
       
               try {
                   return execute(terminal, options);
      +        } catch (OptionException e) {
      +            printHelp(terminal);
      +            terminal.println(Terminal.Verbosity.SILENT, "ERROR: " + e.getMessage());
      +            return ExitCodes.USAGE;
               } catch (UserError e) {
                   terminal.println(Terminal.Verbosity.SILENT, "ERROR: " + e.getMessage());
                   return e.exitCode;
      
      From bde97e1d9c2fe2104065e95f78d98cf9954c4b28 Mon Sep 17 00:00:00 2001
      From: David Pilato 
      Date: Fri, 4 Mar 2016 21:44:01 +0100
      Subject: [PATCH 056/320] Move validation logic to
       `SettingsModule.registerSetting`
      
      ---
       .../common/settings/Setting.java              | 17 +++++---------
       .../common/settings/SettingsModule.java       |  5 ++++
       .../common/settings/SettingTests.java         | 22 ++++++++----------
       .../common/settings/SettingsModuleTests.java  | 23 +++++++++++++++++++
       4 files changed, 43 insertions(+), 24 deletions(-)
      
      diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
      index 606326dd2958..44e440500ffb 100644
      --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java
      +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
      @@ -38,7 +38,9 @@ import org.elasticsearch.common.xcontent.XContentType;
       import java.io.IOException;
       import java.util.ArrayList;
       import java.util.Arrays;
      +import java.util.Collections;
       import java.util.EnumSet;
      +import java.util.Enumeration;
       import java.util.List;
       import java.util.Objects;
       import java.util.function.BiConsumer;
      @@ -119,18 +121,11 @@ public class Setting extends ToXContentToBytes {
               this.key = key;
               this.defaultValue = defaultValue;
               this.parser = parser;
      -        // We validate scope settings. We should have one and only one.
      -        int numScopes = 0;
      -        for (Property property : properties) {
      -            if (property == Property.NodeScope ||
      -                property == Property.IndexScope) {
      -                numScopes++;
      -            }
      +        if (properties.length == 0) {
      +            this.properties = EnumSet.noneOf(Property.class);
      +        } else {
      +            this.properties = EnumSet.copyOf(Arrays.asList(properties));
               }
      -        if (numScopes != 1) {
      -            throw new IllegalArgumentException("Zero or more than one scope has been added to the setting [" + key + "]");
      -        }
      -        this.properties = EnumSet.copyOf(Arrays.asList(properties));
           }
       
           /**
      diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java
      index ee770f747562..9fc2ee257a00 100644
      --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java
      +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java
      @@ -76,6 +76,11 @@ public class SettingsModule extends AbstractModule {
                       registerSettingsFilter(setting.getKey());
                   }
               }
      +
      +        // We validate scope settings. We should have one and only one scope.
      +        if (setting.hasNodeScope() && setting.hasIndexScope()) {
      +            throw new IllegalArgumentException("More than one scope has been added to the setting [" + setting.getKey() + "]");
      +        }
               if (setting.hasNodeScope()) {
                   if (nodeSettings.containsKey(setting.getKey())) {
                       throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice");
      diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
      index b26540920679..1c1f06f5914f 100644
      --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
      +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
      @@ -435,18 +435,14 @@ public class SettingTests extends ESTestCase {
               assertThat(setting.hasIndexScope(), is(true));
               assertThat(setting.hasNodeScope(), is(false));
       
      -        // Those should fail
      -        try {
      -            Setting.simpleString("foo.bar");
      -            fail("Zero scope should fail");
      -        } catch (IllegalArgumentException e) {
      -            assertThat(e.getMessage(), containsString("Zero or more than one scope has been added to the setting"));
      -        }
      -        try {
      -            Setting.simpleString("foo.bar", Property.IndexScope, Property.NodeScope);
      -            fail("Multiple scopes should fail");
      -        } catch (IllegalArgumentException e) {
      -            assertThat(e.getMessage(), containsString("Zero or more than one scope has been added to the setting"));
      -        }
      +        // We accept settings with no scope but they will be rejected when we register with SettingsModule.registerSetting
      +        setting = Setting.simpleString("foo.bar");
      +        assertThat(setting.hasIndexScope(), is(false));
      +        assertThat(setting.hasNodeScope(), is(false));
      +
      +        // We accept settings with multiple scopes but they will be rejected when we register with SettingsModule.registerSetting
      +        setting = Setting.simpleString("foo.bar", Property.IndexScope, Property.NodeScope);
      +        assertThat(setting.hasIndexScope(), is(true));
      +        assertThat(setting.hasNodeScope(), is(true));
           }
       }
      diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java
      index a8b5824f8ec9..bc6afda9a011 100644
      --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java
      +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java
      @@ -22,6 +22,9 @@ package org.elasticsearch.common.settings;
       import org.elasticsearch.common.inject.ModuleTestCase;
       import org.elasticsearch.common.settings.Setting.Property;
       
      +import static org.hamcrest.Matchers.containsString;
      +import static org.hamcrest.Matchers.is;
      +
       public class SettingsModuleTests extends ModuleTestCase {
       
           public void testValidate() {
      @@ -149,4 +152,24 @@ public class SettingsModuleTests extends ModuleTestCase {
               assertInstanceBinding(module, SettingsFilter.class, (s) -> s.filter(settings).getAsMap().get("bar.baz").equals("false"));
       
           }
      +
      +    public void testMutuallyExclusiveScopes() {
      +        new SettingsModule(Settings.EMPTY).registerSetting(Setting.simpleString("foo.bar", Property.NodeScope));
      +        new SettingsModule(Settings.EMPTY).registerSetting(Setting.simpleString("foo.bar", Property.IndexScope));
      +
      +        // Those should fail
      +        try {
      +            new SettingsModule(Settings.EMPTY).registerSetting(Setting.simpleString("foo.bar"));
      +            fail("No scope should fail");
      +        } catch (IllegalArgumentException e) {
      +            assertThat(e.getMessage(), containsString("No scope found for setting"));
      +        }
      +        // Those should fail
      +        try {
      +            new SettingsModule(Settings.EMPTY).registerSetting(Setting.simpleString("foo.bar", Property.IndexScope, Property.NodeScope));
      +            fail("Multiple scopes should fail");
      +        } catch (IllegalArgumentException e) {
      +            assertThat(e.getMessage(), containsString("More than one scope has been added to the setting"));
      +        }
      +    }
       }
      
      From 198a79edf574543b4f7e7568265c36ddbe53f72d Mon Sep 17 00:00:00 2001
      From: "George P. Stathis" 
      Date: Fri, 4 Mar 2016 19:32:44 -0500
      Subject: [PATCH 057/320] Fix spacing, assert key contains period
      
      ---
       .../common/settings/IndexScopedSettings.java        | 13 +++++++------
       .../org/elasticsearch/common/settings/Setting.java  |  2 +-
       .../index/similarity/SimilarityService.java         |  4 ++--
       .../common/settings/ScopedSettingsTests.java        |  6 +++---
       4 files changed, 13 insertions(+), 12 deletions(-)
      
      diff --git a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java
      index 4d550e53dac1..94e736cadec9 100644
      --- a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java
      +++ b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java
      @@ -136,12 +136,13 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
               IndexWarmer.INDEX_NORMS_LOADING_SETTING,
               // validate that built-in similarities don't get redefined
               Setting.groupSetting("index.similarity.", false, Setting.Scope.INDEX, (s) -> {
      -          boolean valid = true;
      -          String similarityName = s.substring(0, s.indexOf("."));
      -          if(SimilarityService.BUILT_IN.keySet().contains(similarityName)) {
      -            throw new IllegalArgumentException("Cannot redefine built-in Similarity [" + similarityName + "]");
      -          }
      -          return valid;
      +            boolean valid = true;
      +            assert(s.indexOf(".") > 1);
      +            String similarityName = s.substring(0, s.indexOf("."));
      +            if(SimilarityService.BUILT_IN.keySet().contains(similarityName)) {
      +                throw new IllegalArgumentException("Cannot redefine built-in Similarity [" + similarityName + "]");
      +            }
      +            return valid;
               }), // this allows similarity settings to be passed
               Setting.groupSetting("index.analysis.", false, Setting.Scope.INDEX) // this allows analysis settings to be passed
       
      diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
      index a6c86edde779..0ce70f89ce78 100644
      --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java
      +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
      @@ -487,7 +487,7 @@ public class Setting extends ToXContentToBytes {
           }
       
           public static Setting groupSetting(String key, boolean dynamic, Scope scope) {
      -      return groupSetting(key, dynamic, scope, (s) -> true);
      +        return groupSetting(key, dynamic, scope, (s) -> true);
           }
       
           public static Setting groupSetting(String key, boolean dynamic, Scope scope, Predicate settingsValidator) {
      diff --git a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java
      index cdeaacb9f28a..49307af079c7 100644
      --- a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java
      +++ b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java
      @@ -114,7 +114,7 @@ public final class SimilarityService extends AbstractIndexComponent {
                   }
                   providers.put(name, factory.apply(name, settings));
               }
      -      return providers;
      +        return providers;
           }
       
           public SimilarityProvider getSimilarity(String name) {
      @@ -122,7 +122,7 @@ public final class SimilarityService extends AbstractIndexComponent {
           }
       
           public SimilarityProvider getDefaultSimilarity() {
      -      return similarities.get("default");
      +        return similarities.get("default");
           }
       
           static class PerFieldSimilarity extends PerFieldSimilarityWrapper {
      diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
      index fa5a018aa9b7..ec64ebb6566b 100644
      --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
      +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
      @@ -215,10 +215,10 @@ public class ScopedSettingsTests extends ESTestCase {
               }
       
               try {
      -          settings.validate("index.similarity.classic.type", Settings.builder().put("index.similarity.classic.type", "mine").build());
      -          fail();
      +            settings.validate("index.similarity.classic.type", Settings.builder().put("index.similarity.classic.type", "mine").build());
      +            fail();
               } catch (IllegalArgumentException e) {
      -          assertEquals("Cannot redefine built-in Similarity [classic]", e.getMessage());
      +            assertEquals("Cannot redefine built-in Similarity [classic]", e.getMessage());
               }
           }
       
      
      From 9135c3c9676b1022adc4a53ae8013bae90c6a9da Mon Sep 17 00:00:00 2001
      From: Simon Willnauer 
      Date: Fri, 4 Mar 2016 17:30:15 +0100
      Subject: [PATCH 058/320] Only wait for initial state unless we already got a
       master
      
      This seems to be an error introduced in refactoring around #16821
where we now wait 30 seconds by default if the node already joined
a cluster and got a master. This can slow down tests dramatically
especially on slow boxes and notebooks.
      
      Closes #16956
      ---
       .../java/org/elasticsearch/node/Node.java     | 24 +++++++++----------
       1 file changed, 11 insertions(+), 13 deletions(-)
      
      diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java
      index d115fbb87732..e279d3e819f2 100644
      --- a/core/src/main/java/org/elasticsearch/node/Node.java
      +++ b/core/src/main/java/org/elasticsearch/node/Node.java
      @@ -317,18 +317,15 @@ public class Node implements Closeable {
               discovery.start();
               transportService.acceptIncomingRequests();
               discovery.startInitialJoin();
      -
               // tribe nodes don't have a master so we shouldn't register an observer
               if (DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.get(settings).millis() > 0) {
                   final ThreadPool thread = injector.getInstance(ThreadPool.class);
                   ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger, thread.getThreadContext());
      -            final CountDownLatch latch = new CountDownLatch(1);
                   if (observer.observedState().nodes().masterNodeId() == null) {
      +                final CountDownLatch latch = new CountDownLatch(1);
                       observer.waitForNextChange(new ClusterStateObserver.Listener() {
                           @Override
      -                    public void onNewClusterState(ClusterState state) {
      -                        latch.countDown();
      -                    }
      +                    public void onNewClusterState(ClusterState state) { latch.countDown(); }
       
                           @Override
                           public void onClusterServiceClose() {
      @@ -337,16 +334,17 @@ public class Node implements Closeable {
       
                           @Override
                           public void onTimeout(TimeValue timeout) {
      -                        assert false;
      +                        logger.warn("timed out while waiting for initial discovery state - timeout: {}",
      +                            DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.get(settings));
      +                        latch.countDown();
                           }
      -                    // use null timeout as we use timeout on the latchwait
      -                }, MasterNodeChangePredicate.INSTANCE, null);
      -            }
      +                }, MasterNodeChangePredicate.INSTANCE, DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.get(settings));
       
      -            try {
      -                latch.await(DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.get(settings).millis(), TimeUnit.MILLISECONDS);
      -            } catch (InterruptedException e) {
      -                throw new ElasticsearchTimeoutException("Interrupted while waiting for initial discovery state");
      +                try {
      +                    latch.await();
      +                } catch (InterruptedException e) {
      +                    throw new ElasticsearchTimeoutException("Interrupted while waiting for initial discovery state");
      +                }
                   }
               }
       
      
      From 6d7e8814d6ed22f56bfff1fe6dce3c111ea692fc Mon Sep 17 00:00:00 2001
      From: Clinton Gormley 
      Date: Sat, 5 Mar 2016 16:28:43 +0100
      Subject: [PATCH 059/320] Redocument the
       `index.merge.scheduler.max_thread_count` setting
      
      Closes #16961
      ---
       docs/reference/index-modules.asciidoc       |  6 +++++
       docs/reference/index-modules/merge.asciidoc | 30 +++++++++++++++++++++
       2 files changed, 36 insertions(+)
       create mode 100644 docs/reference/index-modules/merge.asciidoc
      
      diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc
      index 7d6614342b52..f7e1f68dec5e 100644
      --- a/docs/reference/index-modules.asciidoc
      +++ b/docs/reference/index-modules.asciidoc
      @@ -148,6 +148,10 @@ Other index settings are available in index modules:
       
           Enable or disable dynamic mapping for an index.
       
      +<>::
      +
      +    Control over how shards are merged by the background merge process.
      +
       <>::
       
           Configure custom similarity settings to customize how search results are
      @@ -173,6 +177,8 @@ include::index-modules/allocation.asciidoc[]
       
       include::index-modules/mapper.asciidoc[]
       
      +include::index-modules/merge.asciidoc[]
      +
       include::index-modules/similarity.asciidoc[]
       
       include::index-modules/slowlog.asciidoc[]
      diff --git a/docs/reference/index-modules/merge.asciidoc b/docs/reference/index-modules/merge.asciidoc
      new file mode 100644
      index 000000000000..7e5260f95d40
      --- /dev/null
      +++ b/docs/reference/index-modules/merge.asciidoc
      @@ -0,0 +1,30 @@
      +[[index-modules-merge]]
      +== Merge
      +
      +A shard in elasticsearch is a Lucene index, and a Lucene index is broken down
      +into segments. Segments are internal storage elements in the index where the
      +index data is stored, and are immutable. Smaller segments are periodically
      +merged into larger segments to keep the index size at bay and to expunge
      +deletes.
      +
      +The merge process uses auto-throttling to balance the use of hardware
      +resources between merging and other activities like search.
      +
      +[float]
      +[[merge-scheduling]]
      +=== Merge scheduling
      +
      +The merge scheduler (ConcurrentMergeScheduler) controls the execution of merge
      +operations when they are needed.  Merges run in separate threads, and when the
      +maximum number of threads is reached, further merges will wait until a merge
      +thread becomes available.
      +
      +The merge scheduler supports the following _dynamic_ setting:
      +
      +`index.merge.scheduler.max_thread_count`::
      +
      +    The maximum number of threads that may be merging at once. Defaults to
      +    `Math.max(1, Math.min(4, Runtime.getRuntime().availableProcessors() / 2))`
      +    which works well for a good solid-state-disk (SSD).  If your index is on
      +    spinning platter drives instead, decrease this to 1.
      +
      
      From 0322d490a89f28396b310b2b2bf80d94e3577c87 Mon Sep 17 00:00:00 2001
      From: Adrien Grand 
      Date: Sat, 5 Mar 2016 19:09:45 +0100
Subject: [PATCH 060/320] Temporarily re-allow string fields to give Kibana
 more time to switch to text/keyword.
      
      ---
       .../index/mapper/core/StringFieldMapper.java           | 10 ++++++----
       1 file changed, 6 insertions(+), 4 deletions(-)
      
      diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java
      index eda7b7fc87fb..c4659a6571ec 100644
      --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java
      +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java
      @@ -132,10 +132,11 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
           public static class TypeParser implements Mapper.TypeParser {
               @Override
               public Mapper.Builder parse(String fieldName, Map node, ParserContext parserContext) throws MapperParsingException {
      -            if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0)) {
      +            // TODO: temporarily disabled to give Kibana time to upgrade to text/keyword mappings
      +            /*if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0)) {
                       throw new IllegalArgumentException("The [string] type is removed in 5.0. You should now use either a [text] "
                               + "or [keyword] field instead for field [" + fieldName + "]");
      -            }
      +            }*/
                   StringFieldMapper.Builder builder = new StringFieldMapper.Builder(fieldName);
                   // hack for the fact that string can't just accept true/false for
                   // the index property and still accepts no/not_analyzed/analyzed
      @@ -240,10 +241,11 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
                                       int positionIncrementGap, int ignoreAbove,
                                       Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
               super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
      -        if (Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0)) {
      +        // TODO: temporarily disabled to give Kibana time to upgrade to text/keyword mappings
      +        /*if (Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0)) {
                   throw new IllegalArgumentException("The [string] type is removed in 5.0. You should now use either a [text] "
                           + "or [keyword] field instead for field [" + fieldType.name() + "]");
      -        }
      +        }*/
               if (fieldType.tokenized() && fieldType.indexOptions() != NONE && fieldType().hasDocValues()) {
                   throw new MapperParsingException("Field [" + fieldType.name() + "] cannot be analyzed and have doc values");
               }
      
      From 3674774b737bd607e40b22fb52e80b65e1210e2a Mon Sep 17 00:00:00 2001
      From: Alexander Kazakov 
      Date: Sat, 5 Mar 2016 22:50:54 +0300
      Subject: [PATCH 061/320] Build empty extended stats aggregation if no docs are
       collected for bucket #16812
      
      ---
       .../extended/ExtendedStatsAggregator.java     | 12 +++---
       .../messy/tests/ExtendedStatsTests.java       | 42 ++++++++++++++++++-
       2 files changed, 46 insertions(+), 8 deletions(-)
      
      diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java
      index 4687002cf125..2dfab325127a 100644
      --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java
      +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java
      @@ -167,14 +167,12 @@ public class ExtendedStatsAggregator extends NumericMetricsAggregator.MultiValue
           }
       
           @Override
      -    public InternalAggregation buildAggregation(long owningBucketOrdinal) {
      -        if (valuesSource == null) {
      -            return new InternalExtendedStats(name, 0, 0d, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0d, 0d, formatter,
      -                    pipelineAggregators(), metaData());
      +    public InternalAggregation buildAggregation(long bucket) {
      +        if (valuesSource == null || bucket >= counts.size()) {
      +            return buildEmptyAggregation();
               }
      -        assert owningBucketOrdinal < counts.size();
      -        return new InternalExtendedStats(name, counts.get(owningBucketOrdinal), sums.get(owningBucketOrdinal),
      -                mins.get(owningBucketOrdinal), maxes.get(owningBucketOrdinal), sumOfSqrs.get(owningBucketOrdinal), sigma, formatter,
      +        return new InternalExtendedStats(name, counts.get(bucket), sums.get(bucket),
      +                mins.get(bucket), maxes.get(bucket), sumOfSqrs.get(bucket), sigma, formatter,
                       pipelineAggregators(), metaData());
           }
       
      diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java
      index 7d018adc07f1..e717ea6d6fba 100644
      --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java
      +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java
      @@ -26,6 +26,8 @@ import org.elasticsearch.script.ScriptService.ScriptType;
       import org.elasticsearch.script.groovy.GroovyPlugin;
       import org.elasticsearch.search.aggregations.bucket.global.Global;
       import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
      +import org.elasticsearch.search.aggregations.bucket.missing.Missing;
      +import org.elasticsearch.search.aggregations.bucket.terms.Terms;
       import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase;
       import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats;
       
      @@ -38,6 +40,8 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
       import static org.elasticsearch.search.aggregations.AggregationBuilders.extendedStats;
       import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
       import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
      +import static org.elasticsearch.search.aggregations.AggregationBuilders.missing;
      +import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
       import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
       import static org.hamcrest.Matchers.equalTo;
       import static org.hamcrest.Matchers.is;
      @@ -498,6 +502,42 @@ public class ExtendedStatsTests extends AbstractNumericTestCase {
               checkUpperLowerBounds(stats, sigma);
           }
       
      +    public void testEmptySubAggregation() {
      +        SearchResponse searchResponse = client().prepareSearch("idx")
      +            .setQuery(matchAllQuery())
      +            .addAggregation(terms("value").field("value")
      +                .subAggregation(missing("values").field("values")
      +                    .subAggregation(extendedStats("stats").field("value"))))
      +            .execute().actionGet();
      +
      +        assertHitCount(searchResponse, 10);
      +
      +        Terms terms = searchResponse.getAggregations().get("value");
      +        assertThat(terms, notNullValue());
      +        assertThat(terms.getBuckets().size(), equalTo(10));
      +
      +        for (Terms.Bucket bucket : terms.getBuckets()) {
      +            assertThat(bucket.getDocCount(), equalTo(1L));
      +
      +            Missing missing = bucket.getAggregations().get("values");
      +            assertThat(missing, notNullValue());
      +            assertThat(missing.getDocCount(), equalTo(0L));
      +
      +            ExtendedStats stats = missing.getAggregations().get("stats");
      +            assertThat(stats, notNullValue());
      +            assertThat(stats.getName(), equalTo("stats"));
      +            assertThat(stats.getSumOfSquares(), equalTo(0.0));
      +            assertThat(stats.getCount(), equalTo(0L));
      +            assertThat(stats.getSum(), equalTo(0.0));
      +            assertThat(stats.getMin(), equalTo(Double.POSITIVE_INFINITY));
      +            assertThat(stats.getMax(), equalTo(Double.NEGATIVE_INFINITY));
      +            assertThat(Double.isNaN(stats.getStdDeviation()), is(true));
      +            assertThat(Double.isNaN(stats.getAvg()), is(true));
      +            assertThat(Double.isNaN(stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER)), is(true));
      +            assertThat(Double.isNaN(stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER)), is(true));
      +        }
      +    }
      +
       
           private void assertShardExecutionState(SearchResponse response, int expectedFailures) throws Exception {
               ShardSearchFailure[] failures = response.getShardFailures();
      @@ -515,4 +555,4 @@ public class ExtendedStatsTests extends AbstractNumericTestCase {
               assertThat(stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER), equalTo(stats.getAvg() - (stats.getStdDeviation() * sigma)));
           }
       
      -}
      \ No newline at end of file
      +}
      
      From 17e28d05b75f092ab7f2ee2a20230ab5dc4bfc88 Mon Sep 17 00:00:00 2001
      From: Simon Willnauer 
      Date: Sun, 6 Mar 2016 13:42:56 +0100
      Subject: [PATCH 062/320] Delete ShardsAllocatorModuleIT, it's been replaced by
       ClusterModuleTests
      
      ClusterModuleTests tests what ShardsAllocatorModuleIT tests without starting
      a cluster. Unit tests should be preferred over IT tests anyway, and the instantiation
      of the balanced shards allocator is tested with every other integration test.
      ---
       .../allocation/ShardsAllocatorModuleIT.java   | 58 -------------------
       1 file changed, 58 deletions(-)
       delete mode 100644 core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java
      
      diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java
      deleted file mode 100644
      index 60fa45ebfa12..000000000000
      --- a/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java
      +++ /dev/null
      @@ -1,58 +0,0 @@
      -/*
      - * Licensed to Elasticsearch under one or more contributor
      - * license agreements. See the NOTICE file distributed with
      - * this work for additional information regarding copyright
      - * ownership. Elasticsearch licenses this file to you under
      - * the Apache License, Version 2.0 (the "License"); you may
      - * not use this file except in compliance with the License.
      - * You may obtain a copy of the License at
      - *
      - *    http://www.apache.org/licenses/LICENSE-2.0
      - *
      - * Unless required by applicable law or agreed to in writing,
      - * software distributed under the License is distributed on an
      - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
      - * KIND, either express or implied.  See the License for the
      - * specific language governing permissions and limitations
      - * under the License.
      - */
      -
      -package org.elasticsearch.cluster.allocation;
      -
      -import org.elasticsearch.cluster.ClusterModule;
      -import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator;
      -import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator;
      -import org.elasticsearch.common.settings.Settings;
      -import org.elasticsearch.test.ESIntegTestCase;
      -import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
      -
      -import java.io.IOException;
      -
      -import static org.elasticsearch.common.settings.Settings.settingsBuilder;
      -import static org.elasticsearch.test.ESIntegTestCase.Scope;
      -import static org.hamcrest.Matchers.instanceOf;
      -
      -@ClusterScope(scope= Scope.TEST, numDataNodes =0)
      -public class ShardsAllocatorModuleIT extends ESIntegTestCase {
      -
      -    public void testLoadDefaultShardsAllocator() throws IOException {
      -        assertAllocatorInstance(Settings.Builder.EMPTY_SETTINGS, BalancedShardsAllocator.class);
      -    }
      -
      -    public void testLoadByShortKeyShardsAllocator() throws IOException {
      -        Settings build = settingsBuilder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), "even_shard") // legacy just to make sure we don't barf
      -                .build();
      -        assertAllocatorInstance(build, BalancedShardsAllocator.class);
      -        build = settingsBuilder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), ClusterModule.BALANCED_ALLOCATOR).build();
      -        assertAllocatorInstance(build, BalancedShardsAllocator.class);
      -    }
      -
      -    private void assertAllocatorInstance(Settings settings, Class clazz) throws IOException {
      -        while (cluster().size() != 0) {
      -            internalCluster().stopRandomDataNode();
      -        }
      -        internalCluster().startNode(settings);
      -        ShardsAllocator instance = internalCluster().getInstance(ShardsAllocator.class);
      -        assertThat(instance, instanceOf(clazz));
      -    }
      -}
      
      From 8ca284862dce41968f455b1aa40c0f30ab4e1181 Mon Sep 17 00:00:00 2001
      From: Simon Willnauer 
      Date: Sun, 6 Mar 2016 16:43:12 +0100
      Subject: [PATCH 063/320] Notify GatewayRecoveryListener on failure
      
      If the recovery throws an exception we fail to notify the recovery
      listener, and the uncaught exception bubbles up. This commit uses
      an AbstractRunnable that also catches rejected execution exceptions
      etc. and notifies the listener accordingly.
      ---
       .../elasticsearch/gateway/GatewayService.java    | 16 +++++++++++++++-
       1 file changed, 15 insertions(+), 1 deletion(-)
      
      diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java
      index 79f23c1b37e2..384539b4c631 100644
      --- a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java
      +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java
      @@ -39,6 +39,7 @@ import org.elasticsearch.common.inject.Inject;
       import org.elasticsearch.common.settings.Setting;
       import org.elasticsearch.common.settings.Settings;
       import org.elasticsearch.common.unit.TimeValue;
      +import org.elasticsearch.common.util.concurrent.AbstractRunnable;
       import org.elasticsearch.discovery.Discovery;
       import org.elasticsearch.env.NodeEnvironment;
       import org.elasticsearch.rest.RestStatus;
      @@ -206,7 +207,20 @@ public class GatewayService extends AbstractLifecycleComponent i
                   }
               } else {
                   if (recovered.compareAndSet(false, true)) {
      -                threadPool.generic().execute(() -> gateway.performStateRecovery(recoveryListener));
      +                threadPool.generic().execute(new AbstractRunnable() {
      +                    @Override
      +                    public void onFailure(Throwable t) {
      +                        logger.warn("Recovery failed", t);
      +                        // we reset `recovered` in the listener; don't reset it here, otherwise there might be a race
      +                        // that resets it to false while a new recovery is already running
      +                        recoveryListener.onFailure("state recovery failed: " + t.getMessage());
      +                    }
      +
      +                    @Override
      +                    protected void doRun() throws Exception {
      +                        gateway.performStateRecovery(recoveryListener);
      +                    }
      +                });
                   }
               }
           }
      
      From ed7934ee790a869093303655f1240bf62d434f90 Mon Sep 17 00:00:00 2001
      From: Ryan Ernst 
      Date: Fri, 4 Mar 2016 19:04:19 -0800
      Subject: [PATCH 064/320] Cli: Simplify test terminals
      
      This commit simplifies and consolidates the two different
      implementations of terminals used in tests. There is now a single
      MockTerminal which captures output, and allows accessing as one large
      string (with unix style \n as newlines), as well as configuring
      input.
      ---
       .../bootstrap/BootstrapCLIParser.java         |  3 +-
       .../elasticsearch/common/cli/Terminal.java    | 43 +++++++---
       .../common/cli/TerminalTests.java             | 25 +++---
       .../logging/LoggingConfigurationTests.java    | 22 ++---
       .../InternalSettingsPreparerTests.java        | 30 ++-----
       .../elasticsearch/plugins/PluginCliTests.java | 11 +--
       .../bootstrap/BootstrapCliParserTests.java    | 74 ++++++++--------
       .../common/cli/CheckFileCommandTests.java     | 30 +++----
       .../common/cli/CliToolTests.java              | 44 +++-------
       .../plugins/InstallPluginCommandTests.java    |  5 +-
       .../plugins/ListPluginsCommandTests.java      | 26 +++---
       .../plugins/PluginSecurityTests.java          | 10 +--
       .../plugins/RemovePluginCommandTests.java     |  5 +-
       .../common/cli/CliToolTestCase.java           | 72 ++--------------
       .../common/cli/MockTerminal.java              | 85 +++++++++++++++++++
       15 files changed, 253 insertions(+), 232 deletions(-)
       create mode 100644 test/framework/src/main/java/org/elasticsearch/common/cli/MockTerminal.java
      
      diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java
      index ca67fc911320..25ae53873fe6 100644
      --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java
      +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java
      @@ -26,8 +26,8 @@ import org.elasticsearch.common.Strings;
       import org.elasticsearch.common.SuppressForbidden;
       import org.elasticsearch.common.cli.CliTool;
       import org.elasticsearch.common.cli.CliToolConfig;
      -import org.elasticsearch.common.cli.Terminal;
       import org.elasticsearch.common.cli.UserError;
      +import org.elasticsearch.common.cli.Terminal;
       import org.elasticsearch.common.settings.Settings;
       import org.elasticsearch.env.Environment;
       import org.elasticsearch.monitor.jvm.JvmInfo;
      @@ -37,7 +37,6 @@ import java.util.Iterator;
       import java.util.Locale;
       import java.util.Map;
       import java.util.Properties;
      -import java.util.Set;
       
       import static org.elasticsearch.common.cli.CliToolConfig.Builder.cmd;
       import static org.elasticsearch.common.cli.CliToolConfig.Builder.optionBuilder;
      diff --git a/core/src/main/java/org/elasticsearch/common/cli/Terminal.java b/core/src/main/java/org/elasticsearch/common/cli/Terminal.java
      index 8d4a8036bdf8..fbef1f78cc33 100644
      --- a/core/src/main/java/org/elasticsearch/common/cli/Terminal.java
      +++ b/core/src/main/java/org/elasticsearch/common/cli/Terminal.java
      @@ -23,6 +23,7 @@ import java.io.BufferedReader;
       import java.io.Console;
       import java.io.IOException;
       import java.io.InputStreamReader;
      +import java.io.PrintWriter;
       import java.nio.charset.Charset;
       
       import org.elasticsearch.common.SuppressForbidden;
      @@ -52,6 +53,13 @@ public abstract class Terminal {
           /** The current verbosity for the terminal, defaulting to {@link Verbosity#NORMAL}. */
           private Verbosity verbosity = Verbosity.NORMAL;
       
      +    /** The newline used when calling println. */
      +    private final String lineSeparator;
      +
      +    protected Terminal(String lineSeparator) {
      +        this.lineSeparator = lineSeparator;
      +    }
      +
           /** Sets the verbosity of the terminal. */
           void setVerbosity(Verbosity verbosity) {
               this.verbosity = verbosity;
      @@ -63,8 +71,8 @@ public abstract class Terminal {
           /** Reads password text from the terminal input. See {@link Console#readPassword()}}. */
           public abstract char[] readSecret(String prompt);
       
      -    /** Print a message directly to the terminal. */
      -    protected abstract void doPrint(String msg);
      +    /** Returns a Writer which can be used to write to the terminal directly. */
      +    public abstract PrintWriter getWriter();
       
           /** Prints a line to the terminal at {@link Verbosity#NORMAL} verbosity level. */
           public final void println(String msg) {
      @@ -74,7 +82,8 @@ public abstract class Terminal {
           /** Prints a line to the terminal at {@code verbosity} level. */
           public final void println(Verbosity verbosity, String msg) {
               if (this.verbosity.ordinal() >= verbosity.ordinal()) {
      -            doPrint(msg + System.lineSeparator());
      +            getWriter().print(msg + lineSeparator);
      +            getWriter().flush();
               }
           }
       
      @@ -82,14 +91,17 @@ public abstract class Terminal {
       
               private static final Console console = System.console();
       
      +        ConsoleTerminal() {
      +            super(System.lineSeparator());
      +        }
      +
               static boolean isSupported() {
                   return console != null;
               }
       
               @Override
      -        public void doPrint(String msg) {
      -            console.printf("%s", msg);
      -            console.flush();
      +        public PrintWriter getWriter() {
      +            return console.writer();
               }
       
               @Override
      @@ -105,16 +117,25 @@ public abstract class Terminal {
       
           private static class SystemTerminal extends Terminal {
       
      +        private final PrintWriter writer = newWriter();
      +
      +        SystemTerminal() {
      +            super(System.lineSeparator());
      +        }
      +
      +        @SuppressForbidden(reason = "Writer for System.out")
      +        private static PrintWriter newWriter() {
      +            return new PrintWriter(System.out);
      +        }
      +
               @Override
      -        @SuppressForbidden(reason = "System#out")
      -        public void doPrint(String msg) {
      -            System.out.print(msg);
      -            System.out.flush();
      +        public PrintWriter getWriter() {
      +            return writer;
               }
       
               @Override
               public String readText(String text) {
      -            doPrint(text);
      +            getWriter().print(text);
                   BufferedReader reader = new BufferedReader(new InputStreamReader(System.in, Charset.defaultCharset()));
                   try {
                       return reader.readLine();
      diff --git a/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java b/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java
      index 0e71ac7cd6a9..9326cc162339 100644
      --- a/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java
      +++ b/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java
      @@ -19,41 +19,40 @@
       
       package org.elasticsearch.common.cli;
       
      -import static org.hamcrest.Matchers.hasItem;
      -import static org.hamcrest.Matchers.hasSize;
      -
       public class TerminalTests extends CliToolTestCase {
           public void testVerbosity() throws Exception {
      -        CaptureOutputTerminal terminal = new CaptureOutputTerminal(Terminal.Verbosity.SILENT);
      +        MockTerminal terminal = new MockTerminal();
      +        terminal.setVerbosity(Terminal.Verbosity.SILENT);
               assertPrinted(terminal, Terminal.Verbosity.SILENT, "text");
               assertNotPrinted(terminal, Terminal.Verbosity.NORMAL, "text");
               assertNotPrinted(terminal, Terminal.Verbosity.VERBOSE, "text");
       
      -        terminal = new CaptureOutputTerminal(Terminal.Verbosity.NORMAL);
      +        terminal = new MockTerminal();
               assertPrinted(terminal, Terminal.Verbosity.SILENT, "text");
               assertPrinted(terminal, Terminal.Verbosity.NORMAL, "text");
               assertNotPrinted(terminal, Terminal.Verbosity.VERBOSE, "text");
       
      -        terminal = new CaptureOutputTerminal(Terminal.Verbosity.VERBOSE);
      +        terminal = new MockTerminal();
      +        terminal.setVerbosity(Terminal.Verbosity.VERBOSE);
               assertPrinted(terminal, Terminal.Verbosity.SILENT, "text");
               assertPrinted(terminal, Terminal.Verbosity.NORMAL, "text");
               assertPrinted(terminal, Terminal.Verbosity.VERBOSE, "text");
           }
       
           public void testEscaping() throws Exception {
      -        CaptureOutputTerminal terminal = new CaptureOutputTerminal(Terminal.Verbosity.NORMAL);
      +        MockTerminal terminal = new MockTerminal();
               assertPrinted(terminal, Terminal.Verbosity.NORMAL, "This message contains percent like %20n");
           }
       
      -    private void assertPrinted(CaptureOutputTerminal logTerminal, Terminal.Verbosity verbosity, String text) {
      +    private void assertPrinted(MockTerminal logTerminal, Terminal.Verbosity verbosity, String text) throws Exception {
               logTerminal.println(verbosity, text);
      -        assertEquals(1, logTerminal.getTerminalOutput().size());
      -        assertTrue(logTerminal.getTerminalOutput().get(0).contains(text));
      -        logTerminal.terminalOutput.clear();
      +        assertTrue(logTerminal.getOutput().contains(text));
      +        logTerminal.resetOutput();
           }
       
      -    private void assertNotPrinted(CaptureOutputTerminal logTerminal, Terminal.Verbosity verbosity, String text) {
      +    private void assertNotPrinted(MockTerminal logTerminal, Terminal.Verbosity verbosity, String text) throws Exception {
               logTerminal.println(verbosity, text);
      -        assertThat(logTerminal.getTerminalOutput(), hasSize(0));
      +        String output = logTerminal.getOutput();
      +        assertTrue(output, output.isEmpty());
           }
       }
      diff --git a/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java b/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java
      index a6dda573304c..0cca19d33bf1 100644
      --- a/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java
      +++ b/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java
      @@ -19,21 +19,21 @@
       
       package org.elasticsearch.common.logging;
       
      -import org.apache.log4j.Appender;
      -import org.apache.log4j.Logger;
      -import org.elasticsearch.common.cli.CliToolTestCase;
      -import org.elasticsearch.common.settings.Settings;
      -import org.elasticsearch.env.Environment;
      -import org.elasticsearch.node.internal.InternalSettingsPreparer;
      -import org.elasticsearch.test.ESTestCase;
      -import org.junit.Before;
      -
       import java.nio.charset.StandardCharsets;
       import java.nio.file.Files;
       import java.nio.file.Path;
       import java.nio.file.StandardOpenOption;
       import java.util.Arrays;
       
      +import org.apache.log4j.Appender;
      +import org.apache.log4j.Logger;
      +import org.elasticsearch.common.cli.MockTerminal;
      +import org.elasticsearch.common.settings.Settings;
      +import org.elasticsearch.env.Environment;
      +import org.elasticsearch.node.internal.InternalSettingsPreparer;
      +import org.elasticsearch.test.ESTestCase;
      +import org.junit.Before;
      +
       import static org.hamcrest.Matchers.is;
       import static org.hamcrest.Matchers.notNullValue;
       import static org.hamcrest.Matchers.nullValue;
      @@ -162,7 +162,7 @@ public class LoggingConfigurationTests extends ESTestCase {
                               .put("appender.console.type", "console")
                               .put("appender.console.layout.type", "consolePattern")
                               .put("appender.console.layout.conversionPattern", "[%d{ISO8601}][%-5p][%-25c] %m%n")
      -                        .build(), new CliToolTestCase.MockTerminal());
      +                        .build(), new MockTerminal());
               LogConfigurator.configure(environment.settings(), true);
               // args should overwrite whatever is in the config
               ESLogger esLogger = ESLoggerFactory.getLogger("test_resolve_order");
      @@ -187,7 +187,7 @@ public class LoggingConfigurationTests extends ESTestCase {
                       Settings.builder()
                               .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath())
                               .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
      -                        .build(), new CliToolTestCase.MockTerminal());
      +                        .build(), new MockTerminal());
               LogConfigurator.configure(environment.settings(), false);
               ESLogger esLogger = ESLoggerFactory.getLogger("test_config_not_read");
       
      diff --git a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java
      index 95439ebdc266..33876ef61ad9 100644
      --- a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java
      +++ b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java
      @@ -19,9 +19,13 @@
       
       package org.elasticsearch.node.internal;
       
      +import java.io.IOException;
      +import java.io.InputStream;
      +import java.nio.file.Files;
      +import java.nio.file.Path;
      +
      +import org.elasticsearch.common.cli.MockTerminal;
       import org.elasticsearch.cluster.ClusterName;
      -import org.elasticsearch.common.cli.CliToolTestCase;
      -import org.elasticsearch.common.cli.Terminal;
       import org.elasticsearch.common.settings.Settings;
       import org.elasticsearch.common.settings.SettingsException;
       import org.elasticsearch.env.Environment;
      @@ -29,17 +33,9 @@ import org.elasticsearch.test.ESTestCase;
       import org.junit.After;
       import org.junit.Before;
       
      -import java.io.IOException;
      -import java.io.InputStream;
      -import java.nio.file.Files;
      -import java.nio.file.Path;
      -import java.util.ArrayList;
      -import java.util.List;
      -
       import static org.elasticsearch.common.settings.Settings.settingsBuilder;
       import static org.hamcrest.Matchers.containsString;
       import static org.hamcrest.Matchers.equalTo;
      -import static org.hamcrest.Matchers.is;
       
       public class InternalSettingsPreparerTests extends ESTestCase {
       
      @@ -81,17 +77,9 @@ public class InternalSettingsPreparerTests extends ESTestCase {
           }
       
           public void testReplacePromptPlaceholders() {
      -        final Terminal terminal = new CliToolTestCase.MockTerminal() {
      -            @Override
      -            public char[] readSecret(String message) {
      -                return "replaced".toCharArray();
      -            }
      -
      -            @Override
      -            public String readText(String message) {
      -                return "text";
      -            }
      -        };
      +        MockTerminal terminal = new MockTerminal();
      +        terminal.addTextInput("text");
      +        terminal.addSecretInput("replaced");
       
               Settings.Builder builder = settingsBuilder()
                       .put(baseEnvSettings)
      diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java
      index 3a1215900831..bd280e4e1d78 100644
      --- a/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java
      +++ b/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java
      @@ -20,6 +20,7 @@
       package org.elasticsearch.plugins;
       
       import org.elasticsearch.common.cli.CliToolTestCase;
      +import org.elasticsearch.common.cli.MockTerminal;
       
       import static org.elasticsearch.common.cli.CliTool.ExitStatus.OK_AND_EXIT;
       import static org.hamcrest.Matchers.containsString;
      @@ -28,22 +29,22 @@ import static org.hamcrest.Matchers.is;
       
       public class PluginCliTests extends CliToolTestCase {
           public void testHelpWorks() throws Exception {
      -        CliToolTestCase.CaptureOutputTerminal terminal = new CliToolTestCase.CaptureOutputTerminal();
      +        MockTerminal terminal = new MockTerminal();
               assertThat(new PluginCli(terminal).execute(args("--help")), is(OK_AND_EXIT));
               assertTerminalOutputContainsHelpFile(terminal, "/org/elasticsearch/plugins/plugin.help");
       
      -        terminal.getTerminalOutput().clear();
      +        terminal.resetOutput();
               assertThat(new PluginCli(terminal).execute(args("install -h")), is(OK_AND_EXIT));
               assertTerminalOutputContainsHelpFile(terminal, "/org/elasticsearch/plugins/plugin-install.help");
               for (String plugin : InstallPluginCommand.OFFICIAL_PLUGINS) {
      -            assertThat(terminal.getTerminalOutput(), hasItem(containsString(plugin)));
      +            assertThat(terminal.getOutput(), containsString(plugin));
               }
       
      -        terminal.getTerminalOutput().clear();
      +        terminal.resetOutput();
               assertThat(new PluginCli(terminal).execute(args("remove --help")), is(OK_AND_EXIT));
               assertTerminalOutputContainsHelpFile(terminal, "/org/elasticsearch/plugins/plugin-remove.help");
       
      -        terminal.getTerminalOutput().clear();
      +        terminal.resetOutput();
               assertThat(new PluginCli(terminal).execute(args("list -h")), is(OK_AND_EXIT));
               assertTerminalOutputContainsHelpFile(terminal, "/org/elasticsearch/plugins/plugin-list.help");
           }
      diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java
      index 012af99cef01..8700ca8a5fe7 100644
      --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java
      +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java
      @@ -24,10 +24,10 @@ import org.elasticsearch.Version;
       import org.elasticsearch.common.SuppressForbidden;
       import org.elasticsearch.common.cli.CliTool.ExitStatus;
       import org.elasticsearch.common.cli.CliToolTestCase;
      +import org.elasticsearch.common.cli.MockTerminal;
       import org.elasticsearch.common.cli.UserError;
       import org.elasticsearch.common.collect.Tuple;
       import org.elasticsearch.monitor.jvm.JvmInfo;
      -import org.hamcrest.Matcher;
       import org.junit.After;
       import org.junit.Before;
       
      @@ -37,7 +37,6 @@ import java.util.HashMap;
       import java.util.List;
       import java.util.Locale;
       import java.util.Map;
      -import java.util.Properties;
       
       import static org.elasticsearch.common.cli.CliTool.ExitStatus.OK;
       import static org.elasticsearch.common.cli.CliTool.ExitStatus.OK_AND_EXIT;
      @@ -50,7 +49,7 @@ import static org.hamcrest.Matchers.nullValue;
       @SuppressForbidden(reason = "modifies system properties intentionally")
       public class BootstrapCliParserTests extends CliToolTestCase {
       
      -    private CaptureOutputTerminal terminal = new CaptureOutputTerminal();
      +    private MockTerminal terminal = new MockTerminal();
            private List<String> propertiesToClear = new ArrayList<>();
            private Map<String, String> properties;
       
      @@ -73,10 +72,11 @@ public class BootstrapCliParserTests extends CliToolTestCase {
               ExitStatus status = parser.execute(args("version"));
               assertStatus(status, OK_AND_EXIT);
       
      -        assertThatTerminalOutput(containsString(Version.CURRENT.toString()));
      -        assertThatTerminalOutput(containsString(Build.CURRENT.shortHash()));
      -        assertThatTerminalOutput(containsString(Build.CURRENT.date()));
      -        assertThatTerminalOutput(containsString(JvmInfo.jvmInfo().version()));
      +        String output = terminal.getOutput();
      +        assertTrue(output, output.contains(Version.CURRENT.toString()));
      +        assertTrue(output, output.contains(Build.CURRENT.shortHash()));
      +        assertTrue(output, output.contains(Build.CURRENT.date()));
      +        assertTrue(output, output.contains(JvmInfo.jvmInfo().version()));
           }
       
           public void testThatVersionIsReturnedAsStartParameter() throws Exception {
      @@ -84,20 +84,22 @@ public class BootstrapCliParserTests extends CliToolTestCase {
               ExitStatus status = parser.execute(args("start -V"));
               assertStatus(status, OK_AND_EXIT);
       
      -        assertThatTerminalOutput(containsString(Version.CURRENT.toString()));
      -        assertThatTerminalOutput(containsString(Build.CURRENT.shortHash()));
      -        assertThatTerminalOutput(containsString(Build.CURRENT.date()));
      -        assertThatTerminalOutput(containsString(JvmInfo.jvmInfo().version()));
      +        String output = terminal.getOutput();
      +        assertTrue(output, output.contains(Version.CURRENT.toString()));
      +        assertTrue(output, output.contains(Build.CURRENT.shortHash()));
      +        assertTrue(output, output.contains(Build.CURRENT.date()));
      +        assertTrue(output, output.contains(JvmInfo.jvmInfo().version()));
       
      -        CaptureOutputTerminal terminal = new CaptureOutputTerminal();
      +        terminal.resetOutput();
               parser = new BootstrapCLIParser(terminal);
               status = parser.execute(args("start --version"));
               assertStatus(status, OK_AND_EXIT);
       
      -        assertThatTerminalOutput(containsString(Version.CURRENT.toString()));
      -        assertThatTerminalOutput(containsString(Build.CURRENT.shortHash()));
      -        assertThatTerminalOutput(containsString(Build.CURRENT.date()));
      -        assertThatTerminalOutput(containsString(JvmInfo.jvmInfo().version()));
      +        output = terminal.getOutput();
      +        assertTrue(output, output.contains(Version.CURRENT.toString()));
      +        assertTrue(output, output.contains(Build.CURRENT.shortHash()));
      +        assertTrue(output, output.contains(Build.CURRENT.date()));
      +        assertTrue(output, output.contains(JvmInfo.jvmInfo().version()));
           }
       
           public void testThatPidFileCanBeConfigured() throws Exception {
      @@ -173,11 +175,14 @@ public class BootstrapCliParserTests extends CliToolTestCase {
       
               ExitStatus status = parser.execute(args("start --network.host"));
               assertStatus(status, USAGE);
      -        assertThatTerminalOutput(containsString("Parameter [network.host] needs value"));
      +        String output = terminal.getOutput();
      +        assertTrue(output, output.contains("Parameter [network.host] needs value"));
       
      +        terminal.resetOutput();
               status = parser.execute(args("start --network.host --foo"));
               assertStatus(status, USAGE);
      -        assertThatTerminalOutput(containsString("Parameter [network.host] needs value"));
      +        output = terminal.getOutput();
      +        assertTrue(output, output.contains("Parameter [network.host] needs value"));
           }
       
           public void testParsingErrors() throws Exception {
      @@ -186,28 +191,32 @@ public class BootstrapCliParserTests extends CliToolTestCase {
               // unknown params
               ExitStatus status = parser.execute(args("version --unknown-param /tmp/pid"));
               assertStatus(status, USAGE);
      -        assertThatTerminalOutput(containsString("Unrecognized option: --unknown-param"));
      +        String output = terminal.getOutput();
      +        assertTrue(output, output.contains("Unrecognized option: --unknown-param"));
       
               // single dash in extra params
      -        terminal = new CaptureOutputTerminal();
      +        terminal.resetOutput();
               parser = new BootstrapCLIParser(terminal);
               status = parser.execute(args("start -network.host 127.0.0.1"));
               assertStatus(status, USAGE);
      -        assertThatTerminalOutput(containsString("Parameter [-network.host]does not start with --"));
      +        output = terminal.getOutput();
      +        assertTrue(output, output.contains("Parameter [-network.host]does not start with --"));
       
               // never ended parameter
      -        terminal = new CaptureOutputTerminal();
      +        terminal = new MockTerminal();
               parser = new BootstrapCLIParser(terminal);
               status = parser.execute(args("start --network.host"));
               assertStatus(status, USAGE);
      -        assertThatTerminalOutput(containsString("Parameter [network.host] needs value"));
      +        output = terminal.getOutput();
      +        assertTrue(output, output.contains("Parameter [network.host] needs value"));
       
               // free floating value
      -        terminal = new CaptureOutputTerminal();
      +        terminal = new MockTerminal();
               parser = new BootstrapCLIParser(terminal);
               status = parser.execute(args("start 127.0.0.1"));
               assertStatus(status, USAGE);
      -        assertThatTerminalOutput(containsString("Parameter [127.0.0.1]does not start with --"));
      +        output = terminal.getOutput();
      +        assertTrue(output, output.contains("Parameter [127.0.0.1]does not start with --"));
           }
       
           public void testHelpWorks() throws Exception {
      @@ -220,10 +229,11 @@ public class BootstrapCliParserTests extends CliToolTestCase {
               tuples.add(new Tuple<>("-h", "elasticsearch.help"));
       
                for (Tuple<String, String> tuple : tuples) {
      -            terminal = new CaptureOutputTerminal();
      +            terminal.resetOutput();
                   BootstrapCLIParser parser = new BootstrapCLIParser(terminal);
                   ExitStatus status = parser.execute(args(tuple.v1()));
                   assertStatus(status, OK_AND_EXIT);
       +            // verify the terminal output contains the bundled help file contents
                   assertTerminalOutputContainsHelpFile(terminal, "/org/elasticsearch/bootstrap/" + tuple.v2());
               }
           }
      @@ -253,16 +263,12 @@ public class BootstrapCliParserTests extends CliToolTestCase {
               propertiesToClear.addAll(Arrays.asList(systemProperties));
           }
       
      -    private void assertSystemProperty(String name, String expectedValue) {
      -        String msg = String.format(Locale.ROOT, "Expected property %s to be %s, terminal output was %s", name, expectedValue, terminal.getTerminalOutput());
      +    private void assertSystemProperty(String name, String expectedValue) throws Exception {
      +        String msg = String.format(Locale.ROOT, "Expected property %s to be %s, terminal output was %s", name, expectedValue, terminal.getOutput());
               assertThat(msg, System.getProperty(name), is(expectedValue));
           }
       
      -    private void assertStatus(ExitStatus status, ExitStatus expectedStatus) {
      -        assertThat(String.format(Locale.ROOT, "Expected status to be [%s], but was [%s], terminal output was %s", expectedStatus, status, terminal.getTerminalOutput()), status, is(expectedStatus));
      -    }
      -
      -    private void assertThatTerminalOutput(Matcher matcher) {
      -        assertThat(terminal.getTerminalOutput(), hasItem(matcher));
      +    private void assertStatus(ExitStatus status, ExitStatus expectedStatus) throws Exception {
      +        assertThat(String.format(Locale.ROOT, "Expected status to be [%s], but was [%s], terminal output was %s", expectedStatus, status, terminal.getOutput()), status, is(expectedStatus));
           }
       }
      diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java
      index 45f3df22cd77..485886b5cf43 100644
      --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java
      +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java
      @@ -49,7 +49,7 @@ import static org.hamcrest.Matchers.is;
        */
       public class CheckFileCommandTests extends ESTestCase {
       
      -    private CliToolTestCase.CaptureOutputTerminal captureOutputTerminal = new CliToolTestCase.CaptureOutputTerminal();
      +    private MockTerminal captureOutputTerminal = new MockTerminal();
       
           private Configuration jimFsConfiguration = Configuration.unix().toBuilder().setAttributeViews("basic", "owner", "posix", "unix").build();
           private Configuration jimFsConfigurationWithoutPermissions = randomBoolean() ? Configuration.unix().toBuilder().setAttributeViews("basic").build() : Configuration.windows();
      @@ -60,62 +60,62 @@ public class CheckFileCommandTests extends ESTestCase {
       
           public void testThatCommandLogsErrorMessageOnFail() throws Exception {
               executeCommand(jimFsConfiguration, new PermissionCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.CHANGE));
      -        assertThat(captureOutputTerminal.getTerminalOutput(), hasItem(containsString("Please ensure that the user account running Elasticsearch has read access to this file")));
      +        assertThat(captureOutputTerminal.getOutput(), containsString("Please ensure that the user account running Elasticsearch has read access to this file"));
           }
       
           public void testThatCommandLogsNothingWhenPermissionRemains() throws Exception {
               executeCommand(jimFsConfiguration, new PermissionCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.KEEP));
      -        assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0));
      +        assertTrue(captureOutputTerminal.getOutput().isEmpty());
           }
       
           public void testThatCommandLogsNothingWhenDisabled() throws Exception {
               executeCommand(jimFsConfiguration, new PermissionCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.DISABLED));
      -        assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0));
      +        assertTrue(captureOutputTerminal.getOutput().isEmpty());
           }
       
           public void testThatCommandLogsNothingIfFilesystemDoesNotSupportPermissions() throws Exception {
               executeCommand(jimFsConfigurationWithoutPermissions, new PermissionCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.DISABLED));
      -        assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0));
      +        assertTrue(captureOutputTerminal.getOutput().isEmpty());
           }
       
           public void testThatCommandLogsOwnerChange() throws Exception {
               executeCommand(jimFsConfiguration, new OwnerCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.CHANGE));
      -        assertThat(captureOutputTerminal.getTerminalOutput(), hasItem(allOf(containsString("Owner of file ["), containsString("] used to be ["), containsString("], but now is ["))));
      +        assertThat(captureOutputTerminal.getOutput(), allOf(containsString("Owner of file ["), containsString("] used to be ["), containsString("], but now is [")));
           }
       
           public void testThatCommandLogsNothingIfOwnerRemainsSame() throws Exception {
               executeCommand(jimFsConfiguration, new OwnerCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.KEEP));
      -        assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0));
      +        assertTrue(captureOutputTerminal.getOutput().isEmpty());
           }
       
           public void testThatCommandLogsNothingIfOwnerIsDisabled() throws Exception {
               executeCommand(jimFsConfiguration, new OwnerCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.DISABLED));
      -        assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0));
      +        assertTrue(captureOutputTerminal.getOutput().isEmpty());
           }
       
           public void testThatCommandLogsNothingIfFileSystemDoesNotSupportOwners() throws Exception {
               executeCommand(jimFsConfigurationWithoutPermissions, new OwnerCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.DISABLED));
      -        assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0));
      +        assertTrue(captureOutputTerminal.getOutput().isEmpty());
           }
       
           public void testThatCommandLogsIfGroupChanges() throws Exception {
               executeCommand(jimFsConfiguration, new GroupCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.CHANGE));
      -        assertThat(captureOutputTerminal.getTerminalOutput(), hasItem(allOf(containsString("Group of file ["), containsString("] used to be ["), containsString("], but now is ["))));
      +        assertThat(captureOutputTerminal.getOutput(), allOf(containsString("Group of file ["), containsString("] used to be ["), containsString("], but now is [")));
           }
       
           public void testThatCommandLogsNothingIfGroupRemainsSame() throws Exception {
               executeCommand(jimFsConfiguration, new GroupCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.KEEP));
      -        assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0));
      +        assertTrue(captureOutputTerminal.getOutput().isEmpty());
           }
       
           public void testThatCommandLogsNothingIfGroupIsDisabled() throws Exception {
               executeCommand(jimFsConfiguration, new GroupCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.DISABLED));
      -        assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0));
      +        assertTrue(captureOutputTerminal.getOutput().isEmpty());
           }
       
           public void testThatCommandLogsNothingIfFileSystemDoesNotSupportGroups() throws Exception {
               executeCommand(jimFsConfigurationWithoutPermissions, new GroupCheckFileCommand(createTempDir(), captureOutputTerminal, Mode.DISABLED));
      -        assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0));
      +        assertTrue(captureOutputTerminal.getOutput().isEmpty());
           }
       
           public void testThatCommandDoesNotLogAnythingOnFileCreation() throws Exception {
      @@ -130,7 +130,7 @@ public class CheckFileCommandTests extends ESTestCase {
                   assertThat(Files.exists(path), is(true));
               }
       
      -        assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0));
      +        assertTrue(captureOutputTerminal.getOutput().isEmpty());
           }
       
           public void testThatCommandWorksIfFileIsDeletedByCommand() throws Exception {
      @@ -147,7 +147,7 @@ public class CheckFileCommandTests extends ESTestCase {
                   assertThat(Files.exists(path), is(false));
               }
       
      -        assertThat(captureOutputTerminal.getTerminalOutput(), hasSize(0));
      +        assertTrue(captureOutputTerminal.getOutput().isEmpty());
           }
       
           private void executeCommand(Configuration configuration, AbstractTestCheckFileCommand command) throws Exception {
      diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CliToolTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CliToolTests.java
      index 5033914632ab..144a12f141bc 100644
      --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CliToolTests.java
      +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CliToolTests.java
      @@ -20,7 +20,6 @@
       package org.elasticsearch.common.cli;
       
       import org.apache.commons.cli.CommandLine;
      -import org.elasticsearch.ElasticsearchException;
       import org.elasticsearch.common.Strings;
       import org.elasticsearch.common.SuppressForbidden;
       import org.elasticsearch.common.settings.Settings;
      @@ -133,7 +132,7 @@ public class CliToolTests extends CliToolTestCase {
           }
       
           public void testSingleCommandToolHelp() throws Exception {
      -        CaptureOutputTerminal terminal = new CaptureOutputTerminal();
      +        MockTerminal terminal = new MockTerminal();
                final AtomicReference<Boolean> executed = new AtomicReference<>(false);
               final NamedCommand cmd = new NamedCommand("cmd1", terminal) {
                   @Override
      @@ -145,12 +144,11 @@ public class CliToolTests extends CliToolTestCase {
               SingleCmdTool tool = new SingleCmdTool("tool", terminal, cmd);
               CliTool.ExitStatus status = tool.execute(args("-h"));
               assertStatus(status, CliTool.ExitStatus.OK_AND_EXIT);
      -        assertThat(terminal.getTerminalOutput(), hasSize(3));
      -        assertThat(terminal.getTerminalOutput(), hasItem(containsString("cmd1 help")));
      +        assertThat(terminal.getOutput(), containsString("cmd1 help"));
           }
       
           public void testMultiCommandToolHelp() throws Exception {
      -        CaptureOutputTerminal terminal = new CaptureOutputTerminal();
      +        MockTerminal terminal = new MockTerminal();
               NamedCommand[] cmds = new NamedCommand[2];
               cmds[0] = new NamedCommand("cmd0", terminal) {
                   @Override
      @@ -167,12 +165,11 @@ public class CliToolTests extends CliToolTestCase {
               MultiCmdTool tool = new MultiCmdTool("tool", terminal, cmds);
               CliTool.ExitStatus status = tool.execute(args("-h"));
               assertStatus(status, CliTool.ExitStatus.OK_AND_EXIT);
      -        assertThat(terminal.getTerminalOutput(), hasSize(3));
      -        assertThat(terminal.getTerminalOutput(), hasItem(containsString("tool help")));
      +        assertThat(terminal.getOutput(), containsString("tool help"));
           }
       
           public void testMultiCommandCmdHelp() throws Exception {
      -        CaptureOutputTerminal terminal = new CaptureOutputTerminal();
      +        MockTerminal terminal = new MockTerminal();
               NamedCommand[] cmds = new NamedCommand[2];
               cmds[0] = new NamedCommand("cmd0", terminal) {
                   @Override
      @@ -189,12 +186,11 @@ public class CliToolTests extends CliToolTestCase {
               MultiCmdTool tool = new MultiCmdTool("tool", terminal, cmds);
               CliTool.ExitStatus status = tool.execute(args("cmd1 -h"));
               assertStatus(status, CliTool.ExitStatus.OK_AND_EXIT);
      -        assertThat(terminal.getTerminalOutput(), hasSize(3));
      -        assertThat(terminal.getTerminalOutput(), hasItem(containsString("cmd1 help")));
      +        assertThat(terminal.getOutput(), containsString("cmd1 help"));
           }
       
           public void testNonUserErrorPropagates() throws Exception {
      -        CaptureOutputTerminal terminal = new CaptureOutputTerminal();
      +        MockTerminal terminal = new MockTerminal();
               NamedCommand cmd = new NamedCommand("cmd", terminal) {
                   @Override
                   public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception {
      @@ -225,22 +221,11 @@ public class CliToolTests extends CliToolTestCase {
           }
       
           public void testPromptForSetting() throws Exception {
      -        final AtomicInteger counter = new AtomicInteger();
                final AtomicReference<String> promptedSecretValue = new AtomicReference<>(null);
                final AtomicReference<String> promptedTextValue = new AtomicReference<>(null);
      -        final Terminal terminal = new MockTerminal() {
      -            @Override
      -            public char[] readSecret(String text) {
      -                counter.incrementAndGet();
      -                return "changeit".toCharArray();
      -            }
      -
      -            @Override
      -            public String readText(String text) {
      -                counter.incrementAndGet();
      -                return "replaced";
      -            }
      -        };
      +        final MockTerminal terminal = new MockTerminal();
      +        terminal.addTextInput("replaced");
      +        terminal.addSecretInput("changeit");
               final NamedCommand cmd = new NamedCommand("noop", terminal) {
                   @Override
                   public CliTool.ExitStatus execute(Settings settings, Environment env) {
      @@ -259,7 +244,6 @@ public class CliToolTests extends CliToolTestCase {
                   System.clearProperty("es.replace");
               }
       
      -        assertThat(counter.intValue(), is(2));
               assertThat(promptedSecretValue.get(), is("changeit"));
               assertThat(promptedTextValue.get(), is("replaced"));
           }
      @@ -269,7 +253,7 @@ public class CliToolTests extends CliToolTestCase {
               final CliToolConfig.Cmd strictCommand = cmd("strict", CliTool.Command.Exit.class).stopAtNonOption(false).build();
               final CliToolConfig config = CliToolConfig.config("elasticsearch", CliTool.class).cmds(lenientCommand, strictCommand).build();
       
      -        final CaptureOutputTerminal terminal = new CaptureOutputTerminal();
      +        MockTerminal terminal = new MockTerminal();
               final CliTool cliTool = new CliTool(config, terminal) {
                   @Override
                   protected Command parse(String cmdName, CommandLine cli) throws Exception {
      @@ -292,11 +276,11 @@ public class CliToolTests extends CliToolTestCase {
       
               // unknown parameters, error
               assertStatus(cliTool.execute(args("strict --unknown")), USAGE);
      -        assertThat(terminal.getTerminalOutput(), hasItem(containsString("Unrecognized option: --unknown")));
      +        assertThat(terminal.getOutput(), containsString("Unrecognized option: --unknown"));
       
      -        terminal.getTerminalOutput().clear();
      +        terminal.resetOutput();
               assertStatus(cliTool.execute(args("strict -u")), USAGE);
      -        assertThat(terminal.getTerminalOutput(), hasItem(containsString("Unrecognized option: -u")));
      +        assertThat(terminal.getOutput(), containsString("Unrecognized option: -u"));
           }
       
           private void assertStatus(CliTool.ExitStatus status, CliTool.ExitStatus expectedStatus) {
      diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java
      index 66dfa67ccbdd..86f02fe6f30d 100644
      --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java
      +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java
      @@ -46,6 +46,7 @@ import org.apache.lucene.util.LuceneTestCase;
       import org.elasticsearch.Version;
       import org.elasticsearch.common.cli.CliTool;
       import org.elasticsearch.common.cli.CliToolTestCase;
      +import org.elasticsearch.common.cli.MockTerminal;
       import org.elasticsearch.common.cli.Terminal;
       import org.elasticsearch.common.cli.UserError;
       import org.elasticsearch.common.settings.Settings;
      @@ -116,8 +117,8 @@ public class InstallPluginCommandTests extends ESTestCase {
               return writeZip(structure, "elasticsearch");
           }
       
      -    static CliToolTestCase.CaptureOutputTerminal installPlugin(String pluginUrl, Environment env) throws Exception {
      -        CliToolTestCase.CaptureOutputTerminal terminal = new CliToolTestCase.CaptureOutputTerminal(Terminal.Verbosity.NORMAL);
      +    static MockTerminal installPlugin(String pluginUrl, Environment env) throws Exception {
      +        MockTerminal terminal = new MockTerminal();
               CliTool.ExitStatus status = new InstallPluginCommand(terminal, pluginUrl, true).execute(env.settings(), env);
               assertEquals(CliTool.ExitStatus.OK, status);
               return terminal;
      diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java
      index c68e207c0c38..c86a6464eb00 100644
      --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java
      +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java
      @@ -28,6 +28,7 @@ import java.util.List;
       import org.apache.lucene.util.LuceneTestCase;
       import org.elasticsearch.common.cli.CliTool;
       import org.elasticsearch.common.cli.CliToolTestCase;
      +import org.elasticsearch.common.cli.MockTerminal;
       import org.elasticsearch.common.cli.Terminal;
       import org.elasticsearch.common.settings.Settings;
       import org.elasticsearch.env.Environment;
      @@ -45,8 +46,8 @@ public class ListPluginsCommandTests extends ESTestCase {
               return new Environment(settings);
           }
       
      -    static CliToolTestCase.CaptureOutputTerminal listPlugins(Environment env) throws Exception {
      -        CliToolTestCase.CaptureOutputTerminal terminal = new CliToolTestCase.CaptureOutputTerminal(Terminal.Verbosity.NORMAL);
      +    static MockTerminal listPlugins(Environment env) throws Exception {
      +        MockTerminal terminal = new MockTerminal();
               CliTool.ExitStatus status = new ListPluginsCommand(terminal).execute(env.settings(), env);
               assertEquals(CliTool.ExitStatus.OK, status);
               return terminal;
      @@ -62,29 +63,24 @@ public class ListPluginsCommandTests extends ESTestCase {
           }
       
           public void testNoPlugins() throws Exception {
      -        CliToolTestCase.CaptureOutputTerminal terminal = listPlugins(createEnv());
       -        List<String> lines = terminal.getTerminalOutput();
      -        assertEquals(0, lines.size());
      +        MockTerminal terminal = listPlugins(createEnv());
      +        assertTrue(terminal.getOutput(), terminal.getOutput().isEmpty());
           }
       
           public void testOnePlugin() throws Exception {
               Environment env = createEnv();
               Files.createDirectory(env.pluginsFile().resolve("fake"));
      -        CliToolTestCase.CaptureOutputTerminal terminal = listPlugins(env);
       -        List<String> lines = terminal.getTerminalOutput();
      -        assertEquals(1, lines.size());
      -        assertTrue(lines.get(0).contains("fake"));
      +        MockTerminal terminal = listPlugins(env);
      +        assertTrue(terminal.getOutput(), terminal.getOutput().contains("fake"));
           }
       
           public void testTwoPlugins() throws Exception {
               Environment env = createEnv();
               Files.createDirectory(env.pluginsFile().resolve("fake1"));
               Files.createDirectory(env.pluginsFile().resolve("fake2"));
      -        CliToolTestCase.CaptureOutputTerminal terminal = listPlugins(env);
       -        List<String> lines = terminal.getTerminalOutput();
      -        assertEquals(2, lines.size());
      -        Collections.sort(lines);
      -        assertTrue(lines.get(0).contains("fake1"));
      -        assertTrue(lines.get(1).contains("fake2"));
      +        MockTerminal terminal = listPlugins(env);
      +        String output = terminal.getOutput();
      +        assertTrue(output, output.contains("fake1"));
      +        assertTrue(output, output.contains("fake2"));
           }
       }
      diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginSecurityTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginSecurityTests.java
      index de1486a3bc20..acc300c6cf58 100644
      --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginSecurityTests.java
      +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginSecurityTests.java
      @@ -31,7 +31,7 @@ import java.util.List;
       
       /** Tests plugin manager security check */
       public class PluginSecurityTests extends ESTestCase {
      -    
      +
           /** Test that we can parse the set of permissions correctly for a simple policy */
           public void testParsePermissions() throws Exception {
               assumeTrue("test cannot run with security manager enabled", System.getSecurityManager() == null);
      @@ -42,7 +42,7 @@ public class PluginSecurityTests extends ESTestCase {
               PermissionCollection actual = PluginSecurity.parsePermissions(Terminal.DEFAULT, testFile, scratch);
               assertEquals(expected, actual);
           }
      -    
      +
           /** Test that we can parse the set of permissions correctly for a complex policy */
           public void testParseTwoPermissions() throws Exception {
               assumeTrue("test cannot run with security manager enabled", System.getSecurityManager() == null);
      @@ -54,12 +54,12 @@ public class PluginSecurityTests extends ESTestCase {
               PermissionCollection actual = PluginSecurity.parsePermissions(Terminal.DEFAULT, testFile, scratch);
               assertEquals(expected, actual);
           }
      -    
      +
           /** Test that we can format some simple permissions properly */
           public void testFormatSimplePermission() throws Exception {
               assertEquals("java.lang.RuntimePermission queuePrintJob", PluginSecurity.formatPermission(new RuntimePermission("queuePrintJob")));
           }
      -    
      +
           /** Test that we can format an unresolved permission properly */
           public void testFormatUnresolvedPermission() throws Exception {
               assumeTrue("test cannot run with security manager enabled", System.getSecurityManager() == null);
      @@ -70,7 +70,7 @@ public class PluginSecurityTests extends ESTestCase {
               assertEquals(1, permissions.size());
               assertEquals("org.fake.FakePermission fakeName", PluginSecurity.formatPermission(permissions.get(0)));
           }
      -    
      +
           /** no guaranteed equals on these classes, we assert they contain the same set */
           private void assertEquals(PermissionCollection expected, PermissionCollection actual) {
               assertEquals(asSet(Collections.list(expected.elements())), asSet(Collections.list(actual.elements())));
      diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java
      index 10fbc3c26966..0bfdf5c34a88 100644
      --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java
      +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java
      @@ -27,6 +27,7 @@ import java.nio.file.Path;
       import org.apache.lucene.util.LuceneTestCase;
       import org.elasticsearch.common.cli.CliTool;
       import org.elasticsearch.common.cli.CliToolTestCase;
      +import org.elasticsearch.common.cli.MockTerminal;
       import org.elasticsearch.common.cli.Terminal;
       import org.elasticsearch.common.cli.UserError;
       import org.elasticsearch.common.settings.Settings;
      @@ -48,8 +49,8 @@ public class RemovePluginCommandTests extends ESTestCase {
               return new Environment(settings);
           }
       
      -    static CliToolTestCase.CaptureOutputTerminal removePlugin(String name, Environment env) throws Exception {
      -        CliToolTestCase.CaptureOutputTerminal terminal = new CliToolTestCase.CaptureOutputTerminal(Terminal.Verbosity.VERBOSE);
      +    static MockTerminal removePlugin(String name, Environment env) throws Exception {
      +        MockTerminal terminal = new MockTerminal();
               CliTool.ExitStatus status = new RemovePluginCommand(terminal, name).execute(env.settings(), env);
               assertEquals(CliTool.ExitStatus.OK, status);
               return terminal;
      diff --git a/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java
      index 6d6c176b27d9..9debf4b8f33f 100644
      --- a/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java
      +++ b/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java
      @@ -19,7 +19,8 @@
       
       package org.elasticsearch.common.cli;
       
      -import org.elasticsearch.ExceptionsHelper;
      +import java.io.IOException;
      +
       import org.elasticsearch.common.Strings;
       import org.elasticsearch.common.SuppressForbidden;
       import org.elasticsearch.test.ESTestCase;
      @@ -27,14 +28,6 @@ import org.elasticsearch.test.StreamsUtils;
       import org.junit.After;
       import org.junit.Before;
       
      -import java.io.IOException;
      -import java.util.ArrayList;
      -import java.util.List;
      -
      -import static org.hamcrest.Matchers.containsString;
      -import static org.hamcrest.Matchers.greaterThan;
      -import static org.hamcrest.Matchers.hasSize;
      -
       public abstract class CliToolTestCase extends ESTestCase {
       
           @Before
      @@ -56,63 +49,10 @@ public abstract class CliToolTestCase extends ESTestCase {
               return command.split("\\s+");
           }
       
      -    /**
      -     * A terminal implementation that discards everything
      -     */
      -    public static class MockTerminal extends Terminal {
      -
      -        @Override
      -        protected void doPrint(String msg) {}
      -
      -        @Override
      -        public String readText(String prompt) {
      -            return null;
      -        }
      -
      -        @Override
      -        public char[] readSecret(String prompt) {
      -            return new char[0];
      -        }
      -    }
      -
      -    /**
      -     * A terminal implementation that captures everything written to it
      -     */
      -    public static class CaptureOutputTerminal extends MockTerminal {
      -
       -        List<String> terminalOutput = new ArrayList<>();
      -
      -        public CaptureOutputTerminal() {
      -            this(Verbosity.NORMAL);
      -        }
      -
      -        public CaptureOutputTerminal(Verbosity verbosity) {
      -            setVerbosity(verbosity);
      -        }
      -
      -        @Override
      -        protected void doPrint(String msg) {
      -            terminalOutput.add(msg);
      -        }
      -
       -        public List<String> getTerminalOutput() {
      -            return terminalOutput;
      -        }
      -    }
      -
      -    public static void assertTerminalOutputContainsHelpFile(CliToolTestCase.CaptureOutputTerminal terminal, String classPath) throws IOException {
       -        List<String> nonEmptyLines = new ArrayList<>();
      -        for (String line : terminal.getTerminalOutput()) {
      -            String originalPrintedLine = line.replaceAll(System.lineSeparator(), "");
      -            if (Strings.isNullOrEmpty(originalPrintedLine)) {
      -                nonEmptyLines.add(originalPrintedLine);
      -            }
      -        }
      -        assertThat(nonEmptyLines, hasSize(greaterThan(0)));
      -
      +    public static void assertTerminalOutputContainsHelpFile(MockTerminal terminal, String classPath) throws IOException {
      +        String output = terminal.getOutput();
      +        assertFalse(output, output.isEmpty());
               String expectedDocs = StreamsUtils.copyToStringFromClasspath(classPath);
      -        for (String nonEmptyLine : nonEmptyLines) {
      -            assertThat(expectedDocs, containsString(nonEmptyLine.replaceAll(System.lineSeparator(), "")));
      -        }
      +        assertTrue(output, output.contains(expectedDocs));
           }
       }
      diff --git a/test/framework/src/main/java/org/elasticsearch/common/cli/MockTerminal.java b/test/framework/src/main/java/org/elasticsearch/common/cli/MockTerminal.java
      new file mode 100644
      index 000000000000..3b2903b3fabe
      --- /dev/null
      +++ b/test/framework/src/main/java/org/elasticsearch/common/cli/MockTerminal.java
      @@ -0,0 +1,85 @@
      +/*
      + * Licensed to Elasticsearch under one or more contributor
      + * license agreements. See the NOTICE file distributed with
      + * this work for additional information regarding copyright
      + * ownership. Elasticsearch licenses this file to you under
      + * the Apache License, Version 2.0 (the "License"); you may
      + * not use this file except in compliance with the License.
      + * You may obtain a copy of the License at
      + *
      + *    http://www.apache.org/licenses/LICENSE-2.0
      + *
      + * Unless required by applicable law or agreed to in writing,
      + * software distributed under the License is distributed on an
      + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
      + * KIND, either express or implied.  See the License for the
      + * specific language governing permissions and limitations
      + * under the License.
      + */
      +
      +package org.elasticsearch.common.cli;
      +
      +import java.io.ByteArrayOutputStream;
      +import java.io.OutputStreamWriter;
      +import java.io.PrintWriter;
      +import java.io.UnsupportedEncodingException;
      +import java.nio.charset.StandardCharsets;
      +import java.util.ArrayDeque;
      +import java.util.Deque;
      +
      +/**
      + * A terminal for tests which captures all output, and
      + * can be plugged with fake input.
      + */
      +public class MockTerminal extends Terminal {
      +
      +    private final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
      +    private final PrintWriter writer = new PrintWriter(new OutputStreamWriter(buffer, StandardCharsets.UTF_8));
       +    private final Deque<String> textInput = new ArrayDeque<>();
       +    private final Deque<String> secretInput = new ArrayDeque<>();
      +
      +    public MockTerminal() {
      +        super("\n"); // always *nix newlines for tests
      +    }
      +
      +    @Override
      +    public String readText(String prompt) {
      +        if (textInput.isEmpty()) {
      +            return null;
      +        }
      +        return textInput.removeFirst();
      +    }
      +
      +    @Override
      +    public char[] readSecret(String prompt) {
      +        if (secretInput.isEmpty()) {
      +            return null;
      +        }
      +        return secretInput.removeFirst().toCharArray();
      +    }
      +
      +    @Override
      +    public PrintWriter getWriter() {
      +        return writer;
      +    }
      +
       +    /** Adds an input that will be returned from {@link #readText(String)}. Values are read in FIFO order. */
      +    public void addTextInput(String input) {
      +        textInput.addLast(input);
      +    }
      +
       +    /** Adds an input that will be returned from {@link #readSecret(String)}. Values are read in FIFO order. */
      +    public void addSecretInput(String input) {
      +        secretInput.addLast(input);
      +    }
      +
      +    /** Returns all output written to this terminal. */
      +    public String getOutput() throws UnsupportedEncodingException {
      +        return buffer.toString("UTF-8");
      +    }
      +
      +    /** Wipes the output. */
      +    public void resetOutput() {
      +        buffer.reset();
      +    }
      +}
      
      From 5a94f8b3e6ce1152e126456639d59e7e7dbb63c2 Mon Sep 17 00:00:00 2001
      From: Ryan Ernst 
      Date: Sun, 6 Mar 2016 11:34:10 -0800
      Subject: [PATCH 065/320] Remove outdated nocommit and tweak assert to output
       bad exception message on failure.
      
      ---
       .../test/java/org/elasticsearch/common/cli/TerminalTests.java  | 3 ++-
       .../org/elasticsearch/bootstrap/BootstrapCliParserTests.java   | 1 -
       2 files changed, 2 insertions(+), 2 deletions(-)
      
      diff --git a/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java b/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java
      index 9326cc162339..deb64e906b47 100644
      --- a/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java
      +++ b/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java
      @@ -46,7 +46,8 @@ public class TerminalTests extends CliToolTestCase {
       
           private void assertPrinted(MockTerminal logTerminal, Terminal.Verbosity verbosity, String text) throws Exception {
               logTerminal.println(verbosity, text);
      -        assertTrue(logTerminal.getOutput().contains(text));
      +        String output = logTerminal.getOutput();
      +        assertTrue(output, output.contains(text));
               logTerminal.resetOutput();
           }
       
      diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java
      index 8700ca8a5fe7..c172999e7cb3 100644
      --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java
      +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java
      @@ -233,7 +233,6 @@ public class BootstrapCliParserTests extends CliToolTestCase {
                   BootstrapCLIParser parser = new BootstrapCLIParser(terminal);
                   ExitStatus status = parser.execute(args(tuple.v1()));
                   assertStatus(status, OK_AND_EXIT);
      -            // nocommit
                   assertTerminalOutputContainsHelpFile(terminal, "/org/elasticsearch/bootstrap/" + tuple.v2());
               }
           }
      
      From 54018a5d3728e796bb93ba86431f4eb512edb524 Mon Sep 17 00:00:00 2001
      From: Robert Muir 
      Date: Mon, 7 Mar 2016 04:12:23 -0500
      Subject: [PATCH 066/320] upgrade to lucene 6.0.0-snapshot-bea235f
      
      Closes #16964
      
      Squashed commit of the following:
      
      commit a23f9d2d29220991aa498214530753d7a5a148c6
      Merge: eec9c4e 0b0a251
      Author: Robert Muir 
      Date:   Mon Mar 7 04:12:02 2016 -0500
      
          Merge branch 'master' into lucene6
      
      commit eec9c4e5cd11e9c3e0b426f04894bb2a6dae4f21
      Merge: bc67205 675d940
      Author: Robert Muir 
      Date:   Fri Mar 4 13:45:00 2016 -0500
      
          Merge branch 'master' into lucene6
      
      commit bc67205bdfe1526eae277ab7856fc050ecbdb7b2
      Author: Robert Muir 
      Date:   Fri Mar 4 09:56:31 2016 -0500
      
          fix test bug
      
      commit a60723b007ff12d97b1810cef473bd7b553a0327
      Author: Simon Willnauer 
      Date:   Fri Mar 4 15:35:35 2016 +0100
      
          Fix SimpleValidateQueryIT to put braces around boosted terms
      
      commit ae3a49d7ba7ced448d2a5262e5d8ec98671a9090
      Author: Simon Willnauer 
      Date:   Fri Mar 4 15:27:25 2016 +0100
      
          fix multimatchquery
      
      commit ae23fdb88a8f6d3fb7ba60fd1aaf3fd72d899aa5
      Author: Simon Willnauer 
      Date:   Fri Mar 4 15:20:49 2016 +0100
      
          Rewrite DecayFunctionScoreIT to be independent of the similarity used
      
          This test relied a lot on the term scoring and compared scores
          that are dependent on the similarity. This commit changes the base query
          to be a predictable constant score query.
      
      commit 366c2d518c35d31251033f1b6f6a93f6e2ae327d
      Author: Simon Willnauer 
      Date:   Fri Mar 4 14:06:14 2016 +0100
      
          Fix scoring in tests due to changes to idf calculation.
      
          Lucene 6 uses a different default similarity as well as a different
          way to calculate IDF. In contrast to older version lucene 6 uses docCount per field
          to calculate the IDF not the # of docs in the index to overcome the sparse field
          cases.
      
      commit dac99fd64ac2fa71b8d8d106fe68825e574c49f8
      Author: Robert Muir 
      Date:   Fri Mar 4 08:21:57 2016 -0500
      
          don't hardcoded expected termquery score
      
      commit 6e9f340ba49ab10eed512df86d52a121aa775b0f
      Author: Robert Muir 
      Date:   Fri Mar 4 08:04:45 2016 -0500
      
          suppress deprecation warning until migrated to points
      
      commit 3ac8908424b3fdad44a90a4f7bdb3eff7efd077d
      Author: Robert Muir 
      Date:   Fri Mar 4 07:21:43 2016 -0500
      
          Remove invalid test: all commits have IDs, and its illegal to do this.
      
      commit c12976288124ad1a26467e7e848fb810548e7eab
      Author: Robert Muir 
      Date:   Fri Mar 4 07:06:14 2016 -0500
      
          don't test with unsupported back compat
      
      commit 18bbfe76128570bc70883bf91ff4c44c82d27817
      Author: Robert Muir 
      Date:   Fri Mar 4 07:02:18 2016 -0500
      
          remove now invalid lucene 4 backcompat test
      
      commit 7e730e572886f0ef2d3faba712e4256216ff01ec
      Author: Robert Muir 
      Date:   Fri Mar 4 06:58:52 2016 -0500
      
          remove now invalid lucene 4 backwards test
      
      commit 244d2ab6868ba5ac9e0bcde3c2833743751a25ec
      Author: Robert Muir 
      Date:   Fri Mar 4 06:47:23 2016 -0500
      
          use 6.0 codec
      
      commit 5f64d4a431a6fdaa1234adca23f154c2a1de8284
      Author: Robert Muir 
      Date:   Fri Mar 4 06:43:08 2016 -0500
      
          compile, javadocs, forbidden-apis, etc
      
      commit 1f273cd62a7fe9ca8f8944acbbfc5cbdd3d81ccb
      Merge: cd33921 29e3443
      Author: Simon Willnauer 
      Date:   Fri Mar 4 10:45:29 2016 +0100
      
          Merge branch 'master' into lucene6
      
      commit cd33921ac742ef9fb351012eff35f3c7dbda7264
      Author: Robert Muir 
      Date:   Thu Mar 3 23:58:37 2016 -0500
      
          fix hunspell dictionary loading
      
      commit c7fdbd837b01f7defe9cb1c24e2ec65604b0dc96
      Merge: 4d4190f d8948ba
      Author: Robert Muir 
      Date:   Thu Mar 3 23:41:53 2016 -0500
      
          Merge branch 'master' into lucene6
      
      commit 4d4190fd82601aaafac6b8254ccb3edf218faa34
      Author: Robert Muir 
      Date:   Thu Mar 3 23:39:14 2016 -0500
      
          remove nocommit
      
      commit 77ca69e288b1a41aa9595c921ed166c272a00ea8
      Author: Robert Muir 
      Date:   Thu Mar 3 23:38:24 2016 -0500
      
          clean up numericutils vs legacynumericutils
      
      commit a466d696fbaad04b647ffbc0857a9439b583d0bf
      Author: Robert Muir 
      Date:   Thu Mar 3 23:32:43 2016 -0500
      
          upgrade spatial4j
      
      commit 5412c747a8cfe638bacedbc8233163cb75cc3dc5
      Author: Robert Muir 
      Date:   Thu Mar 3 23:19:28 2016 -0500
      
          move to 6.0.0-snapshot-8eada27
      
      commit b32bfe924626b87e540692375ece09e7c2edb189
      Author: Adrien Grand 
      Date:   Thu Mar 3 11:30:09 2016 +0100
      
          Fix some test compile errors.
      
      commit 6ccde35e9840b03c68d1a2cd47c7923a06edf64a
      Author: Adrien Grand 
      Date:   Thu Mar 3 11:25:51 2016 +0100
      
          Current Lucene version is 6.0.0.
      
      commit f62e1015d931b4cc04c778298a8fa1ba65e97ad9
      Author: Adrien Grand 
      Date:   Thu Mar 3 11:20:48 2016 +0100
      
          Fix compile errors in NGramTokenFilterFactory.
      
      commit 6837c6eabf96075f743649da9b9b52dd39611c58
      Author: Adrien Grand 
      Date:   Thu Mar 3 10:50:59 2016 +0100
      
          Fix the edge ngram tokenizer/filter.
      
      commit ccd7f070de5efcdfbeb34b9555c65c4990bf1ba6
      Author: Adrien Grand 
      Date:   Thu Mar 3 10:42:44 2016 +0100
      
          The missing value is now accessible through a getter.
      
      commit bd3b77f9b28e5b05daa3d49683a9922a6baf2963
      Author: Adrien Grand 
      Date:   Thu Mar 3 10:41:51 2016 +0100
      
          Remove IndexCacheableQuery.
      
      commit 05f3091c347aeae80eeb16349ac51d2b53cf86f7
      Author: Adrien Grand 
      Date:   Thu Mar 3 10:39:43 2016 +0100
      
          Fix compilation of function_score queries.
      
      commit 81cda79a2431ac78f56b0cc5a5765387f662d801
      Author: Adrien Grand 
      Date:   Thu Mar 3 10:35:02 2016 +0100
      
          Fix compile errors in BlendedTermQuery.
      
      commit 70994ce8dd1eca0b995870974a38e20f26f96a7b
      Author: Robert Muir 
      Date:   Wed Mar 2 23:33:03 2016 -0500
      
          add bug ID
      
      commit 29d4f1a71f36f646b5a6060bed3db019564a279d
      Author: Robert Muir 
      Date:   Wed Mar 2 21:02:32 2016 -0500
      
          easy .store changes
      
      commit 5e1a1e6fd665fa455e88d3a8987362fad5f44bb1
      Author: Robert Muir 
      Date:   Wed Mar 2 20:47:24 2016 -0500
      
          cleanups mostly around boosting
      
      commit 333a669ec6c305ada5645d13ed1da0e19ec1d053
      Author: Robert Muir 
      Date:   Wed Mar 2 20:27:56 2016 -0500
      
          more simple fixes
      
      commit bd5cd98a1e089c866b6b4a5e159400b110140ce6
      Author: Robert Muir 
      Date:   Wed Mar 2 19:49:38 2016 -0500
      
          more easy fixes and removal of ancient cruft
      
      commit a68f419ee47da5f9c9ce5b372f01d707e902474c
      Author: Robert Muir 
      Date:   Wed Mar 2 19:35:02 2016 -0500
      
          cutover numerics
      
      commit 4ca5dc1fa47dd5892db00899032133318fff3116
      Author: Robert Muir 
      Date:   Wed Mar 2 18:34:18 2016 -0500
      
          fix some constants
      
      commit 88710a17817086e477c6c021ec346d0534b7fb88
      Author: Robert Muir 
      Date:   Wed Mar 2 18:14:25 2016 -0500
      
          Add spatial-extras jar as a core dependency
      
      commit c8cd6726583e5ce3f546ed355d4eca037164a30d
      Author: Robert Muir 
      Date:   Wed Mar 2 18:03:33 2016 -0500
      
          update to lucene 6 jars
      ---
       .../gradle/plugin/PluginBuildPlugin.groovy    |   2 +-
       .../resources/forbidden/all-signatures.txt    |  14 --
       .../resources/forbidden/core-signatures.txt   |  15 +-
       buildSrc/version.properties                   |   4 +-
       core/build.gradle                             |  10 +-
       .../lucene/queries/BlendedTermQuery.java      |   6 +-
       .../classic/MapperQueryParser.java            |  36 ++--
       .../search/XFilteredDocIdSetIterator.java     |   3 +-
       .../vectorhighlight/CustomFieldQuery.java     |   4 +-
       .../main/java/org/elasticsearch/Version.java  | 131 ++++++-------
       .../common/geo/ShapesAvailability.java        |   2 +-
       .../common/geo/XShapeCollection.java          |   6 +-
       .../common/geo/builders/CircleBuilder.java    |   2 +-
       .../common/geo/builders/EnvelopeBuilder.java  |   2 +-
       .../builders/GeometryCollectionBuilder.java   |   2 +-
       .../geo/builders/LineStringBuilder.java       |   2 +-
       .../geo/builders/MultiLineStringBuilder.java  |   2 +-
       .../geo/builders/MultiPointBuilder.java       |   4 +-
       .../geo/builders/MultiPolygonBuilder.java     |   2 +-
       .../common/geo/builders/PointBuilder.java     |   2 +-
       .../common/geo/builders/PolygonBuilder.java   |   4 +-
       .../common/geo/builders/ShapeBuilder.java     |  12 +-
       .../common/lucene/IndexCacheableQuery.java    |  74 --------
       .../elasticsearch/common/lucene/Lucene.java   |  13 +-
       .../common/lucene/all/AllTermQuery.java       |   2 +-
       .../lucene/index/FilterableTermsEnum.java     |   5 +-
       .../lucene/search/MultiPhrasePrefixQuery.java |  13 +-
       .../common/lucene/search/Queries.java         |   7 +-
       .../function/FiltersFunctionScoreQuery.java   |   8 +-
       .../search/function/FunctionScoreQuery.java   |   8 +-
       .../lucene/store/FilterIndexOutput.java       |   2 +-
       .../elasticsearch/common/util/BigArrays.java  |  16 +-
       .../common/util/BigByteArray.java             |   2 +-
       .../common/util/BigDoubleArray.java           |   2 +-
       .../common/util/BigFloatArray.java            |   2 +-
       .../common/util/BigIntArray.java              |   2 +-
       .../common/util/BigLongArray.java             |   2 +-
       .../common/util/BigObjectArray.java           |   2 +-
       .../common/util/BloomFilter.java              |   2 +-
       .../common/util/CollectionUtils.java          |   2 +-
       .../gateway/MetaDataStateFormat.java          |   2 +-
       .../index/analysis/Analysis.java              |   4 +-
       .../analysis/EdgeNGramTokenFilterFactory.java |  15 +-
       .../analysis/EdgeNGramTokenizerFactory.java   |  40 +---
       .../index/analysis/KeepWordFilterFactory.java |  25 +--
       .../analysis/LengthTokenFilterFactory.java    |  19 +-
       .../analysis/NGramTokenFilterFactory.java     |  10 +-
       .../index/analysis/NumericDateTokenizer.java  |   6 +-
       .../analysis/NumericDoubleTokenizer.java      |   6 +-
       .../index/analysis/NumericFloatTokenizer.java |   6 +-
       .../analysis/NumericIntegerTokenizer.java     |   6 +-
       .../index/analysis/NumericLongTokenizer.java  |   6 +-
       .../index/analysis/NumericTokenizer.java      |   8 +-
       .../index/analysis/SnowballAnalyzer.java      |   8 +-
       .../analysis/StandardHtmlStripAnalyzer.java   |   9 +-
       .../analysis/StandardTokenizerFactory.java    |  14 +-
       .../analysis/StopTokenFilterFactory.java      |  14 +-
       .../analysis/TrimTokenFilterFactory.java      |  17 +-
       .../UAX29URLEmailTokenizerFactory.java        |  14 +-
       .../WordDelimiterTokenFilterFactory.java      |  11 +-
       ...tionaryCompoundWordTokenFilterFactory.java |  11 +-
       ...enationCompoundWordTokenFilterFactory.java |  11 +-
       .../index/codec/CodecService.java             |   5 +-
       .../PerFieldMappingPostingFormatCodec.java    |   4 +-
       .../index/engine/DeleteVersionValue.java      |   3 +-
       .../index/engine/InternalEngine.java          |   2 +-
       .../index/engine/LiveVersionMap.java          |   4 +-
       .../index/engine/VersionValue.java            |   2 +-
       .../fielddata/SortedBinaryDocValues.java      |   2 +-
       .../fielddata/ordinals/OrdinalsBuilder.java   |   6 +-
       .../plain/AbstractIndexGeoPointFieldData.java |   4 +-
       .../plain/GeoPointArrayAtomicFieldData.java   |   5 +-
       .../GeoPointArrayLegacyAtomicFieldData.java   |   5 +-
       .../index/mapper/ParseContext.java            |   8 +-
       .../index/mapper/core/ByteFieldMapper.java    |  16 +-
       .../index/mapper/core/DateFieldMapper.java    |  18 +-
       .../index/mapper/core/DoubleFieldMapper.java  |  18 +-
       .../index/mapper/core/FloatFieldMapper.java   |  18 +-
       .../index/mapper/core/IntegerFieldMapper.java |  16 +-
       .../index/mapper/core/LongFieldMapper.java    |  16 +-
       .../index/mapper/core/NumberFieldMapper.java  |  36 ++--
       .../index/mapper/core/ShortFieldMapper.java   |  16 +-
       .../mapper/geo/BaseGeoPointFieldMapper.java   |   4 +-
       .../index/mapper/geo/GeoPointFieldMapper.java |   4 +-
       .../index/mapper/geo/GeoShapeFieldMapper.java |   8 +-
       .../index/mapper/ip/IpFieldMapper.java        |  16 +-
       .../index/shard/ElasticsearchMergePolicy.java |   2 -
       .../index/shard/VersionFieldUpgrader.java     | 172 ------------------
       .../index/store/StoreFileMetaData.java        |   2 +-
       .../index/translog/Checkpoint.java            |   7 +-
       .../index/translog/Translog.java              |  10 +-
       .../index/translog/TranslogReader.java        |   3 +-
       .../index/translog/TranslogWriter.java        |   3 +-
       .../indices/IndicesRequestCache.java          |   2 +-
       .../indices/analysis/HunspellService.java     |   6 +-
       .../percolator/PercolatorQuery.java           |   4 -
       .../blobstore/ChecksumBlobStoreFormat.java    |   2 +-
       .../significant/SignificantStringTerms.java   |   3 +-
       .../GlobalOrdinalsStringTermsAggregator.java  |   3 +-
       .../bucket/terms/StringTerms.java             |   2 +-
       .../bucket/terms/support/IncludeExclude.java  |   4 +-
       .../cardinality/HyperLogLogPlusPlus.java      |   3 +-
       .../search/highlight/CustomQueryScorer.java   |   6 +-
       .../FragmentBuilderHelper.java                |  11 --
       .../search/internal/DefaultSearchContext.java |   4 -
       .../elasticsearch/bootstrap/security.policy   |   2 +-
       .../bootstrap/test-framework.policy           |   2 +-
       .../lucene/queries/BlendedTermQueryTests.java |   4 +-
       .../common/geo/GeoJSONShapeParserTests.java   |  14 +-
       .../common/geo/ShapeBuilderTests.java         |  12 +-
       .../geo/builders/EnvelopeBuilderTests.java    |   2 +-
       .../lucene/IndexCacheableQueryTests.java      | 140 --------------
       .../common/lucene/LuceneTests.java            |   9 +-
       .../common/lucene/all/SimpleAllTests.java     |  12 +-
       .../lucene/index/ESDirectoryReaderTests.java  |   2 +-
       .../lucene/index/FreqTermsEnumTests.java      |   2 +-
       .../search/MultiPhrasePrefixQueryTests.java   |  23 +--
       .../morelikethis/MoreLikeThisQueryTests.java  |   2 +-
       .../common/lucene/uid/VersionLookupTests.java |   4 +-
       .../common/lucene/uid/VersionsTests.java      |  10 +-
       .../deps/lucene/SimpleLuceneTests.java        |  34 ++--
       .../deps/lucene/VectorHighlighterTests.java   |  35 ++--
       .../index/IndexingSlowLogTests.java           |   4 +-
       .../index/analysis/AnalysisModuleTests.java   |  13 +-
       .../analysis/NGramTokenizerFactoryTests.java  |  52 ++----
       .../index/analysis/NumericAnalyzerTests.java  |  10 +-
       .../index/analysis/StopTokenFilterTests.java  |  31 +---
       .../WordDelimiterTokenFilterFactoryTests.java |  19 --
       .../cache/bitset/BitSetFilterCacheTests.java  |   8 +-
       .../elasticsearch/index/codec/CodecTests.java |  18 +-
       .../index/engine/CommitStatsTests.java        |  52 ------
       .../AbstractFieldDataImplTestCase.java        |   4 +-
       .../fielddata/AbstractFieldDataTestCase.java  |   2 +-
       .../AbstractStringFieldDataTestCase.java      |   6 +-
       .../index/fielddata/DuelFieldDataTests.java   |  10 +-
       .../fielddata/IndexFieldDataServiceTests.java |   2 +-
       .../fielddata/ParentChildFieldDataTests.java  |   4 +-
       .../mapper/core/BooleanFieldMapperTests.java  |   2 +-
       .../mapper/date/SimpleDateMappingTests.java   |  16 +-
       .../mapper/externalvalues/ExternalMapper.java |   2 +-
       .../mapper/lucene/DoubleIndexingDocTests.java |   2 +-
       .../lucene/StoredNumericValuesTests.java      |   4 +-
       .../mapper/numeric/SimpleNumericTests.java    |   6 +-
       .../index/query/FuzzyQueryBuilderTests.java   |   8 +-
       .../GeoBoundingBoxQueryBuilderTests.java      |   8 +-
       .../query/GeoDistanceQueryBuilderTests.java   |   2 +-
       .../query/GeoPolygonQueryBuilderTests.java    |   2 +-
       .../query/GeohashCellQueryBuilderTests.java   |   2 +-
       .../index/query/MatchQueryBuilderTests.java   |  12 +-
       .../query/MultiMatchQueryBuilderTests.java    |   4 +-
       .../query/QueryStringQueryBuilderTests.java   |   4 +-
       .../index/query/RangeQueryBuilderTests.java   |  30 +--
       .../functionscore/FunctionScoreTests.java     |   6 +-
       .../index/search/geo/GeoUtilsTests.java       |   4 +-
       .../AbstractNumberNestedSortingTestCase.java  |   2 +-
       .../search/nested/NestedSortingTests.java     |   6 +-
       .../shard/IndexSearcherWrapperTests.java      |   8 +-
       .../index/shard/ShardUtilsTests.java          |   4 +-
       .../shard/VersionFieldUpgraderTests.java      | 144 ---------------
       .../elasticsearch/index/store/StoreTests.java | 107 +----------
       .../indices/IndicesQueryCacheTests.java       |  10 +-
       .../indices/IndicesRequestCacheTests.java     |  24 +--
       .../indices/stats/IndexStatsIT.java           |   2 -
       .../percolator/PercolatorQueryTests.java      |   4 +-
       .../innerhits/NestedChildrenFilterTests.java  |   4 +-
       .../functionscore/DecayFunctionScoreIT.java   | 100 +++++-----
       .../elasticsearch/search/geo/GeoFilterIT.java |   8 +-
       .../search/geo/GeoShapeQueryTests.java        |   2 +-
       .../search/highlight/HighlighterSearchIT.java |  72 --------
       .../search/query/MultiMatchQueryIT.java       |   2 +-
       .../search/query/SearchQueryIT.java           |  12 +-
       .../phrase/NoisyChannelSpellCheckerTests.java |   6 +-
       .../phrase/SmoothingModelTestCase.java        |   2 +-
       .../test/geo/RandomShapeGenerator.java        |  14 +-
       .../hamcrest/ElasticsearchGeoAssertions.java  |  12 +-
       .../validate/SimpleValidateQueryIT.java       |   4 +-
       .../index/analysis/keep_analysis.json         |   4 +-
       .../lucene-analyzers-common-5.5.0.jar.sha1    |   1 -
       ...ers-common-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       .../lucene-backward-codecs-5.5.0.jar.sha1     |   1 -
       ...ard-codecs-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       .../licenses/lucene-core-5.5.0.jar.sha1       |   1 -
       ...ucene-core-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       .../licenses/lucene-grouping-5.5.0.jar.sha1   |   1 -
       ...e-grouping-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       .../lucene-highlighter-5.5.0.jar.sha1         |   1 -
       ...ighlighter-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       .../licenses/lucene-join-5.5.0.jar.sha1       |   1 -
       ...ucene-join-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       .../licenses/lucene-memory-5.5.0.jar.sha1     |   1 -
       ...ene-memory-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       .../licenses/lucene-misc-5.5.0.jar.sha1       |   1 -
       ...ucene-misc-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       .../licenses/lucene-queries-5.5.0.jar.sha1    |   1 -
       ...ne-queries-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       .../lucene-queryparser-5.5.0.jar.sha1         |   1 -
       ...ueryparser-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       .../licenses/lucene-sandbox-5.5.0.jar.sha1    |   1 -
       ...ne-sandbox-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       .../licenses/lucene-spatial-5.5.0.jar.sha1    |   1 -
       ...ne-spatial-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       ...ial-extras-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       .../licenses/lucene-spatial3d-5.5.0.jar.sha1  |   1 -
       ...-spatial3d-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       .../licenses/lucene-suggest-5.5.0.jar.sha1    |   1 -
       ...ne-suggest-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       distribution/licenses/spatial4j-0.5.jar.sha1  |   1 -
       distribution/licenses/spatial4j-0.6.jar.sha1  |   1 +
       .../query-dsl/geo-shape-query.asciidoc        |   6 +-
       .../lucene-expressions-5.5.0.jar.sha1         |   1 -
       ...xpressions-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       .../messy/tests/FunctionScoreTests.java       |  11 +-
       .../lucene-analyzers-icu-5.5.0.jar.sha1       |   1 -
       ...lyzers-icu-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       .../IndexableBinaryStringToolsTests.java      |   7 +-
       .../lucene-analyzers-kuromoji-5.5.0.jar.sha1  |   1 -
       ...s-kuromoji-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       .../lucene-analyzers-phonetic-5.5.0.jar.sha1  |   1 -
       ...s-phonetic-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       .../lucene-analyzers-smartcn-5.5.0.jar.sha1   |   1 -
       ...rs-smartcn-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       .../lucene-analyzers-stempel-5.5.0.jar.sha1   |   1 -
       ...rs-stempel-6.0.0-snapshot-bea235f.jar.sha1 |   1 +
       plugins/mapper-size/build.gradle              |   3 +
       .../index/store/SmbDirectoryWrapper.java      |   2 +-
       .../org/elasticsearch/test/ESTestCase.java    |  21 ---
       .../test/store/MockFSDirectoryService.java    |   3 +-
       227 files changed, 712 insertions(+), 1773 deletions(-)
       delete mode 100644 core/src/main/java/org/elasticsearch/common/lucene/IndexCacheableQuery.java
       delete mode 100644 core/src/main/java/org/elasticsearch/index/shard/VersionFieldUpgrader.java
       delete mode 100644 core/src/test/java/org/elasticsearch/common/lucene/IndexCacheableQueryTests.java
       delete mode 100644 core/src/test/java/org/elasticsearch/index/engine/CommitStatsTests.java
       delete mode 100644 core/src/test/java/org/elasticsearch/index/shard/VersionFieldUpgraderTests.java
       delete mode 100644 distribution/licenses/lucene-analyzers-common-5.5.0.jar.sha1
       create mode 100644 distribution/licenses/lucene-analyzers-common-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 distribution/licenses/lucene-backward-codecs-5.5.0.jar.sha1
       create mode 100644 distribution/licenses/lucene-backward-codecs-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 distribution/licenses/lucene-core-5.5.0.jar.sha1
       create mode 100644 distribution/licenses/lucene-core-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 distribution/licenses/lucene-grouping-5.5.0.jar.sha1
       create mode 100644 distribution/licenses/lucene-grouping-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 distribution/licenses/lucene-highlighter-5.5.0.jar.sha1
       create mode 100644 distribution/licenses/lucene-highlighter-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 distribution/licenses/lucene-join-5.5.0.jar.sha1
       create mode 100644 distribution/licenses/lucene-join-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 distribution/licenses/lucene-memory-5.5.0.jar.sha1
       create mode 100644 distribution/licenses/lucene-memory-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 distribution/licenses/lucene-misc-5.5.0.jar.sha1
       create mode 100644 distribution/licenses/lucene-misc-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 distribution/licenses/lucene-queries-5.5.0.jar.sha1
       create mode 100644 distribution/licenses/lucene-queries-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 distribution/licenses/lucene-queryparser-5.5.0.jar.sha1
       create mode 100644 distribution/licenses/lucene-queryparser-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 distribution/licenses/lucene-sandbox-5.5.0.jar.sha1
       create mode 100644 distribution/licenses/lucene-sandbox-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 distribution/licenses/lucene-spatial-5.5.0.jar.sha1
       create mode 100644 distribution/licenses/lucene-spatial-6.0.0-snapshot-bea235f.jar.sha1
       create mode 100644 distribution/licenses/lucene-spatial-extras-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 distribution/licenses/lucene-spatial3d-5.5.0.jar.sha1
       create mode 100644 distribution/licenses/lucene-spatial3d-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 distribution/licenses/lucene-suggest-5.5.0.jar.sha1
       create mode 100644 distribution/licenses/lucene-suggest-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 distribution/licenses/spatial4j-0.5.jar.sha1
       create mode 100644 distribution/licenses/spatial4j-0.6.jar.sha1
       delete mode 100644 modules/lang-expression/licenses/lucene-expressions-5.5.0.jar.sha1
       create mode 100644 modules/lang-expression/licenses/lucene-expressions-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0.jar.sha1
       create mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0.jar.sha1
       create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0.jar.sha1
       create mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0.jar.sha1
       create mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.0-snapshot-bea235f.jar.sha1
       delete mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0.jar.sha1
       create mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.0-snapshot-bea235f.jar.sha1
      
      diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy
      index bdb563e001b7..b04f959e0681 100644
      --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy
      +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy
      @@ -68,7 +68,7 @@ public class PluginBuildPlugin extends BuildPlugin {
                   testCompile "org.elasticsearch.test:framework:${project.versions.elasticsearch}"
                   // we "upgrade" these optional deps to provided for plugins, since they will run
                   // with a full elasticsearch server that includes optional deps
      -            provided "com.spatial4j:spatial4j:${project.versions.spatial4j}"
      +            provided "org.locationtech.spatial4j:spatial4j:${project.versions.spatial4j}"
                   provided "com.vividsolutions:jts:${project.versions.jts}"
                   provided "log4j:log4j:${project.versions.log4j}"
                   provided "log4j:apache-log4j-extras:${project.versions.log4j}"
      diff --git a/buildSrc/src/main/resources/forbidden/all-signatures.txt b/buildSrc/src/main/resources/forbidden/all-signatures.txt
      index 3c56a03b2939..9bc370055145 100644
      --- a/buildSrc/src/main/resources/forbidden/all-signatures.txt
      +++ b/buildSrc/src/main/resources/forbidden/all-signatures.txt
      @@ -33,20 +33,6 @@ java.util.Formatter#(java.lang.String,java.lang.String,java.util.Locale)
       java.io.RandomAccessFile
       java.nio.file.Path#toFile()
       
      -@defaultMessage Don't use deprecated lucene apis
      -org.apache.lucene.index.DocsEnum
      -org.apache.lucene.index.DocsAndPositionsEnum
      -org.apache.lucene.queries.TermFilter
      -org.apache.lucene.queries.TermsFilter
      -org.apache.lucene.search.Filter
      -org.apache.lucene.search.FilteredQuery
      -org.apache.lucene.search.TermRangeFilter
      -org.apache.lucene.search.NumericRangeFilter
      -org.apache.lucene.search.PrefixFilter
      -org.apache.lucene.search.QueryWrapperFilter
      -org.apache.lucene.search.join.BitDocIdSetCachingWrapperFilter
      -org.apache.lucene.index.IndexWriter#isLocked(org.apache.lucene.store.Directory)
      -
       java.nio.file.Paths @ Use org.elasticsearch.common.io.PathUtils.get() instead.
       java.nio.file.FileSystems#getDefault() @ use org.elasticsearch.common.io.PathUtils.getDefaultFileSystem() instead.
       
      diff --git a/buildSrc/src/main/resources/forbidden/core-signatures.txt b/buildSrc/src/main/resources/forbidden/core-signatures.txt
      index c6ab430595c0..059be403a672 100644
      --- a/buildSrc/src/main/resources/forbidden/core-signatures.txt
      +++ b/buildSrc/src/main/resources/forbidden/core-signatures.txt
      @@ -41,14 +41,10 @@ org.apache.lucene.index.IndexReader#addReaderClosedListener(org.apache.lucene.in
       org.apache.lucene.index.IndexReader#removeReaderClosedListener(org.apache.lucene.index.IndexReader$ReaderClosedListener)
       
       @defaultMessage Pass the precision step from the mappings explicitly instead
      -org.apache.lucene.search.NumericRangeQuery#newDoubleRange(java.lang.String,java.lang.Double,java.lang.Double,boolean,boolean)
      -org.apache.lucene.search.NumericRangeQuery#newFloatRange(java.lang.String,java.lang.Float,java.lang.Float,boolean,boolean)
      -org.apache.lucene.search.NumericRangeQuery#newIntRange(java.lang.String,java.lang.Integer,java.lang.Integer,boolean,boolean)
      -org.apache.lucene.search.NumericRangeQuery#newLongRange(java.lang.String,java.lang.Long,java.lang.Long,boolean,boolean)
      -org.apache.lucene.search.NumericRangeFilter#newDoubleRange(java.lang.String,java.lang.Double,java.lang.Double,boolean,boolean)
      -org.apache.lucene.search.NumericRangeFilter#newFloatRange(java.lang.String,java.lang.Float,java.lang.Float,boolean,boolean)
      -org.apache.lucene.search.NumericRangeFilter#newIntRange(java.lang.String,java.lang.Integer,java.lang.Integer,boolean,boolean)
      -org.apache.lucene.search.NumericRangeFilter#newLongRange(java.lang.String,java.lang.Long,java.lang.Long,boolean,boolean)
      +org.apache.lucene.search.LegacyNumericRangeQuery#newDoubleRange(java.lang.String,java.lang.Double,java.lang.Double,boolean,boolean)
      +org.apache.lucene.search.LegacyNumericRangeQuery#newFloatRange(java.lang.String,java.lang.Float,java.lang.Float,boolean,boolean)
      +org.apache.lucene.search.LegacyNumericRangeQuery#newIntRange(java.lang.String,java.lang.Integer,java.lang.Integer,boolean,boolean)
      +org.apache.lucene.search.LegacyNumericRangeQuery#newLongRange(java.lang.String,java.lang.Long,java.lang.Long,boolean,boolean)
       
       @defaultMessage Only use wait / notify when really needed try to use concurrency primitives, latches or callbacks instead.
       java.lang.Object#wait()
      @@ -88,9 +84,6 @@ java.util.concurrent.Future#cancel(boolean)
       org.elasticsearch.common.io.PathUtils#get(java.lang.String, java.lang.String[])
       org.elasticsearch.common.io.PathUtils#get(java.net.URI)
       
      -@defaultMessage Don't use deprecated Query#setBoost, wrap the query into a BoostQuery instead
      -org.apache.lucene.search.Query#setBoost(float)
      -
       @defaultMessage Constructing a DateTime without a time zone is dangerous
       org.joda.time.DateTime#()
       org.joda.time.DateTime#(long)
      diff --git a/buildSrc/version.properties b/buildSrc/version.properties
      index 54b16db2cb13..f75d5a936bb7 100644
      --- a/buildSrc/version.properties
      +++ b/buildSrc/version.properties
      @@ -1,8 +1,8 @@
       elasticsearch     = 5.0.0
      -lucene            = 5.5.0
      +lucene            = 6.0.0-snapshot-bea235f
       
       # optional dependencies
      -spatial4j         = 0.5
      +spatial4j         = 0.6
       jts               = 1.13
       jackson           = 2.7.1
       log4j             = 1.2.17
      diff --git a/core/build.gradle b/core/build.gradle
      index ac3f421211db..39c1e4367c0b 100644
      --- a/core/build.gradle
      +++ b/core/build.gradle
      @@ -42,6 +42,7 @@ dependencies {
         compile "org.apache.lucene:lucene-queryparser:${versions.lucene}"
         compile "org.apache.lucene:lucene-sandbox:${versions.lucene}"
         compile "org.apache.lucene:lucene-spatial:${versions.lucene}"
      +  compile "org.apache.lucene:lucene-spatial-extras:${versions.lucene}"
         compile "org.apache.lucene:lucene-spatial3d:${versions.lucene}"
         compile "org.apache.lucene:lucene-suggest:${versions.lucene}"
       
      @@ -71,7 +72,7 @@ dependencies {
         compile 'org.hdrhistogram:HdrHistogram:2.1.6'
       
         // lucene spatial
      -  compile "com.spatial4j:spatial4j:${versions.spatial4j}", optional
      +  compile "org.locationtech.spatial4j:spatial4j:${versions.spatial4j}", optional
         compile "com.vividsolutions:jts:${versions.jts}", optional
       
         // logging
      @@ -168,11 +169,6 @@ thirdPartyAudit.excludes = [
         'org.apache.commons.logging.Log', 
         'org.apache.commons.logging.LogFactory',
       
      -  // from org.apache.lucene.sandbox.queries.regex.JakartaRegexpCapabilities$JakartaRegexMatcher (lucene-sandbox)
      -  'org.apache.regexp.CharacterIterator', 
      -  'org.apache.regexp.RE', 
      -  'org.apache.regexp.REProgram', 
      -
         // from org.jboss.netty.handler.ssl.OpenSslEngine (netty)
         'org.apache.tomcat.jni.Buffer', 
         'org.apache.tomcat.jni.Library', 
      @@ -210,7 +206,7 @@ thirdPartyAudit.excludes = [
         'org.jboss.marshalling.MarshallingConfiguration', 
         'org.jboss.marshalling.Unmarshaller', 
       
      -  // from com.spatial4j.core.io.GeoJSONReader (spatial4j)
      +  // from org.locationtech.spatial4j.io.GeoJSONReader (spatial4j)
         'org.noggit.JSONParser', 
       
         // from org.jboss.netty.container.osgi.NettyBundleActivator (netty)
      diff --git a/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java b/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java
      index 798fac01a7a4..4e24944ffac0 100644
      --- a/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java
      +++ b/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java
      @@ -33,7 +33,6 @@ import org.apache.lucene.search.Query;
       import org.apache.lucene.search.TermQuery;
       import org.apache.lucene.util.ArrayUtil;
       import org.apache.lucene.util.InPlaceMergeSorter;
      -import org.apache.lucene.util.ToStringUtils;
       
       import java.io.IOException;
       import java.util.ArrayList;
      @@ -247,14 +246,15 @@ public abstract class BlendedTermQuery extends Query {
                   if (boosts != null) {
                       boost = boosts[i];
                   }
      -            builder.append(ToStringUtils.boost(boost));
      +            if (boost != 1f) {
      +                builder.append('^').append(boost);
      +            }
                   builder.append(", ");
               }
               if (terms.length > 0) {
                   builder.setLength(builder.length() - 2);
               }
               builder.append("])");
      -        builder.append(ToStringUtils.boost(getBoost()));
               return builder.toString();
           }
       
      diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java
      index f153cd53c552..a7c53a56bc41 100644
      --- a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java
      +++ b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java
      @@ -24,6 +24,7 @@ import org.apache.lucene.analysis.TokenStream;
       import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
       import org.apache.lucene.index.Term;
       import org.apache.lucene.search.BooleanClause;
      +import org.apache.lucene.search.BooleanQuery;
       import org.apache.lucene.search.BoostQuery;
       import org.apache.lucene.search.DisjunctionMaxQuery;
       import org.apache.lucene.search.FuzzyQuery;
      @@ -165,7 +166,7 @@ public class MapperQueryParser extends QueryParser {
                       }
                       if (clauses.size() == 0)  // happens for stopwords
                           return null;
      -                return getBooleanQuery(clauses, true);
      +                return getBooleanQueryCoordDisabled(clauses);
                   }
               } else {
                   return getFieldQuerySingle(field, queryText, quoted);
      @@ -267,7 +268,7 @@ public class MapperQueryParser extends QueryParser {
                       }
                       if (clauses.size() == 0)  // happens for stopwords
                           return null;
      -                return getBooleanQuery(clauses, true);
      +                return getBooleanQueryCoordDisabled(clauses);
                   }
               } else {
                   return super.getFieldQuery(field, queryText, slop);
      @@ -318,7 +319,7 @@ public class MapperQueryParser extends QueryParser {
                   }
                   if (clauses.size() == 0)  // happens for stopwords
                       return null;
      -            return getBooleanQuery(clauses, true);
      +            return getBooleanQueryCoordDisabled(clauses);
               }
           }
       
      @@ -380,7 +381,7 @@ public class MapperQueryParser extends QueryParser {
                               clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
                           }
                       }
      -                return getBooleanQuery(clauses, true);
      +                return getBooleanQueryCoordDisabled(clauses);
                   }
               } else {
                   return getFuzzyQuerySingle(field, termStr, minSimilarity);
      @@ -445,7 +446,7 @@ public class MapperQueryParser extends QueryParser {
                       }
                       if (clauses.size() == 0)  // happens for stopwords
                           return null;
      -                return getBooleanQuery(clauses, true);
      +                return getBooleanQueryCoordDisabled(clauses);
                   }
               } else {
                   return getPrefixQuerySingle(field, termStr);
      @@ -520,7 +521,7 @@ public class MapperQueryParser extends QueryParser {
                   for (String token : tlist) {
                       clauses.add(new BooleanClause(super.getPrefixQuery(field, token), BooleanClause.Occur.SHOULD));
                   }
      -            return getBooleanQuery(clauses, true);
      +            return getBooleanQueryCoordDisabled(clauses);
               }
           }
       
      @@ -575,7 +576,7 @@ public class MapperQueryParser extends QueryParser {
                       }
                       if (clauses.size() == 0)  // happens for stopwords
                           return null;
      -                return getBooleanQuery(clauses, true);
      +                return getBooleanQueryCoordDisabled(clauses);
                   }
               } else {
                   return getWildcardQuerySingle(field, termStr);
      @@ -704,7 +705,7 @@ public class MapperQueryParser extends QueryParser {
                       }
                       if (clauses.size() == 0)  // happens for stopwords
                           return null;
      -                return getBooleanQuery(clauses, true);
      +                return getBooleanQueryCoordDisabled(clauses);
                   }
               } else {
                   return getRegexpQuerySingle(field, termStr);
      @@ -739,10 +740,24 @@ public class MapperQueryParser extends QueryParser {
                   setAnalyzer(oldAnalyzer);
               }
           }
      +    
      +    /**
      +     * @deprecated review all use of this, don't rely on coord
      +     */
      +    @Deprecated
      +    protected Query getBooleanQueryCoordDisabled(List clauses) throws ParseException {
      +        BooleanQuery.Builder builder = new BooleanQuery.Builder();
      +        builder.setDisableCoord(true);
      +        for (BooleanClause clause : clauses) {
      +            builder.add(clause);
      +        }
      +        return fixNegativeQueryIfNeeded(builder.build());
      +    }
      +
       
           @Override
      -    protected Query getBooleanQuery(List clauses, boolean disableCoord) throws ParseException {
      -        Query q = super.getBooleanQuery(clauses, disableCoord);
      +    protected Query getBooleanQuery(List clauses) throws ParseException {
      +        Query q = super.getBooleanQuery(clauses);
               if (q == null) {
                   return null;
               }
      @@ -769,7 +784,6 @@ public class MapperQueryParser extends QueryParser {
                   }
                   pq = builder.build();
                   //make sure that the boost hasn't been set beforehand, otherwise we'd lose it
      -            assert q.getBoost() == 1f;
                   assert q instanceof BoostQuery == false;
                   return pq;
               } else if (q instanceof MultiPhraseQuery) {
      diff --git a/core/src/main/java/org/apache/lucene/search/XFilteredDocIdSetIterator.java b/core/src/main/java/org/apache/lucene/search/XFilteredDocIdSetIterator.java
      index 92f2f443f0a0..8d1617d3ab4d 100644
      --- a/core/src/main/java/org/apache/lucene/search/XFilteredDocIdSetIterator.java
      +++ b/core/src/main/java/org/apache/lucene/search/XFilteredDocIdSetIterator.java
      @@ -26,8 +26,7 @@ import java.io.IOException;
       /**
        * Abstract decorator class of a DocIdSetIterator
        * implementation that provides on-demand filter/validation
      - * mechanism on an underlying DocIdSetIterator.  See {@link
      - * FilteredDocIdSet}.
      + * mechanism on an underlying DocIdSetIterator.
        */
       public abstract class XFilteredDocIdSetIterator extends DocIdSetIterator {
         protected DocIdSetIterator _innerIter;
      diff --git a/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java b/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java
      index 11b56bdcfe10..089b649cefef 100644
      --- a/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java
      +++ b/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java
      @@ -87,7 +87,7 @@ public class CustomFieldQuery extends FieldQuery {
                   if (numTerms > 16) {
                       for (Term[] currentPosTerm : terms) {
                           for (Term term : currentPosTerm) {
      -                        super.flatten(new TermQuery(term), reader, flatQueries, orig.getBoost());
      +                        super.flatten(new TermQuery(term), reader, flatQueries, 1F);
                           }
                       }
                       return;
      @@ -104,7 +104,7 @@ public class CustomFieldQuery extends FieldQuery {
                       queryBuilder.add(terms.get(i)[termsIdx[i]], pos[i]);
                   }
                   Query query = queryBuilder.build();
      -            this.flatten(query, reader, flatQueries, orig.getBoost());
      +            this.flatten(query, reader, flatQueries, 1F);
               } else {
                   Term[] t = terms.get(currentPos);
                   for (int i = 0; i < t.length; i++) {
      diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java
      index 1c9a5464bb2e..3751896d3616 100644
      --- a/core/src/main/java/org/elasticsearch/Version.java
      +++ b/core/src/main/java/org/elasticsearch/Version.java
      @@ -39,8 +39,9 @@ public class Version {
           // AA values below 50 are beta builds, and below 99 are RC builds, with 99 indicating a release
           // the (internal) format of the id is there so we can easily do after/before checks on the id
       
      -    // NOTE: indexes created with 3.6 use this constant for e.g. analysis chain emulation (imperfect)
      -    public static final org.apache.lucene.util.Version LUCENE_3_EMULATION_VERSION = org.apache.lucene.util.Version.LUCENE_4_0_0;
      +    // NOTE: ancient indexes created before 5.0 use this constant for e.g. analysis chain emulation (imperfect)
       +    // it's named lucene 3, but it also covers lucene 4 or anything else we no longer support.
      +    public static final org.apache.lucene.util.Version LUCENE_3_EMULATION_VERSION = org.apache.lucene.util.Version.LUCENE_5_0_0;
       
           public static final int V_0_18_0_ID = /*00*/180099;
           public static final Version V_0_18_0 = new Version(V_0_18_0_ID, LUCENE_3_EMULATION_VERSION);
      @@ -117,130 +118,130 @@ public class Version {
           public static final Version V_0_20_6 = new Version(V_0_20_6_ID, LUCENE_3_EMULATION_VERSION);
       
           public static final int V_0_90_0_Beta1_ID = /*00*/900001;
      -    public static final Version V_0_90_0_Beta1 = new Version(V_0_90_0_Beta1_ID, org.apache.lucene.util.Version.LUCENE_4_1);
      +    public static final Version V_0_90_0_Beta1 = new Version(V_0_90_0_Beta1_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_0_90_0_RC1_ID = /*00*/900051;
      -    public static final Version V_0_90_0_RC1 = new Version(V_0_90_0_RC1_ID, org.apache.lucene.util.Version.LUCENE_4_1);
      +    public static final Version V_0_90_0_RC1 = new Version(V_0_90_0_RC1_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_0_90_0_RC2_ID = /*00*/900052;
      -    public static final Version V_0_90_0_RC2 = new Version(V_0_90_0_RC2_ID, org.apache.lucene.util.Version.LUCENE_4_2);
      +    public static final Version V_0_90_0_RC2 = new Version(V_0_90_0_RC2_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_0_90_0_ID = /*00*/900099;
      -    public static final Version V_0_90_0 = new Version(V_0_90_0_ID, org.apache.lucene.util.Version.LUCENE_4_2);
      +    public static final Version V_0_90_0 = new Version(V_0_90_0_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_0_90_1_ID = /*00*/900199;
      -    public static final Version V_0_90_1 = new Version(V_0_90_1_ID, org.apache.lucene.util.Version.LUCENE_4_3);
      +    public static final Version V_0_90_1 = new Version(V_0_90_1_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_0_90_2_ID = /*00*/900299;
      -    public static final Version V_0_90_2 = new Version(V_0_90_2_ID, org.apache.lucene.util.Version.LUCENE_4_3);
      +    public static final Version V_0_90_2 = new Version(V_0_90_2_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_0_90_3_ID = /*00*/900399;
      -    public static final Version V_0_90_3 = new Version(V_0_90_3_ID, org.apache.lucene.util.Version.LUCENE_4_4);
      +    public static final Version V_0_90_3 = new Version(V_0_90_3_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_0_90_4_ID = /*00*/900499;
      -    public static final Version V_0_90_4 = new Version(V_0_90_4_ID, org.apache.lucene.util.Version.LUCENE_4_4);
      +    public static final Version V_0_90_4 = new Version(V_0_90_4_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_0_90_5_ID = /*00*/900599;
      -    public static final Version V_0_90_5 = new Version(V_0_90_5_ID, org.apache.lucene.util.Version.LUCENE_4_4);
      +    public static final Version V_0_90_5 = new Version(V_0_90_5_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_0_90_6_ID = /*00*/900699;
      -    public static final Version V_0_90_6 = new Version(V_0_90_6_ID, org.apache.lucene.util.Version.LUCENE_4_5);
      +    public static final Version V_0_90_6 = new Version(V_0_90_6_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_0_90_7_ID = /*00*/900799;
      -    public static final Version V_0_90_7 = new Version(V_0_90_7_ID, org.apache.lucene.util.Version.LUCENE_4_5);
      +    public static final Version V_0_90_7 = new Version(V_0_90_7_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_0_90_8_ID = /*00*/900899;
      -    public static final Version V_0_90_8 = new Version(V_0_90_8_ID, org.apache.lucene.util.Version.LUCENE_4_6);
      +    public static final Version V_0_90_8 = new Version(V_0_90_8_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_0_90_9_ID = /*00*/900999;
      -    public static final Version V_0_90_9 = new Version(V_0_90_9_ID, org.apache.lucene.util.Version.LUCENE_4_6);
      +    public static final Version V_0_90_9 = new Version(V_0_90_9_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_0_90_10_ID = /*00*/901099;
      -    public static final Version V_0_90_10 = new Version(V_0_90_10_ID, org.apache.lucene.util.Version.LUCENE_4_6);
      +    public static final Version V_0_90_10 = new Version(V_0_90_10_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_0_90_11_ID = /*00*/901199;
      -    public static final Version V_0_90_11 = new Version(V_0_90_11_ID, org.apache.lucene.util.Version.LUCENE_4_6);
      +    public static final Version V_0_90_11 = new Version(V_0_90_11_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_0_90_12_ID = /*00*/901299;
      -    public static final Version V_0_90_12 = new Version(V_0_90_12_ID, org.apache.lucene.util.Version.LUCENE_4_6);
      +    public static final Version V_0_90_12 = new Version(V_0_90_12_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_0_90_13_ID = /*00*/901399;
      -    public static final Version V_0_90_13 = new Version(V_0_90_13_ID, org.apache.lucene.util.Version.LUCENE_4_6);
      +    public static final Version V_0_90_13 = new Version(V_0_90_13_ID, LUCENE_3_EMULATION_VERSION);
       
           public static final int V_1_0_0_Beta1_ID = 1000001;
      -    public static final Version V_1_0_0_Beta1 = new Version(V_1_0_0_Beta1_ID, org.apache.lucene.util.Version.LUCENE_4_5);
      +    public static final Version V_1_0_0_Beta1 = new Version(V_1_0_0_Beta1_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_0_0_Beta2_ID = 1000002;
      -    public static final Version V_1_0_0_Beta2 = new Version(V_1_0_0_Beta2_ID, org.apache.lucene.util.Version.LUCENE_4_6);
      +    public static final Version V_1_0_0_Beta2 = new Version(V_1_0_0_Beta2_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_0_0_RC1_ID = 1000051;
      -    public static final Version V_1_0_0_RC1 = new Version(V_1_0_0_RC1_ID, org.apache.lucene.util.Version.LUCENE_4_6);
      +    public static final Version V_1_0_0_RC1 = new Version(V_1_0_0_RC1_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_0_0_RC2_ID = 1000052;
      -    public static final Version V_1_0_0_RC2 = new Version(V_1_0_0_RC2_ID, org.apache.lucene.util.Version.LUCENE_4_6);
      +    public static final Version V_1_0_0_RC2 = new Version(V_1_0_0_RC2_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_0_0_ID = 1000099;
      -    public static final Version V_1_0_0 = new Version(V_1_0_0_ID, org.apache.lucene.util.Version.LUCENE_4_6);
      +    public static final Version V_1_0_0 = new Version(V_1_0_0_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_0_1_ID = 1000199;
      -    public static final Version V_1_0_1 = new Version(V_1_0_1_ID, org.apache.lucene.util.Version.LUCENE_4_6);
      +    public static final Version V_1_0_1 = new Version(V_1_0_1_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_0_2_ID = 1000299;
      -    public static final Version V_1_0_2 = new Version(V_1_0_2_ID, org.apache.lucene.util.Version.LUCENE_4_6);
      +    public static final Version V_1_0_2 = new Version(V_1_0_2_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_0_3_ID = 1000399;
      -    public static final Version V_1_0_3 = new Version(V_1_0_3_ID, org.apache.lucene.util.Version.LUCENE_4_6);
      +    public static final Version V_1_0_3 = new Version(V_1_0_3_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_1_0_ID = 1010099;
      -    public static final Version V_1_1_0 = new Version(V_1_1_0_ID, org.apache.lucene.util.Version.LUCENE_4_7);
      +    public static final Version V_1_1_0 = new Version(V_1_1_0_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_1_1_ID = 1010199;
      -    public static final Version V_1_1_1 = new Version(V_1_1_1_ID, org.apache.lucene.util.Version.LUCENE_4_7);
      +    public static final Version V_1_1_1 = new Version(V_1_1_1_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_1_2_ID = 1010299;
      -    public static final Version V_1_1_2 = new Version(V_1_1_2_ID, org.apache.lucene.util.Version.LUCENE_4_7);
      +    public static final Version V_1_1_2 = new Version(V_1_1_2_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_2_0_ID = 1020099;
      -    public static final Version V_1_2_0 = new Version(V_1_2_0_ID, org.apache.lucene.util.Version.LUCENE_4_8);
      +    public static final Version V_1_2_0 = new Version(V_1_2_0_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_2_1_ID = 1020199;
      -    public static final Version V_1_2_1 = new Version(V_1_2_1_ID, org.apache.lucene.util.Version.LUCENE_4_8);
      +    public static final Version V_1_2_1 = new Version(V_1_2_1_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_2_2_ID = 1020299;
      -    public static final Version V_1_2_2 = new Version(V_1_2_2_ID, org.apache.lucene.util.Version.LUCENE_4_8);
      +    public static final Version V_1_2_2 = new Version(V_1_2_2_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_2_3_ID = 1020399;
      -    public static final Version V_1_2_3 = new Version(V_1_2_3_ID, org.apache.lucene.util.Version.LUCENE_4_8);
      +    public static final Version V_1_2_3 = new Version(V_1_2_3_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_2_4_ID = 1020499;
      -    public static final Version V_1_2_4 = new Version(V_1_2_4_ID, org.apache.lucene.util.Version.LUCENE_4_8);
      +    public static final Version V_1_2_4 = new Version(V_1_2_4_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_3_0_ID = 1030099;
      -    public static final Version V_1_3_0 = new Version(V_1_3_0_ID, org.apache.lucene.util.Version.LUCENE_4_9);
      +    public static final Version V_1_3_0 = new Version(V_1_3_0_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_3_1_ID = 1030199;
      -    public static final Version V_1_3_1 = new Version(V_1_3_1_ID, org.apache.lucene.util.Version.LUCENE_4_9);
      +    public static final Version V_1_3_1 = new Version(V_1_3_1_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_3_2_ID = 1030299;
      -    public static final Version V_1_3_2 = new Version(V_1_3_2_ID, org.apache.lucene.util.Version.LUCENE_4_9);
      +    public static final Version V_1_3_2 = new Version(V_1_3_2_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_3_3_ID = 1030399;
      -    public static final Version V_1_3_3 = new Version(V_1_3_3_ID, org.apache.lucene.util.Version.LUCENE_4_9);
      +    public static final Version V_1_3_3 = new Version(V_1_3_3_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_3_4_ID = 1030499;
      -    public static final Version V_1_3_4 = new Version(V_1_3_4_ID, org.apache.lucene.util.Version.LUCENE_4_9);
      +    public static final Version V_1_3_4 = new Version(V_1_3_4_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_3_5_ID = 1030599;
      -    public static final Version V_1_3_5 = new Version(V_1_3_5_ID, org.apache.lucene.util.Version.LUCENE_4_9);
      +    public static final Version V_1_3_5 = new Version(V_1_3_5_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_3_6_ID = 1030699;
      -    public static final Version V_1_3_6 = new Version(V_1_3_6_ID, org.apache.lucene.util.Version.LUCENE_4_9);
      +    public static final Version V_1_3_6 = new Version(V_1_3_6_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_3_7_ID = 1030799;
      -    public static final Version V_1_3_7 = new Version(V_1_3_7_ID, org.apache.lucene.util.Version.LUCENE_4_9);
      +    public static final Version V_1_3_7 = new Version(V_1_3_7_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_3_8_ID = 1030899;
      -    public static final Version V_1_3_8 = new Version(V_1_3_8_ID, org.apache.lucene.util.Version.LUCENE_4_9);
      +    public static final Version V_1_3_8 = new Version(V_1_3_8_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_3_9_ID = 1030999;
      -    public static final Version V_1_3_9 = new Version(V_1_3_9_ID, org.apache.lucene.util.Version.LUCENE_4_9);
      +    public static final Version V_1_3_9 = new Version(V_1_3_9_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_4_0_Beta1_ID = 1040001;
      -    public static final Version V_1_4_0_Beta1 = new Version(V_1_4_0_Beta1_ID, org.apache.lucene.util.Version.LUCENE_4_10_1);
      +    public static final Version V_1_4_0_Beta1 = new Version(V_1_4_0_Beta1_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_4_0_ID = 1040099;
      -    public static final Version V_1_4_0 = new Version(V_1_4_0_ID, org.apache.lucene.util.Version.LUCENE_4_10_2);
      +    public static final Version V_1_4_0 = new Version(V_1_4_0_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_4_1_ID = 1040199;
      -    public static final Version V_1_4_1 = new Version(V_1_4_1_ID, org.apache.lucene.util.Version.LUCENE_4_10_2);
      +    public static final Version V_1_4_1 = new Version(V_1_4_1_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_4_2_ID = 1040299;
      -    public static final Version V_1_4_2 = new Version(V_1_4_2_ID, org.apache.lucene.util.Version.LUCENE_4_10_2);
      +    public static final Version V_1_4_2 = new Version(V_1_4_2_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_4_3_ID = 1040399;
      -    public static final Version V_1_4_3 = new Version(V_1_4_3_ID, org.apache.lucene.util.Version.LUCENE_4_10_3);
      +    public static final Version V_1_4_3 = new Version(V_1_4_3_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_4_4_ID = 1040499;
      -    public static final Version V_1_4_4 = new Version(V_1_4_4_ID, org.apache.lucene.util.Version.LUCENE_4_10_3);
      +    public static final Version V_1_4_4 = new Version(V_1_4_4_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_4_5_ID = 1040599;
      -    public static final Version V_1_4_5 = new Version(V_1_4_5_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
      +    public static final Version V_1_4_5 = new Version(V_1_4_5_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_5_0_ID = 1050099;
      -    public static final Version V_1_5_0 = new Version(V_1_5_0_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
      +    public static final Version V_1_5_0 = new Version(V_1_5_0_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_5_1_ID = 1050199;
      -    public static final Version V_1_5_1 = new Version(V_1_5_1_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
      +    public static final Version V_1_5_1 = new Version(V_1_5_1_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_5_2_ID = 1050299;
      -    public static final Version V_1_5_2 = new Version(V_1_5_2_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
      +    public static final Version V_1_5_2 = new Version(V_1_5_2_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_6_0_ID = 1060099;
      -    public static final Version V_1_6_0 = new Version(V_1_6_0_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
      +    public static final Version V_1_6_0 = new Version(V_1_6_0_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_6_1_ID = 1060199;
      -    public static final Version V_1_6_1 = new Version(V_1_6_1_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
      +    public static final Version V_1_6_1 = new Version(V_1_6_1_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_6_2_ID = 1060299;
      -    public static final Version V_1_6_2 = new Version(V_1_6_2_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
      +    public static final Version V_1_6_2 = new Version(V_1_6_2_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_7_0_ID = 1070099;
      -    public static final Version V_1_7_0 = new Version(V_1_7_0_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
      +    public static final Version V_1_7_0 = new Version(V_1_7_0_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_7_1_ID = 1070199;
      -    public static final Version V_1_7_1 = new Version(V_1_7_1_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
      +    public static final Version V_1_7_1 = new Version(V_1_7_1_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_7_2_ID = 1070299;
      -    public static final Version V_1_7_2 = new Version(V_1_7_2_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
      +    public static final Version V_1_7_2 = new Version(V_1_7_2_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_7_3_ID = 1070399;
      -    public static final Version V_1_7_3 = new Version(V_1_7_3_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
      +    public static final Version V_1_7_3 = new Version(V_1_7_3_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_7_4_ID = 1070499;
      -    public static final Version V_1_7_4 = new Version(V_1_7_4_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
      +    public static final Version V_1_7_4 = new Version(V_1_7_4_ID, LUCENE_3_EMULATION_VERSION);
           public static final int V_1_7_5_ID = 1070599;
      -    public static final Version V_1_7_5 = new Version(V_1_7_5_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
      +    public static final Version V_1_7_5 = new Version(V_1_7_5_ID, LUCENE_3_EMULATION_VERSION);
       
           public static final int V_2_0_0_beta1_ID = 2000001;
           public static final Version V_2_0_0_beta1 = new Version(V_2_0_0_beta1_ID, org.apache.lucene.util.Version.LUCENE_5_2_1);
      @@ -265,7 +266,7 @@ public class Version {
           public static final int V_2_3_0_ID = 2030099;
           public static final Version V_2_3_0 = new Version(V_2_3_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
           public static final int V_5_0_0_ID = 5000099;
      -    public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
      +    public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
           public static final Version CURRENT = V_5_0_0;
       
           static {
      diff --git a/core/src/main/java/org/elasticsearch/common/geo/ShapesAvailability.java b/core/src/main/java/org/elasticsearch/common/geo/ShapesAvailability.java
      index 8af203f2ce8e..dff4277e96ff 100644
      --- a/core/src/main/java/org/elasticsearch/common/geo/ShapesAvailability.java
      +++ b/core/src/main/java/org/elasticsearch/common/geo/ShapesAvailability.java
      @@ -29,7 +29,7 @@ public class ShapesAvailability {
           static {
               boolean xSPATIAL4J_AVAILABLE;
               try {
      -            Class.forName("com.spatial4j.core.shape.impl.PointImpl");
      +            Class.forName("org.locationtech.spatial4j.shape.impl.PointImpl");
                   xSPATIAL4J_AVAILABLE = true;
               } catch (Throwable t) {
                   xSPATIAL4J_AVAILABLE = false;
      diff --git a/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java b/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java
      index 42650275b4bf..7ee2bfbd42ff 100644
      --- a/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java
      +++ b/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java
      @@ -19,9 +19,9 @@
       
       package org.elasticsearch.common.geo;
       
      -import com.spatial4j.core.context.SpatialContext;
      -import com.spatial4j.core.shape.Shape;
      -import com.spatial4j.core.shape.ShapeCollection;
      +import org.locationtech.spatial4j.context.SpatialContext;
      +import org.locationtech.spatial4j.shape.Shape;
      +import org.locationtech.spatial4j.shape.ShapeCollection;
       
       import java.util.List;
       
      diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java
      index bda0106f2b6c..97ef6561c9b7 100644
      --- a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java
      +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java
      @@ -19,7 +19,7 @@
       
       package org.elasticsearch.common.geo.builders;
       
      -import com.spatial4j.core.shape.Circle;
      +import org.locationtech.spatial4j.shape.Circle;
       import com.vividsolutions.jts.geom.Coordinate;
       
       import org.elasticsearch.common.io.stream.StreamInput;
      diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java
      index 426cbbf78004..ab997387ea1b 100644
      --- a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java
      +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java
      @@ -19,7 +19,7 @@
       
       package org.elasticsearch.common.geo.builders;
       
      -import com.spatial4j.core.shape.Rectangle;
      +import org.locationtech.spatial4j.shape.Rectangle;
       import com.vividsolutions.jts.geom.Coordinate;
       
       import org.elasticsearch.common.io.stream.StreamInput;
      diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java
      index 420f61a67992..d21f47cf0535 100644
      --- a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java
      +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java
      @@ -19,7 +19,7 @@
       
       package org.elasticsearch.common.geo.builders;
       
      -import com.spatial4j.core.shape.Shape;
      +import org.locationtech.spatial4j.shape.Shape;
       
       import org.elasticsearch.ElasticsearchException;
       import org.elasticsearch.common.geo.XShapeCollection;
      diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java
      index 8c2870e1e094..cbc9002c7851 100644
      --- a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java
      +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java
      @@ -19,7 +19,7 @@
       
       package org.elasticsearch.common.geo.builders;
       
      -import com.spatial4j.core.shape.Shape;
      +import org.locationtech.spatial4j.shape.Shape;
       import com.vividsolutions.jts.geom.Coordinate;
       import com.vividsolutions.jts.geom.Geometry;
       import com.vividsolutions.jts.geom.GeometryFactory;
      diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java
      index e69c0abe4f8a..51f4fd232c54 100644
      --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java
      +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java
      @@ -19,7 +19,7 @@
       
       package org.elasticsearch.common.geo.builders;
       
      -import com.spatial4j.core.shape.Shape;
      +import org.locationtech.spatial4j.shape.Shape;
       import com.vividsolutions.jts.geom.Coordinate;
       import com.vividsolutions.jts.geom.Geometry;
       import com.vividsolutions.jts.geom.LineString;
      diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java
      index 12b16254957e..b8f2c8137efe 100644
      --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java
      +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java
      @@ -19,8 +19,8 @@
       
       package org.elasticsearch.common.geo.builders;
       
      -import com.spatial4j.core.shape.Point;
      -import com.spatial4j.core.shape.Shape;
      +import org.locationtech.spatial4j.shape.Point;
      +import org.locationtech.spatial4j.shape.Shape;
       import com.vividsolutions.jts.geom.Coordinate;
       
       import org.elasticsearch.common.geo.XShapeCollection;
      diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java
      index 394892d909d6..6ee679b73087 100644
      --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java
      +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java
      @@ -19,7 +19,7 @@
       
       package org.elasticsearch.common.geo.builders;
       
      -import com.spatial4j.core.shape.Shape;
      +import org.locationtech.spatial4j.shape.Shape;
       import com.vividsolutions.jts.geom.Coordinate;
       
       import org.elasticsearch.common.geo.XShapeCollection;
      diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java
      index 1cee6525e7a7..30b7e370f22d 100644
      --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java
      +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java
      @@ -19,7 +19,7 @@
       
       package org.elasticsearch.common.geo.builders;
       
      -import com.spatial4j.core.shape.Point;
      +import org.locationtech.spatial4j.shape.Point;
       import com.vividsolutions.jts.geom.Coordinate;
       
       import org.elasticsearch.common.io.stream.StreamInput;
      diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java
      index ab480cfbc24c..4a9c84410723 100644
      --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java
      +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java
      @@ -19,8 +19,8 @@
       
       package org.elasticsearch.common.geo.builders;
       
      -import com.spatial4j.core.exception.InvalidShapeException;
      -import com.spatial4j.core.shape.Shape;
      +import org.locationtech.spatial4j.exception.InvalidShapeException;
      +import org.locationtech.spatial4j.shape.Shape;
       import com.vividsolutions.jts.geom.Coordinate;
       import com.vividsolutions.jts.geom.Geometry;
       import com.vividsolutions.jts.geom.GeometryFactory;
      diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java
      index 8c3ea3f3261c..d0c739645759 100644
      --- a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java
      +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java
      @@ -19,10 +19,10 @@
       
       package org.elasticsearch.common.geo.builders;
       
      -import com.spatial4j.core.context.jts.JtsSpatialContext;
      -import com.spatial4j.core.exception.InvalidShapeException;
      -import com.spatial4j.core.shape.Shape;
      -import com.spatial4j.core.shape.jts.JtsGeometry;
      +import org.locationtech.spatial4j.context.jts.JtsSpatialContext;
      +import org.locationtech.spatial4j.exception.InvalidShapeException;
      +import org.locationtech.spatial4j.shape.Shape;
      +import org.locationtech.spatial4j.shape.jts.JtsGeometry;
       import com.vividsolutions.jts.geom.Coordinate;
       import com.vividsolutions.jts.geom.Geometry;
       import com.vividsolutions.jts.geom.GeometryFactory;
      @@ -81,9 +81,9 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri
            * this normally isn't allowed.
            */
           protected final boolean multiPolygonMayOverlap = false;
      -    /** @see com.spatial4j.core.shape.jts.JtsGeometry#validate() */
      +    /** @see org.locationtech.spatial4j.shape.jts.JtsGeometry#validate() */
           protected final boolean autoValidateJtsGeometry = true;
      -    /** @see com.spatial4j.core.shape.jts.JtsGeometry#index() */
      +    /** @see org.locationtech.spatial4j.shape.jts.JtsGeometry#index() */
           protected final boolean autoIndexJtsGeometry = true;//may want to turn off once SpatialStrategy impls do it.
       
           protected ShapeBuilder() {
      diff --git a/core/src/main/java/org/elasticsearch/common/lucene/IndexCacheableQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/IndexCacheableQuery.java
      deleted file mode 100644
      index d31cd3835ecb..000000000000
      --- a/core/src/main/java/org/elasticsearch/common/lucene/IndexCacheableQuery.java
      +++ /dev/null
      @@ -1,74 +0,0 @@
      -/*
      - * Licensed to Elasticsearch under one or more contributor
      - * license agreements. See the NOTICE file distributed with
      - * this work for additional information regarding copyright
      - * ownership. Elasticsearch licenses this file to you under
      - * the Apache License, Version 2.0 (the "License"); you may
      - * not use this file except in compliance with the License.
      - * You may obtain a copy of the License at
      - *
      - *    http://www.apache.org/licenses/LICENSE-2.0
      - *
      - * Unless required by applicable law or agreed to in writing,
      - * software distributed under the License is distributed on an
      - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
      - * KIND, either express or implied.  See the License for the
      - * specific language governing permissions and limitations
      - * under the License.
      - */
      -
      -package org.elasticsearch.common.lucene;
      -
      -import org.apache.lucene.index.IndexReader;
      -import org.apache.lucene.search.IndexSearcher;
      -import org.apache.lucene.search.Query;
      -import org.apache.lucene.search.Weight;
      -
      -import java.io.IOException;
      -import java.util.Objects;
      -
      -/**
      - * Base implementation for a query which is cacheable at the index level but
      - * not the segment level as usually expected.
      - */
      -public abstract class IndexCacheableQuery extends Query {
      -
      -    private Object readerCacheKey;
      -
      -    @Override
      -    public Query rewrite(IndexReader reader) throws IOException {
      -        if (reader.getCoreCacheKey() != this.readerCacheKey) {
      -            IndexCacheableQuery rewritten = (IndexCacheableQuery) clone();
      -            rewritten.readerCacheKey = reader.getCoreCacheKey();
      -            return rewritten;
      -        }
      -        return super.rewrite(reader);
      -    }
      -
      -    @Override
      -    public boolean equals(Object obj) {
      -        return super.equals(obj)
      -                && readerCacheKey == ((IndexCacheableQuery) obj).readerCacheKey;
      -    }
      -
      -    @Override
      -    public int hashCode() {
      -        return 31 * super.hashCode() + Objects.hashCode(readerCacheKey);
      -    }
      -
      -    @Override
      -    public final Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
      -        if (readerCacheKey == null) {
      -            throw new IllegalStateException("Rewrite first");
      -        }
      -        if (readerCacheKey != searcher.getIndexReader().getCoreCacheKey()) {
      -            throw new IllegalStateException("Must create weight on the same reader which has been used for rewriting");
      -        }
      -        return doCreateWeight(searcher, needsScores);
      -    }
      -
      -    /** Create a {@link Weight} for this query.
      -     *  @see Query#createWeight(IndexSearcher, boolean)
      -     */
      -    public abstract Weight doCreateWeight(IndexSearcher searcher, boolean needsScores) throws IOException;
      -}
      diff --git a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java
      index 43051f95b9a5..39f34ad867e5 100644
      --- a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java
      +++ b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java
      @@ -88,7 +88,7 @@ import java.util.Objects;
       public class Lucene {
           public static final String LATEST_DOC_VALUES_FORMAT = "Lucene54";
           public static final String LATEST_POSTINGS_FORMAT = "Lucene50";
      -    public static final String LATEST_CODEC = "Lucene54";
      +    public static final String LATEST_CODEC = "Lucene60";
       
           static {
               Deprecated annotation = PostingsFormat.forName(LATEST_POSTINGS_FORMAT).getClass().getAnnotation(Deprecated.class);
      @@ -236,13 +236,8 @@ public class Lucene {
                   protected Object doBody(String segmentFileName) throws IOException {
                       try (IndexInput input = directory.openInput(segmentFileName, IOContext.READ)) {
                           final int format = input.readInt();
      -                    final int actualFormat;
                           if (format == CodecUtil.CODEC_MAGIC) {
      -                        // 4.0+
      -                        actualFormat = CodecUtil.checkHeaderNoMagic(input, "segments", SegmentInfos.VERSION_40, Integer.MAX_VALUE);
      -                        if (actualFormat >= SegmentInfos.VERSION_48) {
      -                            CodecUtil.checksumEntireFile(input);
      -                        }
      +                        CodecUtil.checksumEntireFile(input);
                           }
                           // legacy....
                       }
      @@ -382,7 +377,7 @@ public class Lucene {
                           writeMissingValue(out, comparatorSource.missingValue(sortField.getReverse()));
                       } else {
                           writeSortType(out, sortField.getType());
      -                    writeMissingValue(out, sortField.missingValue);
      +                    writeMissingValue(out, sortField.getMissingValue());
                       }
                       out.writeBoolean(sortField.getReverse());
                   }
      @@ -684,7 +679,7 @@ public class Lucene {
                   segmentsFileName = infos.getSegmentsFileName();
                   this.dir = dir;
                   userData = infos.getUserData();
      -            files = Collections.unmodifiableCollection(infos.files(dir, true));
      +            files = Collections.unmodifiableCollection(infos.files(true));
                   generation = infos.getGeneration();
                   segmentCount = infos.size();
               }
      diff --git a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java
      index e0e03b18e127..9851ac12a1a4 100644
      --- a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java
      +++ b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java
      @@ -226,7 +226,7 @@ public final class AllTermQuery extends Query {
       
           @Override
           public String toString(String field) {
      -        return new TermQuery(term).toString(field) + ToStringUtils.boost(getBoost());
      +        return new TermQuery(term).toString(field);
           }
       
       }
      diff --git a/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java b/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java
      index 83de725a83a2..971cbdafffed 100644
      --- a/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java
      +++ b/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java
      @@ -30,7 +30,6 @@ import org.apache.lucene.search.IndexSearcher;
       import org.apache.lucene.search.Query;
       import org.apache.lucene.search.Scorer;
       import org.apache.lucene.search.Weight;
      -import org.apache.lucene.util.BitDocIdSet;
       import org.apache.lucene.util.BitSet;
       import org.apache.lucene.util.Bits;
       import org.apache.lucene.util.BytesRef;
      @@ -118,9 +117,7 @@ public class FilterableTermsEnum extends TermsEnum {
                           };
                       }
       
      -                BitDocIdSet.Builder builder = new BitDocIdSet.Builder(context.reader().maxDoc());
      -                builder.or(docs);
      -                bits = builder.build().bits();
      +                bits = BitSet.of(docs, context.reader().maxDoc());
       
                       // Count how many docs are in our filtered set
                       // TODO make this lazy-loaded only for those that need it?
      diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java
      index d1efdc3ede21..754d76fed271 100644
      --- a/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java
      +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java
      @@ -30,7 +30,6 @@ import org.apache.lucene.search.MultiPhraseQuery;
       import org.apache.lucene.search.Query;
       import org.apache.lucene.util.BytesRef;
       import org.apache.lucene.util.StringHelper;
      -import org.apache.lucene.util.ToStringUtils;
       
       import java.io.IOException;
       import java.util.ArrayList;
      @@ -51,7 +50,7 @@ public class MultiPhrasePrefixQuery extends Query {
           /**
            * Sets the phrase slop for this query.
            *
      -     * @see org.apache.lucene.search.PhraseQuery#setSlop(int)
      +     * @see org.apache.lucene.search.PhraseQuery.Builder#setSlop(int)
            */
           public void setSlop(int s) {
               slop = s;
      @@ -64,7 +63,7 @@ public class MultiPhrasePrefixQuery extends Query {
           /**
            * Sets the phrase slop for this query.
            *
      -     * @see org.apache.lucene.search.PhraseQuery#getSlop()
      +     * @see org.apache.lucene.search.PhraseQuery.Builder#getSlop()
            */
           public int getSlop() {
               return slop;
      @@ -73,7 +72,7 @@ public class MultiPhrasePrefixQuery extends Query {
           /**
            * Add a single term at the next position in the phrase.
            *
      -     * @see org.apache.lucene.search.PhraseQuery#add(Term)
      +     * @see org.apache.lucene.search.PhraseQuery.Builder#add(Term)
            */
           public void add(Term term) {
               add(new Term[]{term});
      @@ -83,7 +82,7 @@ public class MultiPhrasePrefixQuery extends Query {
            * Add multiple terms at the next position in the phrase.  Any of the terms
            * may match.
            *
      -     * @see org.apache.lucene.search.PhraseQuery#add(Term)
      +     * @see org.apache.lucene.search.PhraseQuery.Builder#add(Term)
            */
           public void add(Term[] terms) {
               int position = 0;
      @@ -98,7 +97,7 @@ public class MultiPhrasePrefixQuery extends Query {
            *
            * @param terms the terms
            * @param position the position of the terms provided as argument
      -     * @see org.apache.lucene.search.PhraseQuery#add(Term, int)
      +     * @see org.apache.lucene.search.PhraseQuery.Builder#add(Term, int)
            */
           public void add(Term[] terms, int position) {
               if (termArrays.size() == 0)
      @@ -231,8 +230,6 @@ public class MultiPhrasePrefixQuery extends Query {
                   buffer.append(slop);
               }
       
      -        buffer.append(ToStringUtils.boost(getBoost()));
      -
               return buffer.toString();
           }
       
      diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java
      index 73c3fc9400df..53ee2295ae4c 100644
      --- a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java
      +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java
      @@ -23,7 +23,6 @@ import org.apache.lucene.index.Term;
       import org.apache.lucene.search.BooleanClause;
       import org.apache.lucene.search.BooleanClause.Occur;
       import org.apache.lucene.search.BooleanQuery;
      -import org.apache.lucene.search.BoostQuery;
       import org.apache.lucene.search.ConstantScoreQuery;
       import org.apache.lucene.search.MatchAllDocsQuery;
       import org.apache.lucene.search.PrefixQuery;
      @@ -132,11 +131,7 @@ public class Queries {
                       builder.add(clause);
                   }
                   builder.setMinimumNumberShouldMatch(msm);
      -            BooleanQuery bq = builder.build();
      -            if (query.getBoost() != 1f) {
      -                return new BoostQuery(bq, query.getBoost());
      -            }
      -            return bq;
      +            return builder.build();
               } else {
                   return query;
               }
      diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java
      index a7b7300c9b6e..54e8c0e34884 100644
      --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java
      +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java
      @@ -29,7 +29,6 @@ import org.apache.lucene.search.Query;
       import org.apache.lucene.search.Scorer;
       import org.apache.lucene.search.Weight;
       import org.apache.lucene.util.Bits;
      -import org.apache.lucene.util.ToStringUtils;
       import org.elasticsearch.common.io.stream.StreamInput;
       import org.elasticsearch.common.io.stream.StreamOutput;
       import org.elasticsearch.common.io.stream.Writeable;
      @@ -102,7 +101,7 @@ public class FiltersFunctionScoreQuery extends Query {
               }
           }
       
      -    Query subQuery;
      +    final Query subQuery;
           final FilterFunction[] filterFunctions;
           final ScoreMode scoreMode;
           final float maxBoost;
      @@ -136,9 +135,7 @@ public class FiltersFunctionScoreQuery extends Query {
               Query newQ = subQuery.rewrite(reader);
               if (newQ == subQuery)
                   return this;
      -        FiltersFunctionScoreQuery bq = (FiltersFunctionScoreQuery) this.clone();
      -        bq.subQuery = newQ;
      -        return bq;
      +        return new FiltersFunctionScoreQuery(newQ, scoreMode, filterFunctions, maxBoost, minScore, combineFunction);
           }
       
           @Override
      @@ -355,7 +352,6 @@ public class FiltersFunctionScoreQuery extends Query {
                   sb.append("{filter(").append(filterFunction.filter).append("), function [").append(filterFunction.function).append("]}");
               }
               sb.append("])");
      -        sb.append(ToStringUtils.boost(getBoost()));
               return sb.toString();
           }
       
      diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java
      index 3cf4f3e48f71..646076a3a17f 100644
      --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java
      +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java
      @@ -28,7 +28,6 @@ import org.apache.lucene.search.IndexSearcher;
       import org.apache.lucene.search.Query;
       import org.apache.lucene.search.Scorer;
       import org.apache.lucene.search.Weight;
      -import org.apache.lucene.util.ToStringUtils;
       
       import java.io.IOException;
       import java.util.Objects;
      @@ -41,7 +40,7 @@ public class FunctionScoreQuery extends Query {
       
           public static final float DEFAULT_MAX_BOOST = Float.MAX_VALUE;
       
      -    Query subQuery;
      +    final Query subQuery;
           final ScoreFunction function;
           final float maxBoost;
           final CombineFunction combineFunction;
      @@ -84,9 +83,7 @@ public class FunctionScoreQuery extends Query {
               if (newQ == subQuery) {
                   return this;
               }
      -        FunctionScoreQuery bq = (FunctionScoreQuery) this.clone();
      -        bq.subQuery = newQ;
      -        return bq;
      +        return new FunctionScoreQuery(newQ, function, minScore, combineFunction, maxBoost);
           }
       
           @Override
      @@ -205,7 +202,6 @@ public class FunctionScoreQuery extends Query {
           public String toString(String field) {
               StringBuilder sb = new StringBuilder();
               sb.append("function score (").append(subQuery.toString(field)).append(",function=").append(function).append(')');
      -        sb.append(ToStringUtils.boost(getBoost()));
               return sb.toString();
           }
       
      diff --git a/core/src/main/java/org/elasticsearch/common/lucene/store/FilterIndexOutput.java b/core/src/main/java/org/elasticsearch/common/lucene/store/FilterIndexOutput.java
      index 616e43ac4225..5e5fc8262642 100644
      --- a/core/src/main/java/org/elasticsearch/common/lucene/store/FilterIndexOutput.java
      +++ b/core/src/main/java/org/elasticsearch/common/lucene/store/FilterIndexOutput.java
      @@ -30,7 +30,7 @@ public class FilterIndexOutput extends IndexOutput {
           protected final IndexOutput out;
       
           public FilterIndexOutput(String resourceDescription, IndexOutput out) {
      -        super(resourceDescription);
      +        super(resourceDescription, out.getName());
               this.out = out;
           }
       
      diff --git a/core/src/main/java/org/elasticsearch/common/util/BigArrays.java b/core/src/main/java/org/elasticsearch/common/util/BigArrays.java
      index faa377baccdf..dfef49ca9d42 100644
      --- a/core/src/main/java/org/elasticsearch/common/util/BigArrays.java
      +++ b/core/src/main/java/org/elasticsearch/common/util/BigArrays.java
      @@ -41,9 +41,9 @@ public class BigArrays {
       
           /** Page size in bytes: 16KB */
           public static final int PAGE_SIZE_IN_BYTES = 1 << 14;
      -    public static final int BYTE_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_BYTE;
      -    public static final int INT_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_INT;
      -    public static final int LONG_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_LONG;
      +    public static final int BYTE_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES;
      +    public static final int INT_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / Integer.BYTES;
      +    public static final int LONG_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / Long.BYTES;
           public static final int OBJECT_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_OBJECT_REF;
       
           /** Returns the next size to grow when working with parallel arrays that may have different page sizes or number of bytes per element. */
      @@ -490,7 +490,7 @@ public class BigArrays {
               if (minSize <= array.size()) {
                   return array;
               }
      -        final long newSize = overSize(minSize, BYTE_PAGE_SIZE, RamUsageEstimator.NUM_BYTES_BYTE);
      +        final long newSize = overSize(minSize, BYTE_PAGE_SIZE, 1);
               return resize(array, newSize);
           }
       
      @@ -573,7 +573,7 @@ public class BigArrays {
               if (minSize <= array.size()) {
                   return array;
               }
      -        final long newSize = overSize(minSize, INT_PAGE_SIZE, RamUsageEstimator.NUM_BYTES_INT);
      +        final long newSize = overSize(minSize, INT_PAGE_SIZE, Integer.BYTES);
               return resize(array, newSize);
           }
       
      @@ -623,7 +623,7 @@ public class BigArrays {
               if (minSize <= array.size()) {
                   return array;
               }
      -        final long newSize = overSize(minSize, LONG_PAGE_SIZE, RamUsageEstimator.NUM_BYTES_LONG);
      +        final long newSize = overSize(minSize, LONG_PAGE_SIZE, Long.BYTES);
               return resize(array, newSize);
           }
       
      @@ -670,7 +670,7 @@ public class BigArrays {
               if (minSize <= array.size()) {
                   return array;
               }
      -        final long newSize = overSize(minSize, LONG_PAGE_SIZE, RamUsageEstimator.NUM_BYTES_LONG);
      +        final long newSize = overSize(minSize, LONG_PAGE_SIZE, Long.BYTES);
               return resize(array, newSize);
           }
       
      @@ -717,7 +717,7 @@ public class BigArrays {
               if (minSize <= array.size()) {
                   return array;
               }
      -        final long newSize = overSize(minSize, INT_PAGE_SIZE, RamUsageEstimator.NUM_BYTES_FLOAT);
      +        final long newSize = overSize(minSize, INT_PAGE_SIZE, Float.BYTES);
               return resize(array, newSize);
           }
       
      diff --git a/core/src/main/java/org/elasticsearch/common/util/BigByteArray.java b/core/src/main/java/org/elasticsearch/common/util/BigByteArray.java
      index da4bc28408df..cac3132385f6 100644
      --- a/core/src/main/java/org/elasticsearch/common/util/BigByteArray.java
      +++ b/core/src/main/java/org/elasticsearch/common/util/BigByteArray.java
      @@ -127,7 +127,7 @@ final class BigByteArray extends AbstractBigArray implements ByteArray {
       
           @Override
           protected int numBytesPerElement() {
      -        return RamUsageEstimator.NUM_BYTES_BYTE;
      +        return 1;
           }
       
           /** Change the size of this array. Content between indexes 0 and min(size(), newSize) will be preserved. */
      diff --git a/core/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java b/core/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java
      index 1f7391883777..4aab593affe6 100644
      --- a/core/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java
      +++ b/core/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java
      @@ -71,7 +71,7 @@ final class BigDoubleArray extends AbstractBigArray implements DoubleArray {
       
           @Override
           protected int numBytesPerElement() {
      -        return RamUsageEstimator.NUM_BYTES_INT;
      +        return Integer.BYTES;
           }
       
           /** Change the size of this array. Content between indexes 0 and min(size(), newSize) will be preserved. */
      diff --git a/core/src/main/java/org/elasticsearch/common/util/BigFloatArray.java b/core/src/main/java/org/elasticsearch/common/util/BigFloatArray.java
      index f6fc2d8fce00..1fa79a9f3dbe 100644
      --- a/core/src/main/java/org/elasticsearch/common/util/BigFloatArray.java
      +++ b/core/src/main/java/org/elasticsearch/common/util/BigFloatArray.java
      @@ -71,7 +71,7 @@ final class BigFloatArray extends AbstractBigArray implements FloatArray {
       
           @Override
           protected int numBytesPerElement() {
      -        return RamUsageEstimator.NUM_BYTES_FLOAT;
      +        return Float.BYTES;
           }
       
           /** Change the size of this array. Content between indexes 0 and min(size(), newSize) will be preserved. */
      diff --git a/core/src/main/java/org/elasticsearch/common/util/BigIntArray.java b/core/src/main/java/org/elasticsearch/common/util/BigIntArray.java
      index 1c0e9fe017c0..4ce5fc7aceea 100644
      --- a/core/src/main/java/org/elasticsearch/common/util/BigIntArray.java
      +++ b/core/src/main/java/org/elasticsearch/common/util/BigIntArray.java
      @@ -88,7 +88,7 @@ final class BigIntArray extends AbstractBigArray implements IntArray {
       
           @Override
           protected int numBytesPerElement() {
      -        return RamUsageEstimator.NUM_BYTES_INT;
      +        return Integer.BYTES;
           }
       
           /** Change the size of this array. Content between indexes 0 and min(size(), newSize) will be preserved. */
      diff --git a/core/src/main/java/org/elasticsearch/common/util/BigLongArray.java b/core/src/main/java/org/elasticsearch/common/util/BigLongArray.java
      index fe0323ba67c9..2e3248143b4e 100644
      --- a/core/src/main/java/org/elasticsearch/common/util/BigLongArray.java
      +++ b/core/src/main/java/org/elasticsearch/common/util/BigLongArray.java
      @@ -70,7 +70,7 @@ final class BigLongArray extends AbstractBigArray implements LongArray {
       
           @Override
           protected int numBytesPerElement() {
      -        return RamUsageEstimator.NUM_BYTES_LONG;
      +        return Long.BYTES;
           }
       
           /** Change the size of this array. Content between indexes 0 and min(size(), newSize) will be preserved. */
      diff --git a/core/src/main/java/org/elasticsearch/common/util/BigObjectArray.java b/core/src/main/java/org/elasticsearch/common/util/BigObjectArray.java
      index ab923a195ca4..19a41d3096da 100644
      --- a/core/src/main/java/org/elasticsearch/common/util/BigObjectArray.java
      +++ b/core/src/main/java/org/elasticsearch/common/util/BigObjectArray.java
      @@ -65,7 +65,7 @@ final class BigObjectArray extends AbstractBigArray implements ObjectArray
       
           @Override
           protected int numBytesPerElement() {
      -        return RamUsageEstimator.NUM_BYTES_INT;
      +        return Integer.BYTES;
           }
       
           /** Change the size of this array. Content between indexes 0 and min(size(), newSize) will be preserved. */
      diff --git a/core/src/main/java/org/elasticsearch/common/util/BloomFilter.java b/core/src/main/java/org/elasticsearch/common/util/BloomFilter.java
      index fdc94d538499..b9dd6859ce07 100644
      --- a/core/src/main/java/org/elasticsearch/common/util/BloomFilter.java
      +++ b/core/src/main/java/org/elasticsearch/common/util/BloomFilter.java
      @@ -388,7 +388,7 @@ public class BloomFilter {
               }
       
               public long ramBytesUsed() {
      -            return RamUsageEstimator.NUM_BYTES_LONG * data.length + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 16;
      +            return Long.BYTES * data.length + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 16;
               }
           }
       
      diff --git a/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java b/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java
      index 739677342f74..36e0b19c7829 100644
      --- a/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java
      +++ b/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java
      @@ -333,7 +333,7 @@ public class CollectionUtils {
               assert indices.length >= numValues;
               if (numValues > 1) {
                   new InPlaceMergeSorter() {
      -                final Comparator comparator = BytesRef.getUTF8SortedAsUnicodeComparator();
      +                final Comparator comparator = Comparator.naturalOrder();
                       @Override
                       protected int compare(int i, int j) {
                           return comparator.compare(bytes.get(scratch, indices[i]), bytes.get(scratch1, indices[j]));
      diff --git a/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java b/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java
      index c3c1cd3b7349..8795a7e7d152 100644
      --- a/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java
      +++ b/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java
      @@ -116,7 +116,7 @@ public abstract class MetaDataStateFormat {
               final Path finalStatePath = stateLocation.resolve(fileName);
               try {
                   final String resourceDesc = "MetaDataStateFormat.write(path=\"" + tmpStatePath + "\")";
      -            try (OutputStreamIndexOutput out = new OutputStreamIndexOutput(resourceDesc, Files.newOutputStream(tmpStatePath), BUFFER_SIZE)) {
      +            try (OutputStreamIndexOutput out = new OutputStreamIndexOutput(resourceDesc, fileName, Files.newOutputStream(tmpStatePath), BUFFER_SIZE)) {
                       CodecUtil.writeHeader(out, STATE_FILE_CODEC, STATE_FILE_VERSION);
                       out.writeInt(format.index());
                       out.writeLong(version);
      diff --git a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java
      index a27b49b9618d..b7481e78496e 100644
      --- a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java
      +++ b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java
      @@ -20,8 +20,8 @@
       package org.elasticsearch.index.analysis;
       
       import org.apache.lucene.analysis.Analyzer;
      -import org.apache.lucene.analysis.NumericTokenStream;
       import org.apache.lucene.analysis.TokenStream;
      +import org.apache.lucene.analysis.LegacyNumericTokenStream;
       import org.apache.lucene.analysis.ar.ArabicAnalyzer;
       import org.apache.lucene.analysis.bg.BulgarianAnalyzer;
       import org.apache.lucene.analysis.br.BrazilianAnalyzer;
      @@ -300,7 +300,7 @@ public class Analysis {
            * 

      Although most analyzers generate character terms (CharTermAttribute), * some token only contain binary terms (BinaryTermAttribute, * CharTermAttribute being a special type of BinaryTermAttribute), such as - * {@link NumericTokenStream} and unsuitable for highlighting and + * {@link LegacyNumericTokenStream} and unsuitable for highlighting and * more-like-this queries which expect character terms.

      */ public static boolean isCharacterTokenStream(TokenStream tokenStream) { diff --git a/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenFilterFactory.java index f28f374220af..82ed526323df 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenFilterFactory.java @@ -21,10 +21,8 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter; -import org.apache.lucene.analysis.ngram.Lucene43EdgeNGramTokenFilter; import org.apache.lucene.analysis.ngram.NGramTokenFilter; import org.apache.lucene.analysis.reverse.ReverseStringFilter; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -43,14 +41,11 @@ public class EdgeNGramTokenFilterFactory extends AbstractTokenFilterFactory { public static final int SIDE_BACK = 2; private final int side; - private org.elasticsearch.Version esVersion; - public EdgeNGramTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); this.minGram = settings.getAsInt("min_gram", NGramTokenFilter.DEFAULT_MIN_NGRAM_SIZE); this.maxGram = settings.getAsInt("max_gram", NGramTokenFilter.DEFAULT_MAX_NGRAM_SIZE); this.side = parseSide(settings.get("side", "front")); - this.esVersion = org.elasticsearch.Version.indexCreated(indexSettings.getSettings()); } static int parseSide(String side) { @@ -70,15 +65,7 @@ public class EdgeNGramTokenFilterFactory extends AbstractTokenFilterFactory { result = new ReverseStringFilter(result); } - if (version.onOrAfter(Version.LUCENE_4_3) && esVersion.onOrAfter(org.elasticsearch.Version.V_0_90_2)) 
{ - /* - * We added this in 0.90.2 but 0.90.1 used LUCENE_43 already so we can not rely on the lucene version. - * Yet if somebody uses 0.90.2 or higher with a prev. lucene version we should also use the deprecated version. - */ - result = new EdgeNGramTokenFilter(result, minGram, maxGram); - } else { - result = new Lucene43EdgeNGramTokenFilter(result, minGram, maxGram); - } + result = new EdgeNGramTokenFilter(result, minGram, maxGram); // side=BACK is not supported anymore but applying ReverseStringFilter up-front and after the token filter has the same effect if (side == SIDE_BACK) { diff --git a/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java index 2c50d8d4d667..77d122393ce3 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java @@ -21,9 +21,7 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.ngram.EdgeNGramTokenizer; -import org.apache.lucene.analysis.ngram.Lucene43EdgeNGramTokenizer; import org.apache.lucene.analysis.ngram.NGramTokenizer; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -33,55 +31,33 @@ import static org.elasticsearch.index.analysis.NGramTokenizerFactory.parseTokenC /** * */ -@SuppressWarnings("deprecation") public class EdgeNGramTokenizerFactory extends AbstractTokenizerFactory { private final int minGram; private final int maxGram; - private final Lucene43EdgeNGramTokenizer.Side side; - private final CharMatcher matcher; - - protected org.elasticsearch.Version esVersion; public EdgeNGramTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { 
super(indexSettings, name, settings); this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE); this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); - this.side = Lucene43EdgeNGramTokenizer.Side.getSide(settings.get("side", Lucene43EdgeNGramTokenizer.DEFAULT_SIDE.getLabel())); this.matcher = parseTokenChars(settings.getAsArray("token_chars")); - this.esVersion = indexSettings.getIndexVersionCreated(); } @Override public Tokenizer create() { - if (version.onOrAfter(Version.LUCENE_4_3) && esVersion.onOrAfter(org.elasticsearch.Version.V_0_90_2)) { - /* - * We added this in 0.90.2 but 0.90.1 used LUCENE_43 already so we can not rely on the lucene version. - * Yet if somebody uses 0.90.2 or higher with a prev. lucene version we should also use the deprecated version. - */ - if (side == Lucene43EdgeNGramTokenizer.Side.BACK) { - throw new IllegalArgumentException("side=back is not supported anymore. Please fix your analysis chain or use" - + " an older compatibility version (<=4.2) but beware that it might cause highlighting bugs." - + " To obtain the same behavior as the previous version please use \"edgeNGram\" filter which still supports side=back" - + " in combination with a \"keyword\" tokenizer"); - } - final Version version = this.version == Version.LUCENE_4_3 ? 
Version.LUCENE_4_4 : this.version; // always use 4.4 or higher - if (matcher == null) { - return new EdgeNGramTokenizer(minGram, maxGram); - } else { - return new EdgeNGramTokenizer(minGram, maxGram) { - @Override - protected boolean isTokenChar(int chr) { - return matcher.isTokenChar(chr); - } - }; - } + if (matcher == null) { + return new EdgeNGramTokenizer(minGram, maxGram); } else { - return new Lucene43EdgeNGramTokenizer(side, minGram, maxGram); + return new EdgeNGramTokenizer(minGram, maxGram) { + @Override + protected boolean isTokenChar(int chr) { + return matcher.isTokenChar(chr); + } + }; } } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/KeepWordFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/KeepWordFilterFactory.java index 82b8df707413..ab00657313de 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/KeepWordFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/KeepWordFilterFactory.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.KeepWordFilter; -import org.apache.lucene.analysis.miscellaneous.Lucene43KeepWordFilter; import org.apache.lucene.analysis.util.CharArraySet; import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; @@ -40,9 +39,6 @@ import org.elasticsearch.index.IndexSettings; *
    2. {@value #KEEP_WORDS_PATH_KEY} an reference to a file containing the words * / tokens to keep. Note: this is an alternative to {@value #KEEP_WORDS_KEY} if * both are set an exception will be thrown.
    3. - *
    4. {@value #ENABLE_POS_INC_KEY} true iff the filter should - * maintain position increments for dropped tokens. The default is - * true.
    5. *
    6. {@value #KEEP_WORDS_CASE_KEY} to use case sensitive keep words. The * default is false which corresponds to case-sensitive.
    7. * @@ -51,10 +47,11 @@ import org.elasticsearch.index.IndexSettings; */ public class KeepWordFilterFactory extends AbstractTokenFilterFactory { private final CharArraySet keepWords; - private final boolean enablePositionIncrements; private static final String KEEP_WORDS_KEY = "keep_words"; private static final String KEEP_WORDS_PATH_KEY = KEEP_WORDS_KEY + "_path"; private static final String KEEP_WORDS_CASE_KEY = KEEP_WORDS_KEY + "_case"; // for javadoc + + // unsupported ancient option private static final String ENABLE_POS_INC_KEY = "enable_position_increments"; public KeepWordFilterFactory(IndexSettings indexSettings, @@ -68,26 +65,14 @@ public class KeepWordFilterFactory extends AbstractTokenFilterFactory { throw new IllegalArgumentException("keep requires either `" + KEEP_WORDS_KEY + "` or `" + KEEP_WORDS_PATH_KEY + "` to be configured"); } - if (version.onOrAfter(Version.LUCENE_4_4) && settings.get(ENABLE_POS_INC_KEY) != null) { - throw new IllegalArgumentException(ENABLE_POS_INC_KEY + " is not supported anymore. Please fix your analysis chain or use" - + " an older compatibility version (<=4.3) but beware that it might cause highlighting bugs."); + if (settings.get(ENABLE_POS_INC_KEY) != null) { + throw new IllegalArgumentException(ENABLE_POS_INC_KEY + " is not supported anymore. Please fix your analysis chain"); } - enablePositionIncrements = version.onOrAfter(Version.LUCENE_4_4) ? 
true : settings.getAsBoolean(ENABLE_POS_INC_KEY, true); - this.keepWords = Analysis.getWordSet(env, settings, KEEP_WORDS_KEY); - } @Override public TokenStream create(TokenStream tokenStream) { - if (version.onOrAfter(Version.LUCENE_4_4)) { - return new KeepWordFilter(tokenStream, keepWords); - } else { - @SuppressWarnings("deprecation") - final TokenStream filter = new Lucene43KeepWordFilter(enablePositionIncrements, tokenStream, keepWords); - return filter; - } + return new KeepWordFilter(tokenStream, keepWords); } - - } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/LengthTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/LengthTokenFilterFactory.java index 3af93bc79de1..e55e24ccae01 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/LengthTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/LengthTokenFilterFactory.java @@ -21,8 +21,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.LengthFilter; -import org.apache.lucene.analysis.miscellaneous.Lucene43LengthFilter; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -34,28 +32,21 @@ public class LengthTokenFilterFactory extends AbstractTokenFilterFactory { private final int min; private final int max; - private final boolean enablePositionIncrements; + + // ancient unsupported option private static final String ENABLE_POS_INC_KEY = "enable_position_increments"; public LengthTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); min = settings.getAsInt("min", 0); max = settings.getAsInt("max", Integer.MAX_VALUE); - if (version.onOrAfter(Version.LUCENE_4_4) && settings.get(ENABLE_POS_INC_KEY) != null) { - throw new 
IllegalArgumentException(ENABLE_POS_INC_KEY + " is not supported anymore. Please fix your analysis chain or use" - + " an older compatibility version (<=4.3) but beware that it might cause highlighting bugs."); + if (settings.get(ENABLE_POS_INC_KEY) != null) { + throw new IllegalArgumentException(ENABLE_POS_INC_KEY + " is not supported anymore. Please fix your analysis chain"); } - enablePositionIncrements = version.onOrAfter(Version.LUCENE_4_4) ? true : settings.getAsBoolean(ENABLE_POS_INC_KEY, true); } @Override public TokenStream create(TokenStream tokenStream) { - if (version.onOrAfter(Version.LUCENE_4_4)) { - return new LengthFilter(tokenStream, min, max); - } else { - @SuppressWarnings("deprecation") - final TokenStream filter = new Lucene43LengthFilter(enablePositionIncrements, tokenStream, min, max); - return filter; - } + return new LengthFilter(tokenStream, min, max); } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenFilterFactory.java index 80e0aeb32eb6..0905b310735a 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenFilterFactory.java @@ -20,9 +20,7 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.ngram.Lucene43NGramTokenFilter; import org.apache.lucene.analysis.ngram.NGramTokenFilter; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -44,14 +42,8 @@ public class NGramTokenFilterFactory extends AbstractTokenFilterFactory { this.maxGram = settings.getAsInt("max_gram", NGramTokenFilter.DEFAULT_MAX_NGRAM_SIZE); } - @SuppressWarnings("deprecation") @Override public TokenStream create(TokenStream tokenStream) { - final Version 
version = this.version == Version.LUCENE_4_3 ? Version.LUCENE_4_4 : this.version; // we supported it since 4.3 - if (version.onOrAfter(Version.LUCENE_4_3)) { - return new NGramTokenFilter(tokenStream, minGram, maxGram); - } else { - return new Lucene43NGramTokenFilter(tokenStream, minGram, maxGram); - } + return new NGramTokenFilter(tokenStream, minGram, maxGram); } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NumericDateTokenizer.java b/core/src/main/java/org/elasticsearch/index/analysis/NumericDateTokenizer.java index 03b502d4478f..21a13eab5733 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NumericDateTokenizer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NumericDateTokenizer.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.analysis; -import org.apache.lucene.analysis.NumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import org.joda.time.format.DateTimeFormatter; import java.io.IOException; @@ -30,11 +30,11 @@ import java.io.IOException; public class NumericDateTokenizer extends NumericTokenizer { public NumericDateTokenizer(int precisionStep, char[] buffer, DateTimeFormatter dateTimeFormatter) throws IOException { - super(new NumericTokenStream(precisionStep), buffer, dateTimeFormatter); + super(new LegacyNumericTokenStream(precisionStep), buffer, dateTimeFormatter); } @Override - protected void setValue(NumericTokenStream tokenStream, String value) { + protected void setValue(LegacyNumericTokenStream tokenStream, String value) { tokenStream.setLongValue(((DateTimeFormatter) extra).parseMillis(value)); } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NumericDoubleTokenizer.java b/core/src/main/java/org/elasticsearch/index/analysis/NumericDoubleTokenizer.java index 793c3edd612b..6be6199b5298 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NumericDoubleTokenizer.java +++ 
b/core/src/main/java/org/elasticsearch/index/analysis/NumericDoubleTokenizer.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.analysis; -import org.apache.lucene.analysis.NumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import java.io.IOException; @@ -29,11 +29,11 @@ import java.io.IOException; public class NumericDoubleTokenizer extends NumericTokenizer { public NumericDoubleTokenizer(int precisionStep, char[] buffer) throws IOException { - super(new NumericTokenStream(precisionStep), buffer, null); + super(new LegacyNumericTokenStream(precisionStep), buffer, null); } @Override - protected void setValue(NumericTokenStream tokenStream, String value) { + protected void setValue(LegacyNumericTokenStream tokenStream, String value) { tokenStream.setDoubleValue(Double.parseDouble(value)); } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NumericFloatTokenizer.java b/core/src/main/java/org/elasticsearch/index/analysis/NumericFloatTokenizer.java index 02d42b8eef8e..b7b2f6577f9c 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NumericFloatTokenizer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NumericFloatTokenizer.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.analysis; -import org.apache.lucene.analysis.NumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import java.io.IOException; @@ -29,11 +29,11 @@ import java.io.IOException; public class NumericFloatTokenizer extends NumericTokenizer { public NumericFloatTokenizer(int precisionStep, char[] buffer) throws IOException { - super(new NumericTokenStream(precisionStep), buffer, null); + super(new LegacyNumericTokenStream(precisionStep), buffer, null); } @Override - protected void setValue(NumericTokenStream tokenStream, String value) { + protected void setValue(LegacyNumericTokenStream tokenStream, String value) { tokenStream.setFloatValue(Float.parseFloat(value)); } } \ 
No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NumericIntegerTokenizer.java b/core/src/main/java/org/elasticsearch/index/analysis/NumericIntegerTokenizer.java index 3f758c4900eb..3d8b1309997e 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NumericIntegerTokenizer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NumericIntegerTokenizer.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.analysis; -import org.apache.lucene.analysis.NumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import java.io.IOException; @@ -29,11 +29,11 @@ import java.io.IOException; public class NumericIntegerTokenizer extends NumericTokenizer { public NumericIntegerTokenizer(int precisionStep, char[] buffer) throws IOException { - super(new NumericTokenStream(precisionStep), buffer, null); + super(new LegacyNumericTokenStream(precisionStep), buffer, null); } @Override - protected void setValue(NumericTokenStream tokenStream, String value) { + protected void setValue(LegacyNumericTokenStream tokenStream, String value) { tokenStream.setIntValue(Integer.parseInt(value)); } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NumericLongTokenizer.java b/core/src/main/java/org/elasticsearch/index/analysis/NumericLongTokenizer.java index d926371ca484..63abd2d9ed45 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NumericLongTokenizer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NumericLongTokenizer.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.analysis; -import org.apache.lucene.analysis.NumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import java.io.IOException; @@ -29,11 +29,11 @@ import java.io.IOException; public class NumericLongTokenizer extends NumericTokenizer { public NumericLongTokenizer(int precisionStep, char[] buffer) throws IOException { - super(new NumericTokenStream(precisionStep), buffer, 
null); + super(new LegacyNumericTokenStream(precisionStep), buffer, null); } @Override - protected void setValue(NumericTokenStream tokenStream, String value) { + protected void setValue(LegacyNumericTokenStream tokenStream, String value) { tokenStream.setLongValue(Long.parseLong(value)); } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NumericTokenizer.java b/core/src/main/java/org/elasticsearch/index/analysis/NumericTokenizer.java index ccd876289884..6339b11636e7 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NumericTokenizer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NumericTokenizer.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.analysis; -import org.apache.lucene.analysis.NumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.util.Attribute; import org.apache.lucene.util.AttributeFactory; @@ -45,12 +45,12 @@ public abstract class NumericTokenizer extends Tokenizer { }; } - private final NumericTokenStream numericTokenStream; + private final LegacyNumericTokenStream numericTokenStream; private final char[] buffer; protected final Object extra; private boolean started; - protected NumericTokenizer(NumericTokenStream numericTokenStream, char[] buffer, Object extra) throws IOException { + protected NumericTokenizer(LegacyNumericTokenStream numericTokenStream, char[] buffer, Object extra) throws IOException { super(delegatingAttributeFactory(numericTokenStream)); this.numericTokenStream = numericTokenStream; // Add attributes from the numeric token stream, this works fine because the attribute factory delegates to numericTokenStream @@ -95,5 +95,5 @@ public abstract class NumericTokenizer extends Tokenizer { numericTokenStream.close(); } - protected abstract void setValue(NumericTokenStream tokenStream, String value); + protected abstract void setValue(LegacyNumericTokenStream tokenStream, String value); } 
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzer.java index 996cc93cd20b..a07ae16f8ef9 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzer.java @@ -28,7 +28,6 @@ import org.apache.lucene.analysis.en.EnglishPossessiveFilter; import org.apache.lucene.analysis.snowball.SnowballFilter; import org.apache.lucene.analysis.standard.StandardFilter; import org.apache.lucene.analysis.standard.StandardTokenizer; -import org.apache.lucene.analysis.standard.std40.StandardTokenizer40; import org.apache.lucene.analysis.tr.TurkishLowerCaseFilter; import org.apache.lucene.analysis.util.CharArraySet; import org.apache.lucene.util.Version; @@ -64,12 +63,7 @@ public final class SnowballAnalyzer extends Analyzer { and a {@link SnowballFilter} */ @Override public TokenStreamComponents createComponents(String fieldName) { - final Tokenizer tokenizer; - if (getVersion().onOrAfter(Version.LUCENE_4_7_0)) { - tokenizer = new StandardTokenizer(); - } else { - tokenizer = new StandardTokenizer40(); - } + final Tokenizer tokenizer = new StandardTokenizer(); TokenStream result = tokenizer; // remove the possessive 's for english stemmers if (name.equals("English") || name.equals("Porter") || name.equals("Lovins")) diff --git a/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzer.java index 156ad1ff07e2..a755e54db177 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzer.java @@ -26,10 +26,8 @@ import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.core.StopFilter; import 
org.apache.lucene.analysis.standard.StandardFilter; import org.apache.lucene.analysis.standard.StandardTokenizer; -import org.apache.lucene.analysis.standard.std40.StandardTokenizer40; import org.apache.lucene.analysis.util.CharArraySet; import org.apache.lucene.analysis.util.StopwordAnalyzerBase; -import org.apache.lucene.util.Version; public class StandardHtmlStripAnalyzer extends StopwordAnalyzerBase { @@ -47,12 +45,7 @@ public class StandardHtmlStripAnalyzer extends StopwordAnalyzerBase { @Override protected TokenStreamComponents createComponents(final String fieldName) { - final Tokenizer src; - if (getVersion().onOrAfter(Version.LUCENE_4_7_0)) { - src = new StandardTokenizer(); - } else { - src = new StandardTokenizer40(); - } + final Tokenizer src = new StandardTokenizer(); TokenStream tok = new StandardFilter(src); tok = new LowerCaseFilter(tok); if (!stopwords.isEmpty()) { diff --git a/core/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java index d0702bdbc4b0..3f142a1ab43a 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java @@ -22,8 +22,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.analysis.standard.StandardTokenizer; -import org.apache.lucene.analysis.standard.std40.StandardTokenizer40; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -41,14 +39,8 @@ public class StandardTokenizerFactory extends AbstractTokenizerFactory { @Override public Tokenizer create() { - if (version.onOrAfter(Version.LUCENE_4_7_0)) { - StandardTokenizer tokenizer = new 
StandardTokenizer(); - tokenizer.setMaxTokenLength(maxTokenLength); - return tokenizer; - } else { - StandardTokenizer40 tokenizer = new StandardTokenizer40(); - tokenizer.setMaxTokenLength(maxTokenLength); - return tokenizer; - } + StandardTokenizer tokenizer = new StandardTokenizer(); + tokenizer.setMaxTokenLength(maxTokenLength); + return tokenizer; } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/StopTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/StopTokenFilterFactory.java index 6ab0c3fc9c80..322fcea452f7 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/StopTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/StopTokenFilterFactory.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.core.Lucene43StopFilter; import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.core.StopFilter; import org.apache.lucene.analysis.util.CharArraySet; @@ -42,7 +41,6 @@ public class StopTokenFilterFactory extends AbstractTokenFilterFactory { private final boolean ignoreCase; - private final boolean enablePositionIncrements; private final boolean removeTrailing; public StopTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { @@ -50,21 +48,15 @@ public class StopTokenFilterFactory extends AbstractTokenFilterFactory { this.ignoreCase = settings.getAsBoolean("ignore_case", false); this.removeTrailing = settings.getAsBoolean("remove_trailing", true); this.stopWords = Analysis.parseStopWords(env, settings, StopAnalyzer.ENGLISH_STOP_WORDS_SET, ignoreCase); - if (version.onOrAfter(Version.LUCENE_4_4) && settings.get("enable_position_increments") != null) { - throw new IllegalArgumentException("enable_position_increments is not supported anymore as of Lucene 4.4 as it can create broken token streams." 
- + " Please fix your analysis chain or use an older compatibility version (<= 4.3)."); + if (settings.get("enable_position_increments") != null) { + throw new IllegalArgumentException("enable_position_increments is not supported anymore. Please fix your analysis chain"); } - this.enablePositionIncrements = settings.getAsBoolean("enable_position_increments", true); } @Override public TokenStream create(TokenStream tokenStream) { if (removeTrailing) { - if (version.onOrAfter(Version.LUCENE_4_4)) { - return new StopFilter(tokenStream, stopWords); - } else { - return new Lucene43StopFilter(enablePositionIncrements, tokenStream, stopWords); - } + return new StopFilter(tokenStream, stopWords); } else { return new SuggestStopFilter(tokenStream, stopWords); } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/TrimTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/TrimTokenFilterFactory.java index a80c36b5a3ec..c77467b2b41c 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/TrimTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/TrimTokenFilterFactory.java @@ -20,9 +20,7 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.miscellaneous.Lucene43TrimFilter; import org.apache.lucene.analysis.miscellaneous.TrimFilter; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -32,26 +30,17 @@ import org.elasticsearch.index.IndexSettings; */ public class TrimTokenFilterFactory extends AbstractTokenFilterFactory { - private final boolean updateOffsets; private static final String UPDATE_OFFSETS_KEY = "update_offsets"; public TrimTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); - if 
(version.onOrAfter(Version.LUCENE_4_4_0) && settings.get(UPDATE_OFFSETS_KEY) != null) { - throw new IllegalArgumentException(UPDATE_OFFSETS_KEY + " is not supported anymore. Please fix your analysis chain or use" - + " an older compatibility version (<=4.3) but beware that it might cause highlighting bugs."); + if (settings.get(UPDATE_OFFSETS_KEY) != null) { + throw new IllegalArgumentException(UPDATE_OFFSETS_KEY + " is not supported anymore. Please fix your analysis chain"); } - this.updateOffsets = settings.getAsBoolean("update_offsets", false); } @Override public TokenStream create(TokenStream tokenStream) { - if (version.onOrAfter(Version.LUCENE_4_4_0)) { - return new TrimFilter(tokenStream); - } else { - @SuppressWarnings("deprecation") - final TokenStream filter = new Lucene43TrimFilter(tokenStream, updateOffsets); - return filter; - } + return new TrimFilter(tokenStream); } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/UAX29URLEmailTokenizerFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/UAX29URLEmailTokenizerFactory.java index 0668409fa074..3e75d214bd3c 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/UAX29URLEmailTokenizerFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/UAX29URLEmailTokenizerFactory.java @@ -22,8 +22,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer; -import org.apache.lucene.analysis.standard.std40.UAX29URLEmailTokenizer40; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -42,14 +40,8 @@ public class UAX29URLEmailTokenizerFactory extends AbstractTokenizerFactory { @Override public Tokenizer create() { - if (version.onOrAfter(Version.LUCENE_4_7)) { - 
UAX29URLEmailTokenizer tokenizer = new UAX29URLEmailTokenizer(); - tokenizer.setMaxTokenLength(maxTokenLength); - return tokenizer; - } else { - UAX29URLEmailTokenizer40 tokenizer = new UAX29URLEmailTokenizer40(); - tokenizer.setMaxTokenLength(maxTokenLength); - return tokenizer; - } + UAX29URLEmailTokenizer tokenizer = new UAX29URLEmailTokenizer(); + tokenizer.setMaxTokenLength(maxTokenLength); + return tokenizer; } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java index 1d5a95631303..118d7f84a111 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java @@ -20,11 +20,9 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.miscellaneous.Lucene47WordDelimiterFilter; import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter; import org.apache.lucene.analysis.miscellaneous.WordDelimiterIterator; import org.apache.lucene.analysis.util.CharArraySet; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -96,17 +94,10 @@ public class WordDelimiterTokenFilterFactory extends AbstractTokenFilterFactory @Override public TokenStream create(TokenStream tokenStream) { - if (version.onOrAfter(Version.LUCENE_4_8)) { - return new WordDelimiterFilter(tokenStream, + return new WordDelimiterFilter(tokenStream, charTypeTable, flags, protoWords); - } else { - return new Lucene47WordDelimiterFilter(tokenStream, - charTypeTable, - flags, - protoWords); - } } public int getFlag(int flag, Settings settings, String key, boolean defaultValue) { diff --git 
a/core/src/main/java/org/elasticsearch/index/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java index 8d65e008f254..fc9719d36b12 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java @@ -21,8 +21,6 @@ package org.elasticsearch.index.analysis.compound; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.compound.DictionaryCompoundWordTokenFilter; -import org.apache.lucene.analysis.compound.Lucene43DictionaryCompoundWordTokenFilter; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -41,12 +39,7 @@ public class DictionaryCompoundWordTokenFilterFactory extends AbstractCompoundWo @Override public TokenStream create(TokenStream tokenStream) { - if (version.onOrAfter(Version.LUCENE_4_4_0)) { - return new DictionaryCompoundWordTokenFilter(tokenStream, wordList, minWordSize, - minSubwordSize, maxSubwordSize, onlyLongestMatch); - } else { - return new Lucene43DictionaryCompoundWordTokenFilter(tokenStream, wordList, minWordSize, - minSubwordSize, maxSubwordSize, onlyLongestMatch); - } + return new DictionaryCompoundWordTokenFilter(tokenStream, wordList, minWordSize, + minSubwordSize, maxSubwordSize, onlyLongestMatch); } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java index 42a29784acc9..152d4395ef3d 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java +++ 
b/core/src/main/java/org/elasticsearch/index/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java @@ -21,9 +21,7 @@ package org.elasticsearch.index.analysis.compound; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.compound.HyphenationCompoundWordTokenFilter; -import org.apache.lucene.analysis.compound.Lucene43HyphenationCompoundWordTokenFilter; import org.apache.lucene.analysis.compound.hyphenation.HyphenationTree; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -60,12 +58,7 @@ public class HyphenationCompoundWordTokenFilterFactory extends AbstractCompoundW @Override public TokenStream create(TokenStream tokenStream) { - if (version.onOrAfter(Version.LUCENE_4_4_0)) { - return new HyphenationCompoundWordTokenFilter(tokenStream, hyphenationTree, wordList, minWordSize, - minSubwordSize, maxSubwordSize, onlyLongestMatch); - } else { - return new Lucene43HyphenationCompoundWordTokenFilter(tokenStream, hyphenationTree, wordList, minWordSize, - minSubwordSize, maxSubwordSize, onlyLongestMatch); - } + return new HyphenationCompoundWordTokenFilter(tokenStream, hyphenationTree, wordList, minWordSize, + minSubwordSize, maxSubwordSize, onlyLongestMatch); } } diff --git a/core/src/main/java/org/elasticsearch/index/codec/CodecService.java b/core/src/main/java/org/elasticsearch/index/codec/CodecService.java index 432f81da8a97..3edc509b7eb7 100644 --- a/core/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ b/core/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.codec; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode; import org.apache.lucene.codecs.lucene54.Lucene54Codec; +import org.apache.lucene.codecs.lucene60.Lucene60Codec; import 
org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.logging.ESLogger; @@ -47,8 +48,8 @@ public class CodecService { public CodecService(@Nullable MapperService mapperService, ESLogger logger) { final MapBuilder codecs = MapBuilder.newMapBuilder(); if (mapperService == null) { - codecs.put(DEFAULT_CODEC, new Lucene54Codec()); - codecs.put(BEST_COMPRESSION_CODEC, new Lucene54Codec(Mode.BEST_COMPRESSION)); + codecs.put(DEFAULT_CODEC, new Lucene60Codec()); + codecs.put(BEST_COMPRESSION_CODEC, new Lucene60Codec(Mode.BEST_COMPRESSION)); } else { codecs.put(DEFAULT_CODEC, new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger)); diff --git a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java index 7663a322be67..a4977baa1f25 100644 --- a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java +++ b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java @@ -22,7 +22,7 @@ package org.elasticsearch.index.codec; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat; -import org.apache.lucene.codecs.lucene54.Lucene54Codec; +import org.apache.lucene.codecs.lucene60.Lucene60Codec; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.index.mapper.MappedFieldType; @@ -38,7 +38,7 @@ import org.elasticsearch.index.mapper.core.CompletionFieldMapper; * configured for a specific field the default postings format is used. 
*/ // LUCENE UPGRADE: make sure to move to a new codec depending on the lucene version -public class PerFieldMappingPostingFormatCodec extends Lucene54Codec { +public class PerFieldMappingPostingFormatCodec extends Lucene60Codec { private final ESLogger logger; private final MapperService mapperService; diff --git a/core/src/main/java/org/elasticsearch/index/engine/DeleteVersionValue.java b/core/src/main/java/org/elasticsearch/index/engine/DeleteVersionValue.java index 5a7f481eaadb..a2900f649efc 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/DeleteVersionValue.java +++ b/core/src/main/java/org/elasticsearch/index/engine/DeleteVersionValue.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.engine; -import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.index.translog.Translog; /** Holds a deleted version, which just adds a timestamp to {@link VersionValue} so we know when we can expire the deletion. */ @@ -44,6 +43,6 @@ class DeleteVersionValue extends VersionValue { @Override public long ramBytesUsed() { - return super.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_LONG; + return super.ramBytesUsed() + Long.BYTES; } } diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index c412ce3b85f5..01f02025aeb2 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -275,7 +275,7 @@ public class InternalEngine extends Engine { SearcherManager searcherManager = null; try { try { - final DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter, true), shardId); + final DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter), shardId); searcherManager = new SearcherManager(directoryReader, searcherFactory); lastCommittedSegmentInfos = 
readLastCommittedSegmentInfos(searcherManager, store); success = true; diff --git a/core/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java b/core/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java index 747e955b1796..f962d31bf8bb 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java +++ b/core/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java @@ -64,7 +64,7 @@ class LiveVersionMap implements ReferenceManager.RefreshListener, Accountable { * * NUM_BYTES_OBJECT_HEADER + 2*NUM_BYTES_INT + NUM_BYTES_OBJECT_REF + NUM_BYTES_ARRAY_HEADER [ + bytes.length] */ private static final int BASE_BYTES_PER_BYTESREF = RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + - 2*RamUsageEstimator.NUM_BYTES_INT + + 2*Integer.BYTES + RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER; @@ -76,7 +76,7 @@ class LiveVersionMap implements ReferenceManager.RefreshListener, Accountable { * CHM's pointer to CHM.Entry, double for approx load factor: * + 2*NUM_BYTES_OBJECT_REF */ private static final int BASE_BYTES_PER_CHM_ENTRY = RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + - RamUsageEstimator.NUM_BYTES_INT + + Integer.BYTES + 5*RamUsageEstimator.NUM_BYTES_OBJECT_REF; /** Tracks bytes used by current map, i.e. what is freed on refresh. 
For deletes, which are also added to tombstones, we only account diff --git a/core/src/main/java/org/elasticsearch/index/engine/VersionValue.java b/core/src/main/java/org/elasticsearch/index/engine/VersionValue.java index 950dbdbae656..6b780c2a6a34 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/VersionValue.java +++ b/core/src/main/java/org/elasticsearch/index/engine/VersionValue.java @@ -54,7 +54,7 @@ class VersionValue implements Accountable { @Override public long ramBytesUsed() { - return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_LONG + RamUsageEstimator.NUM_BYTES_OBJECT_REF + translogLocation.ramBytesUsed(); + return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + Long.BYTES + RamUsageEstimator.NUM_BYTES_OBJECT_REF + translogLocation.ramBytesUsed(); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/SortedBinaryDocValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/SortedBinaryDocValues.java index 58a7c9758b7a..b3c51141e20f 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/SortedBinaryDocValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/SortedBinaryDocValues.java @@ -23,7 +23,7 @@ import org.apache.lucene.util.BytesRef; /** * A list of per-document binary values, sorted - * according to {@link BytesRef#getUTF8SortedAsUnicodeComparator()}. + * according to {@link BytesRef#compareTo(BytesRef)}. * There might be dups however. 
*/ public abstract class SortedBinaryDocValues { diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java index bdc121b134b0..2b69afa5f825 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java @@ -30,8 +30,8 @@ import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.FixedBitSet; +import org.apache.lucene.util.LegacyNumericUtils; import org.apache.lucene.util.LongsRef; -import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.packed.GrowableWriter; import org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PagedGrowableWriter; @@ -459,7 +459,7 @@ public final class OrdinalsBuilder implements Closeable { @Override protected AcceptStatus accept(BytesRef term) throws IOException { // we stop accepting terms once we moved across the prefix codec terms - redundant values! - return NumericUtils.getPrefixCodedLongShift(term) == 0 ? AcceptStatus.YES : AcceptStatus.END; + return LegacyNumericUtils.getPrefixCodedLongShift(term) == 0 ? AcceptStatus.YES : AcceptStatus.END; } }; } @@ -475,7 +475,7 @@ public final class OrdinalsBuilder implements Closeable { @Override protected AcceptStatus accept(BytesRef term) throws IOException { // we stop accepting terms once we moved across the prefix codec terms - redundant values! - return NumericUtils.getPrefixCodedIntShift(term) == 0 ? AcceptStatus.YES : AcceptStatus.END; + return LegacyNumericUtils.getPrefixCodedIntShift(term) == 0 ? 
AcceptStatus.YES : AcceptStatus.END; } }; } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java index 022e3ad09239..2c41dece3dec 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java @@ -24,7 +24,7 @@ import org.apache.lucene.spatial.util.GeoEncodingUtils; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.CharsRefBuilder; -import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.geo.GeoPoint; @@ -62,7 +62,7 @@ abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldDataString[] of field values diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java index 29081c6c913a..86818a3999ef 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java @@ -23,11 +23,11 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.Version; import 
org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; @@ -116,7 +116,7 @@ public class ByteFieldMapper extends NumberFieldMapper { static final class ByteFieldType extends NumberFieldType { public ByteFieldType() { - super(NumericType.INT); + super(LegacyNumericType.INT); } protected ByteFieldType(ByteFieldType ref) { @@ -155,13 +155,13 @@ public class ByteFieldMapper extends NumberFieldMapper { @Override public BytesRef indexedValueForSearch(Object value) { BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + LegacyNumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match return bytesRef.get(); } @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(), lowerTerm == null ? null : (int)parseValue(lowerTerm), upperTerm == null ? 
null : (int)parseValue(upperTerm), includeLower, includeUpper); @@ -171,7 +171,7 @@ public class ByteFieldMapper extends NumberFieldMapper { public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { byte iValue = parseValue(value); byte iSim = fuzziness.asByte(); - return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -179,8 +179,8 @@ public class ByteFieldMapper extends NumberFieldMapper { @Override public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinInt(terms); - long maxValue = NumericUtils.getMaxInt(terms); + long minValue = LegacyNumericUtils.getMinInt(terms); + long maxValue = LegacyNumericUtils.getMaxInt(terms); return new FieldStats.Long( maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue ); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java index be83f0175c22..048c58297cba 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java @@ -23,12 +23,11 @@ import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Terms; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.NumericUtils; -import org.apache.lucene.util.ToStringUtils; +import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.Version; import 
org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; @@ -243,7 +242,6 @@ public class DateFieldMapper extends NumberFieldMapper { .append(" TO ") .append((upperTerm == null) ? "*" : upperTerm.toString()) .append(includeUpper ? ']' : '}') - .append(ToStringUtils.boost(getBoost())) .toString(); } } @@ -253,7 +251,7 @@ public class DateFieldMapper extends NumberFieldMapper { protected DateMathParser dateMathParser = new DateMathParser(dateTimeFormatter); public DateFieldType() { - super(NumericType.LONG); + super(LegacyNumericType.LONG); setFieldDataType(new FieldDataType("long")); } @@ -360,7 +358,7 @@ public class DateFieldMapper extends NumberFieldMapper { @Override public BytesRef indexedValueForSearch(Object value) { BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + LegacyNumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match return bytesRef.get(); } @@ -392,7 +390,7 @@ public class DateFieldMapper extends NumberFieldMapper { // not a time format iSim = fuzziness.asLong(); } - return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -400,8 +398,8 @@ public class DateFieldMapper extends NumberFieldMapper { @Override public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinLong(terms); - long maxValue = NumericUtils.getMaxLong(terms); + long minValue = LegacyNumericUtils.getMinLong(terms); + long maxValue = LegacyNumericUtils.getMaxLong(terms); return new FieldStats.Date( maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue, dateTimeFormatter() ); @@ -412,7 +410,7 @@ public class DateFieldMapper extends NumberFieldMapper { } private Query innerRangeQuery(Object 
lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) { - return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), lowerTerm == null ? null : parseToMilliseconds(lowerTerm, !includeLower, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser), upperTerm == null ? null : parseToMilliseconds(upperTerm, includeUpper, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser), includeLower, includeUpper); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java index 7d33d09cd991..e7550dc1f925 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java @@ -24,10 +24,11 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.LegacyNumericUtils; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; @@ -49,7 +50,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.apache.lucene.util.NumericUtils.doubleToSortableLong; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeDoubleValue; import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; @@ -118,7 +118,7 @@ public class DoubleFieldMapper 
extends NumberFieldMapper { public static final class DoubleFieldType extends NumberFieldType { public DoubleFieldType() { - super(NumericType.DOUBLE); + super(LegacyNumericType.DOUBLE); } protected DoubleFieldType(DoubleFieldType ref) { @@ -158,13 +158,13 @@ public class DoubleFieldMapper extends NumberFieldMapper { public BytesRef indexedValueForSearch(Object value) { long longValue = NumericUtils.doubleToSortableLong(parseDoubleValue(value)); BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.longToPrefixCoded(longValue, 0, bytesRef); // 0 because of exact match + LegacyNumericUtils.longToPrefixCoded(longValue, 0, bytesRef); // 0 because of exact match return bytesRef.get(); } @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(), lowerTerm == null ? null : parseDoubleValue(lowerTerm), upperTerm == null ? 
null : parseDoubleValue(upperTerm), includeLower, includeUpper); @@ -174,7 +174,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { double iValue = parseDoubleValue(value); double iSim = fuzziness.asDouble(); - return NumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -182,8 +182,8 @@ public class DoubleFieldMapper extends NumberFieldMapper { @Override public FieldStats stats(Terms terms, int maxDoc) throws IOException { - double minValue = NumericUtils.sortableLongToDouble(NumericUtils.getMinLong(terms)); - double maxValue = NumericUtils.sortableLongToDouble(NumericUtils.getMaxLong(terms)); + double minValue = NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMinLong(terms)); + double maxValue = NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMaxLong(terms)); return new FieldStats.Double( maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue ); @@ -284,7 +284,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { fields.add(field); } if (fieldType().hasDocValues()) { - addDocValue(context, fields, doubleToSortableLong(value)); + addDocValue(context, fields, NumericUtils.doubleToSortableLong(value)); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java index 85c5b619bf18..93cf3a7cfaf7 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java @@ -24,10 +24,11 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import 
org.apache.lucene.index.Terms; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.LegacyNumericUtils; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; @@ -50,7 +51,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.apache.lucene.util.NumericUtils.floatToSortableInt; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeFloatValue; import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; @@ -119,7 +119,7 @@ public class FloatFieldMapper extends NumberFieldMapper { static final class FloatFieldType extends NumberFieldType { public FloatFieldType() { - super(NumericType.FLOAT); + super(LegacyNumericType.FLOAT); } protected FloatFieldType(FloatFieldType ref) { @@ -159,13 +159,13 @@ public class FloatFieldMapper extends NumberFieldMapper { public BytesRef indexedValueForSearch(Object value) { int intValue = NumericUtils.floatToSortableInt(parseValue(value)); BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.intToPrefixCoded(intValue, 0, bytesRef); // 0 because of exact match + LegacyNumericUtils.intToPrefixCoded(intValue, 0, bytesRef); // 0 because of exact match return bytesRef.get(); } @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newFloatRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newFloatRange(name(), numericPrecisionStep(), lowerTerm == null ? null : parseValue(lowerTerm), upperTerm == null ? 
null : parseValue(upperTerm), includeLower, includeUpper); @@ -175,7 +175,7 @@ public class FloatFieldMapper extends NumberFieldMapper { public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { float iValue = parseValue(value); final float iSim = fuzziness.asFloat(); - return NumericRangeQuery.newFloatRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newFloatRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -183,8 +183,8 @@ public class FloatFieldMapper extends NumberFieldMapper { @Override public FieldStats stats(Terms terms, int maxDoc) throws IOException { - float minValue = NumericUtils.sortableIntToFloat(NumericUtils.getMinInt(terms)); - float maxValue = NumericUtils.sortableIntToFloat(NumericUtils.getMaxInt(terms)); + float minValue = NumericUtils.sortableIntToFloat(LegacyNumericUtils.getMinInt(terms)); + float maxValue = NumericUtils.sortableIntToFloat(LegacyNumericUtils.getMaxInt(terms)); return new FieldStats.Float( maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue ); @@ -296,7 +296,7 @@ public class FloatFieldMapper extends NumberFieldMapper { fields.add(field); } if (fieldType().hasDocValues()) { - addDocValue(context, fields, floatToSortableInt(value)); + addDocValue(context, fields, NumericUtils.floatToSortableInt(value)); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java index 7de625104159..fa7191cafbfd 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java @@ -24,11 +24,11 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import 
org.apache.lucene.index.Terms; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; @@ -124,7 +124,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { public static final class IntegerFieldType extends NumberFieldType { public IntegerFieldType() { - super(NumericType.INT); + super(LegacyNumericType.INT); } protected IntegerFieldType(IntegerFieldType ref) { @@ -164,13 +164,13 @@ public class IntegerFieldMapper extends NumberFieldMapper { @Override public BytesRef indexedValueForSearch(Object value) { BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + LegacyNumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match return bytesRef.get(); } @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(), lowerTerm == null ? null : parseValue(lowerTerm), upperTerm == null ? 
null : parseValue(upperTerm), includeLower, includeUpper); @@ -180,7 +180,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { int iValue = parseValue(value); int iSim = fuzziness.asInt(); - return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -188,8 +188,8 @@ public class IntegerFieldMapper extends NumberFieldMapper { @Override public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinInt(terms); - long maxValue = NumericUtils.getMaxInt(terms); + long minValue = LegacyNumericUtils.getMinInt(terms); + long maxValue = LegacyNumericUtils.getMaxInt(terms); return new FieldStats.Long( maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue ); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java index 0e9592fd72ed..a1acf0ab58ab 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java @@ -24,11 +24,11 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; 
import org.elasticsearch.common.Explicit; @@ -123,7 +123,7 @@ public class LongFieldMapper extends NumberFieldMapper { public static class LongFieldType extends NumberFieldType { public LongFieldType() { - super(NumericType.LONG); + super(LegacyNumericType.LONG); } protected LongFieldType(LongFieldType ref) { @@ -162,13 +162,13 @@ public class LongFieldMapper extends NumberFieldMapper { @Override public BytesRef indexedValueForSearch(Object value) { BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.longToPrefixCoded(parseLongValue(value), 0, bytesRef); // 0 because of exact match + LegacyNumericUtils.longToPrefixCoded(parseLongValue(value), 0, bytesRef); // 0 because of exact match return bytesRef.get(); } @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), lowerTerm == null ? null : parseLongValue(lowerTerm), upperTerm == null ? 
null : parseLongValue(upperTerm), includeLower, includeUpper); @@ -178,7 +178,7 @@ public class LongFieldMapper extends NumberFieldMapper { public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { long iValue = parseLongValue(value); final long iSim = fuzziness.asLong(); - return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -186,8 +186,8 @@ public class LongFieldMapper extends NumberFieldMapper { @Override public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinLong(terms); - long maxValue = NumericUtils.getMaxLong(terms); + long minValue = LegacyNumericUtils.getMinLong(terms); + long maxValue = LegacyNumericUtils.getMaxLong(terms); return new FieldStats.Long( maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue ); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java index 90fb20ef827e..7c2a38eaee7f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java @@ -20,7 +20,7 @@ package org.elasticsearch.index.mapper.core; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.NumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; @@ -129,7 +129,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM public static abstract class NumberFieldType extends MappedFieldType { - public NumberFieldType(NumericType numericType) { + public 
NumberFieldType(LegacyNumericType numericType) { setTokenized(false); setOmitNorms(true); setIndexOptions(IndexOptions.DOCS); @@ -295,38 +295,38 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM // used to we can use a numeric field in a document that is then parsed twice! public abstract static class CustomNumericField extends Field { - private ThreadLocal tokenStream = new ThreadLocal() { + private ThreadLocal tokenStream = new ThreadLocal() { @Override - protected NumericTokenStream initialValue() { - return new NumericTokenStream(fieldType().numericPrecisionStep()); + protected LegacyNumericTokenStream initialValue() { + return new LegacyNumericTokenStream(fieldType().numericPrecisionStep()); } }; - private static ThreadLocal tokenStream4 = new ThreadLocal() { + private static ThreadLocal tokenStream4 = new ThreadLocal() { @Override - protected NumericTokenStream initialValue() { - return new NumericTokenStream(4); + protected LegacyNumericTokenStream initialValue() { + return new LegacyNumericTokenStream(4); } }; - private static ThreadLocal tokenStream8 = new ThreadLocal() { + private static ThreadLocal tokenStream8 = new ThreadLocal() { @Override - protected NumericTokenStream initialValue() { - return new NumericTokenStream(8); + protected LegacyNumericTokenStream initialValue() { + return new LegacyNumericTokenStream(8); } }; - private static ThreadLocal tokenStream16 = new ThreadLocal() { + private static ThreadLocal tokenStream16 = new ThreadLocal() { @Override - protected NumericTokenStream initialValue() { - return new NumericTokenStream(16); + protected LegacyNumericTokenStream initialValue() { + return new LegacyNumericTokenStream(16); } }; - private static ThreadLocal tokenStreamMax = new ThreadLocal() { + private static ThreadLocal tokenStreamMax = new ThreadLocal() { @Override - protected NumericTokenStream initialValue() { - return new NumericTokenStream(Integer.MAX_VALUE); + protected LegacyNumericTokenStream 
initialValue() { + return new LegacyNumericTokenStream(Integer.MAX_VALUE); } }; @@ -337,7 +337,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM } } - protected NumericTokenStream getCachedStream() { + protected LegacyNumericTokenStream getCachedStream() { if (fieldType().numericPrecisionStep() == 4) { return tokenStream4.get(); } else if (fieldType().numericPrecisionStep() == 8) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java index 027f0b1b40b1..56b1e9a78f2a 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java @@ -24,11 +24,11 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; @@ -121,7 +121,7 @@ public class ShortFieldMapper extends NumberFieldMapper { static final class ShortFieldType extends NumberFieldType { public ShortFieldType() { - super(NumericType.INT); + super(LegacyNumericType.INT); } protected ShortFieldType(ShortFieldType ref) { @@ -160,13 +160,13 @@ public class ShortFieldMapper extends NumberFieldMapper { @Override public BytesRef indexedValueForSearch(Object value) { BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of 
exact match + LegacyNumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match return bytesRef.get(); } @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(), lowerTerm == null ? null : (int)parseValue(lowerTerm), upperTerm == null ? null : (int)parseValue(upperTerm), includeLower, includeUpper); @@ -176,7 +176,7 @@ public class ShortFieldMapper extends NumberFieldMapper { public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { short iValue = parseValue(value); short iSim = fuzziness.asShort(); - return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -184,8 +184,8 @@ public class ShortFieldMapper extends NumberFieldMapper { @Override public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinInt(terms); - long maxValue = NumericUtils.getMaxInt(terms); + long minValue = LegacyNumericUtils.getMinInt(terms); + long maxValue = LegacyNumericUtils.getMaxInt(terms); return new FieldStats.Long( maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue ); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java index f881d206f0c6..1a1c1592d7e0 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java @@ -21,7 +21,7 @@ package org.elasticsearch.index.mapper.geo; import org.apache.lucene.document.Field; 
import org.apache.lucene.spatial.util.GeoHashUtils; -import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.Version; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; @@ -483,7 +483,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr if (includeDefaults || fieldType().isLatLonEnabled() != GeoPointFieldMapper.Defaults.ENABLE_LATLON) { builder.field("lat_lon", fieldType().isLatLonEnabled()); } - if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != NumericUtils.PRECISION_STEP_DEFAULT)) { + if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != LegacyNumericUtils.PRECISION_STEP_DEFAULT)) { builder.field("precision_step", fieldType().latFieldType().numericPrecisionStep()); } if (includeDefaults || fieldType().isGeoHashEnabled() != Defaults.ENABLE_GEOHASH) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java index 0d84cf218121..75c082dd4398 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java @@ -84,7 +84,7 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper { fieldType.setTokenized(false); if (context.indexCreatedVersion().before(Version.V_2_3_0)) { fieldType.setNumericPrecisionStep(GeoPointField.PRECISION_STEP); - fieldType.setNumericType(FieldType.NumericType.LONG); + fieldType.setNumericType(FieldType.LegacyNumericType.LONG); } setupFieldType(context); return new GeoPointFieldMapper(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, @@ -95,7 +95,7 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper { public GeoPointFieldMapper 
build(BuilderContext context) { if (context.indexCreatedVersion().before(Version.V_2_3_0)) { fieldType.setNumericPrecisionStep(GeoPointField.PRECISION_STEP); - fieldType.setNumericType(FieldType.NumericType.LONG); + fieldType.setNumericType(FieldType.LegacyNumericType.LONG); } return super.build(context); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java index e90fdae0c47e..57778fa8d25f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java @@ -18,9 +18,9 @@ */ package org.elasticsearch.index.mapper.geo; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.jts.JtsGeometry; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.jts.JtsGeometry; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.spatial.prefix.PrefixTreeStrategy; @@ -58,7 +58,7 @@ import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenien /** - * FieldMapper for indexing {@link com.spatial4j.core.shape.Shape}s. + * FieldMapper for indexing {@link org.locationtech.spatial4j.shape.Shape}s. *

      * Currently Shapes can only be indexed and can only be queried using * {@link org.elasticsearch.index.query.GeoShapeQueryParser}, consequently diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java index 18929bfd833a..9a4cf70782bd 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java @@ -19,14 +19,14 @@ package org.elasticsearch.index.mapper.ip; -import org.apache.lucene.analysis.NumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.Version; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; @@ -206,7 +206,7 @@ public class IpFieldMapper extends NumberFieldMapper { @Override public BytesRef indexedValueForSearch(Object value) { BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + LegacyNumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match return bytesRef.get(); } @@ -242,7 +242,7 @@ public class IpFieldMapper extends NumberFieldMapper { @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), lowerTerm == null ? 
null : parseValue(lowerTerm), upperTerm == null ? null : parseValue(upperTerm), includeLower, includeUpper); @@ -257,7 +257,7 @@ public class IpFieldMapper extends NumberFieldMapper { } catch (IllegalArgumentException e) { iSim = fuzziness.asLong(); } - return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -356,11 +356,11 @@ public class IpFieldMapper extends NumberFieldMapper { public static class NumericIpTokenizer extends NumericTokenizer { public NumericIpTokenizer(int precisionStep, char[] buffer) throws IOException { - super(new NumericTokenStream(precisionStep), buffer, null); + super(new LegacyNumericTokenStream(precisionStep), buffer, null); } @Override - protected void setValue(NumericTokenStream tokenStream, String value) { + protected void setValue(LegacyNumericTokenStream tokenStream, String value) { tokenStream.setLongValue(ipToLong(value)); } } diff --git a/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java b/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java index c8d0379d7017..524266420fb7 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java +++ b/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java @@ -69,8 +69,6 @@ public final class ElasticsearchMergePolicy extends MergePolicy { /** Return an "upgraded" view of the reader. */ static CodecReader filter(CodecReader reader) throws IOException { - // convert 0.90.x _uid payloads to _version docvalues if needed - reader = VersionFieldUpgrader.wrap(reader); // TODO: remove 0.90.x/1.x freqs/prox/payloads from _uid? // the previous code never did this, so some indexes carry around trash. 
return reader; diff --git a/core/src/main/java/org/elasticsearch/index/shard/VersionFieldUpgrader.java b/core/src/main/java/org/elasticsearch/index/shard/VersionFieldUpgrader.java deleted file mode 100644 index 42bd5420ac3a..000000000000 --- a/core/src/main/java/org/elasticsearch/index/shard/VersionFieldUpgrader.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.shard; - -import org.apache.lucene.codecs.DocValuesProducer; -import org.apache.lucene.index.CodecReader; -import org.apache.lucene.index.DocValuesType; -import org.apache.lucene.index.FieldInfo; -import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.FilterCodecReader; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.NumericDocValues; -import org.apache.lucene.index.PostingsEnum; -import org.apache.lucene.index.Terms; -import org.apache.lucene.index.TermsEnum; -import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.util.Bits; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.packed.GrowableWriter; -import org.apache.lucene.util.packed.PackedInts; -import org.elasticsearch.common.Numbers; -import org.elasticsearch.index.mapper.internal.UidFieldMapper; -import org.elasticsearch.index.mapper.internal.VersionFieldMapper; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; - -/** - * Converts 0.90.x _uid payloads to _version docvalues - */ -class VersionFieldUpgrader extends FilterCodecReader { - final FieldInfos infos; - - VersionFieldUpgrader(CodecReader in) { - super(in); - - // Find a free field number - int fieldNumber = 0; - for (FieldInfo fi : in.getFieldInfos()) { - fieldNumber = Math.max(fieldNumber, fi.number + 1); - } - - // TODO: lots of things can wrong here... 
- FieldInfo newInfo = new FieldInfo(VersionFieldMapper.NAME, // field name - fieldNumber, // field number - false, // store term vectors - false, // omit norms - false, // store payloads - IndexOptions.NONE, // index options - DocValuesType.NUMERIC, // docvalues - -1, // docvalues generation - Collections.emptyMap() // attributes - ); - newInfo.checkConsistency(); // fail merge immediately if above code is wrong - - final ArrayList fieldInfoList = new ArrayList<>(); - for (FieldInfo info : in.getFieldInfos()) { - if (!info.name.equals(VersionFieldMapper.NAME)) { - fieldInfoList.add(info); - } - } - fieldInfoList.add(newInfo); - infos = new FieldInfos(fieldInfoList.toArray(new FieldInfo[fieldInfoList.size()])); - } - - static CodecReader wrap(CodecReader reader) throws IOException { - final FieldInfos fieldInfos = reader.getFieldInfos(); - final FieldInfo versionInfo = fieldInfos.fieldInfo(VersionFieldMapper.NAME); - if (versionInfo != null && versionInfo.getDocValuesType() != DocValuesType.NONE) { - // the reader is a recent one, it has versions and they are stored - // in a numeric doc values field - return reader; - } - // The segment is an old one, look at the _uid field - final Terms terms = reader.terms(UidFieldMapper.NAME); - if (terms == null || !terms.hasPayloads()) { - // The segment doesn't have an _uid field or doesn't have payloads - // don't try to do anything clever. 
If any other segment has versions - // all versions of this segment will be initialized to 0 - return reader; - } - // convert _uid payloads -> _version docvalues - return new VersionFieldUpgrader(reader); - } - - @Override - public FieldInfos getFieldInfos() { - return infos; - } - - @Override - public DocValuesProducer getDocValuesReader() { - DocValuesProducer producer = in.getDocValuesReader(); - // TODO: move this nullness stuff out - if (producer == null) { - producer = FilterDocValuesProducer.EMPTY; - } - return new UninvertedVersions(producer, this); - } - - static class UninvertedVersions extends FilterDocValuesProducer { - final CodecReader reader; - - UninvertedVersions(DocValuesProducer in, CodecReader reader) { - super(in); - this.reader = reader; - } - - @Override - public NumericDocValues getNumeric(FieldInfo field) throws IOException { - if (VersionFieldMapper.NAME.equals(field.name)) { - // uninvert into a packed ints and expose as docvalues - final Terms terms = reader.terms(UidFieldMapper.NAME); - final TermsEnum uids = terms.iterator(); - final GrowableWriter versions = new GrowableWriter(2, reader.maxDoc(), PackedInts.COMPACT); - PostingsEnum dpe = null; - for (BytesRef uid = uids.next(); uid != null; uid = uids.next()) { - dpe = uids.postings(dpe, PostingsEnum.PAYLOADS); - assert terms.hasPayloads() : "field has payloads"; - final Bits liveDocs = reader.getLiveDocs(); - for (int doc = dpe.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = dpe.nextDoc()) { - if (liveDocs != null && liveDocs.get(doc) == false) { - continue; - } - dpe.nextPosition(); - final BytesRef payload = dpe.getPayload(); - if (payload != null && payload.length == 8) { - final long version = Numbers.bytesToLong(payload); - versions.set(doc, version); - break; - } - } - } - return versions; - } else { - return in.getNumeric(field); - } - } - - @Override - public Bits getDocsWithField(FieldInfo field) throws IOException { - if (VersionFieldMapper.NAME.equals(field.name)) 
{ - return new Bits.MatchAllBits(reader.maxDoc()); - } else { - return in.getDocsWithField(field); - } - } - - @Override - public DocValuesProducer getMergeInstance() throws IOException { - return new UninvertedVersions(in.getMergeInstance(), reader); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java b/core/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java index cfd5dc8f0662..9f712c77e705 100644 --- a/core/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java +++ b/core/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java @@ -36,7 +36,7 @@ import java.util.Objects; */ public class StoreFileMetaData implements Writeable { - public static final Version FIRST_LUCENE_CHECKSUM_VERSION = Version.LUCENE_4_8_0; + public static final Version FIRST_LUCENE_CHECKSUM_VERSION = Version.LUCENE_5_0_0; private final String name; diff --git a/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java b/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java index cd0f94567f3f..54ba8638eb2e 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java @@ -22,7 +22,6 @@ import org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.store.DataInput; import org.apache.lucene.store.DataOutput; import org.apache.lucene.store.InputStreamDataInput; -import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.Channels; import java.io.IOException; @@ -36,9 +35,9 @@ import java.nio.file.Path; */ class Checkpoint { - static final int BUFFER_SIZE = RamUsageEstimator.NUM_BYTES_INT // ops - + RamUsageEstimator.NUM_BYTES_LONG // offset - + RamUsageEstimator.NUM_BYTES_LONG;// generation + static final int BUFFER_SIZE = Integer.BYTES // ops + + Long.BYTES // offset + + Long.BYTES;// generation final long offset; final int numOps; final long generation; diff --git 
a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index 5a4438f426dd..31b8db031415 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -418,10 +418,10 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC try { final BufferedChecksumStreamOutput checksumStreamOutput = new BufferedChecksumStreamOutput(out); final long start = out.position(); - out.skip(RamUsageEstimator.NUM_BYTES_INT); + out.skip(Integer.BYTES); writeOperationNoSize(checksumStreamOutput, operation); final long end = out.position(); - final int operationSize = (int) (end - RamUsageEstimator.NUM_BYTES_INT - start); + final int operationSize = (int) (end - Integer.BYTES - start); out.seek(start); out.writeInt(operationSize); out.seek(end); @@ -636,7 +636,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC @Override public long ramBytesUsed() { - return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2 * RamUsageEstimator.NUM_BYTES_LONG + RamUsageEstimator.NUM_BYTES_INT; + return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2 * Long.BYTES + Integer.BYTES; } @Override @@ -1144,10 +1144,10 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC for (Operation op : toWrite) { out.reset(); final long start = out.position(); - out.skip(RamUsageEstimator.NUM_BYTES_INT); + out.skip(Integer.BYTES); writeOperationNoSize(checksumStreamOutput, op); long end = out.position(); - int operationSize = (int) (out.position() - RamUsageEstimator.NUM_BYTES_INT - start); + int operationSize = (int) (out.position() - Integer.BYTES - start); out.seek(start); out.writeInt(operationSize); out.seek(end); diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java 
b/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java index ecc3822361c2..fcb3daea796f 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java @@ -26,7 +26,6 @@ import org.apache.lucene.index.IndexFormatTooOldException; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.InputStreamDataInput; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.Channels; import org.elasticsearch.common.io.stream.InputStreamStreamInput; @@ -116,7 +115,7 @@ public class TranslogReader extends BaseTranslogReader implements Closeable { if (uuidBytes.bytesEquals(ref) == false) { throw new TranslogCorruptedException("expected shard UUID [" + uuidBytes + "] but got: [" + ref + "] this translog file belongs to a different translog. path:" + path); } - return new TranslogReader(checkpoint.generation, channel, path, ref.length + CodecUtil.headerLength(TranslogWriter.TRANSLOG_CODEC) + RamUsageEstimator.NUM_BYTES_INT, checkpoint.offset, checkpoint.numOps); + return new TranslogReader(checkpoint.generation, channel, path, ref.length + CodecUtil.headerLength(TranslogWriter.TRANSLOG_CODEC) + Integer.BYTES, checkpoint.offset, checkpoint.numOps); default: throw new TranslogCorruptedException("No known translog stream version: " + version + " path:" + path); } diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index a1fc708ddaf3..e215669761c6 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -24,7 +24,6 @@ import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.OutputStreamDataOutput; import 
org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; -import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Channels; import org.elasticsearch.common.unit.ByteSizeValue; @@ -76,7 +75,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { } private static int getHeaderLength(int uuidLength) { - return CodecUtil.headerLength(TRANSLOG_CODEC) + uuidLength + RamUsageEstimator.NUM_BYTES_INT; + return CodecUtil.headerLength(TRANSLOG_CODEC) + uuidLength + Integer.BYTES; } public static TranslogWriter create(ShardId shardId, String translogUUID, long fileGeneration, Path file, ChannelFactory channelFactory, ByteSizeValue bufferSize) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java b/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java index 575153c8ada6..32b5f55b3692 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java @@ -228,7 +228,7 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo @Override public long ramBytesUsed() { - return RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_LONG + value.length(); + return RamUsageEstimator.NUM_BYTES_OBJECT_REF + Long.BYTES + value.length(); } @Override diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java index f99b39ef6201..5d2fb761842b 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java @@ -19,6 +19,8 @@ package org.elasticsearch.indices.analysis; import org.apache.lucene.analysis.hunspell.Dictionary; +import org.apache.lucene.store.Directory; +import 
org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.component.AbstractComponent; @@ -183,7 +185,9 @@ public class HunspellService extends AbstractComponent { affixStream = Files.newInputStream(affixFiles[0]); - return new Dictionary(affixStream, dicStreams, ignoreCase); + try (Directory tmp = new SimpleFSDirectory(env.tmpFile())) { + return new Dictionary(tmp, "hunspell", affixStream, dicStreams, ignoreCase); + } } catch (Exception e) { logger.error("Could not load hunspell dictionary [{}]", e, locale); diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolatorQuery.java b/core/src/main/java/org/elasticsearch/percolator/PercolatorQuery.java index b3208b4133c3..98be7d308af8 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolatorQuery.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolatorQuery.java @@ -115,10 +115,6 @@ final class PercolatorQuery extends Query { @Override public Query rewrite(IndexReader reader) throws IOException { - if (getBoost() != 1f) { - return super.rewrite(reader); - } - Query rewritten = percolatorQueriesQuery.rewrite(reader); if (rewritten != percolatorQueriesQuery) { return new PercolatorQuery(rewritten, percolatorIndexSearcher, percolatorQueries); diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java index 6d8400f648c8..6cea34cf6798 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java +++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java @@ -167,7 +167,7 @@ public class ChecksumBlobStoreFormat extends BlobStoreForm BytesReference bytes = write(obj); try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) { final String resourceDesc = 
"ChecksumBlobStoreFormat.writeBlob(blob=\"" + blobName + "\")"; - try (OutputStreamIndexOutput indexOutput = new OutputStreamIndexOutput(resourceDesc, byteArrayOutputStream, BUFFER_SIZE)) { + try (OutputStreamIndexOutput indexOutput = new OutputStreamIndexOutput(resourceDesc, blobName, byteArrayOutputStream, BUFFER_SIZE)) { CodecUtil.writeHeader(indexOutput, codec, VERSION); try (OutputStream indexOutputOutputStream = new IndexOutputOutputStream(indexOutput) { @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTerms.java index b0479475d869..1821124473f5 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTerms.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTerms.java @@ -33,6 +33,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.ArrayList; +import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -108,7 +109,7 @@ public class SignificantStringTerms extends InternalSignificantTerms @Override int compareTerm(Terms.Bucket other) { - return BytesRef.getUTF8SortedAsUnicodeComparator().compare(termBytes, ((Bucket) other).termBytes); + return termBytes.compareTo(((Bucket) other).termBytes); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java index eee9d4cbf90f..41dd0bb441ec 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java @@ -518,13 +518,13 @@ 
public class IncludeExclude implements Writeable, ToXContent { if (includeValues != null) { for (BytesRef val : includeValues) { double dval=Double.parseDouble(val.utf8ToString()); - result.addAccept( NumericUtils.doubleToSortableLong(dval)); + result.addAccept(NumericUtils.doubleToSortableLong(dval)); } } if (excludeValues != null) { for (BytesRef val : excludeValues) { double dval=Double.parseDouble(val.utf8ToString()); - result.addReject( NumericUtils.doubleToSortableLong(dval)); + result.addReject(NumericUtils.doubleToSortableLong(dval)); } } return result; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java index 2e8ce4563ce7..568ecdbec590 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.metrics.cardinality; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LongBitSet; -import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.packed.PackedInts; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -67,7 +66,7 @@ public final class HyperLogLogPlusPlus implements Releasable { */ public static int precisionFromThreshold(long count) { final long hashTableEntries = (long) Math.ceil(count / MAX_LOAD_FACTOR); - int precision = PackedInts.bitsRequired(hashTableEntries * RamUsageEstimator.NUM_BYTES_INT); + int precision = PackedInts.bitsRequired(hashTableEntries * Integer.BYTES); precision = Math.max(precision, MIN_PRECISION); precision = Math.min(precision, MAX_PRECISION); return precision; diff --git 
a/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java b/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java index 8ad24b5cb190..7a15f67dbd6f 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java @@ -78,12 +78,12 @@ public final class CustomQueryScorer extends QueryScorer { Map terms) throws IOException { if (query instanceof FunctionScoreQuery) { query = ((FunctionScoreQuery) query).getSubQuery(); - extract(query, query.getBoost(), terms); + extract(query, 1F, terms); } else if (query instanceof FiltersFunctionScoreQuery) { query = ((FiltersFunctionScoreQuery) query).getSubQuery(); - extract(query, query.getBoost(), terms); + extract(query, 1F, terms); } else if (terms.isEmpty()) { - extractWeightedTerms(terms, query, query.getBoost()); + extractWeightedTerms(terms, query, 1F); } } diff --git a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java index b3175e6c22a0..b9ae34b60b0b 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java @@ -89,23 +89,12 @@ public final class FragmentBuilderHelper { } if (analyzer instanceof CustomAnalyzer) { final CustomAnalyzer a = (CustomAnalyzer) analyzer; - if (a.tokenizerFactory() instanceof EdgeNGramTokenizerFactory - || (a.tokenizerFactory() instanceof NGramTokenizerFactory - && !((NGramTokenizerFactory)a.tokenizerFactory()).version().onOrAfter(Version.LUCENE_4_2))) { - // ngram tokenizer is broken before 4.2 - return true; - } TokenFilterFactory[] tokenFilters = a.tokenFilters(); for (TokenFilterFactory tokenFilterFactory : tokenFilters) { if (tokenFilterFactory 
instanceof WordDelimiterTokenFilterFactory || tokenFilterFactory instanceof EdgeNGramTokenFilterFactory) { return true; } - if (tokenFilterFactory instanceof NGramTokenFilterFactory - && !((NGramTokenFilterFactory)tokenFilterFactory).version().onOrAfter(Version.LUCENE_4_2)) { - // ngram token filter is broken before 4.2 - return true; - } } } return false; diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java index 6c01a27442e8..8c3c19343b40 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.internal; import org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Collector; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; @@ -233,9 +232,6 @@ public class DefaultSearchContext extends SearchContext { Query result; if (Queries.isConstantMatchAllQuery(query())) { result = new ConstantScoreQuery(searchFilter); - if (query().getBoost() != AbstractQueryBuilder.DEFAULT_BOOST) { - result = new BoostQuery(result, query().getBoost()); - } } else { result = new BooleanQuery.Builder() .add(query, Occur.MUST) diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index 608b33db0fee..f953123c7a41 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.0.jar}" { //// Very special jar permissions: //// These are dangerous permissions that we don't 
want to grant to everything. -grant codeBase "${codebase.lucene-core-5.5.0.jar}" { +grant codeBase "${codebase.lucene-core-6.0.0-snapshot-bea235f.jar}" { // needed to allow MMapDirectory's "unmap hack" (die unmap hack, die) permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index 856cd50e2a92..fafa57118c25 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -31,7 +31,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" { permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; -grant codeBase "${codebase.lucene-test-framework-5.5.0.jar}" { +grant codeBase "${codebase.lucene-test-framework-6.0.0-snapshot-bea235f.jar}" { // needed by RamUsageTester permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; diff --git a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java index 39b4df440594..94806422c17e 100644 --- a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java +++ b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java @@ -82,7 +82,7 @@ public class BlendedTermQueryTests extends ESTestCase { w.addDocument(d); } w.commit(); - DirectoryReader reader = DirectoryReader.open(w, true); + DirectoryReader reader = DirectoryReader.open(w); IndexSearcher searcher = setSimilarity(newSearcher(reader)); { @@ -143,7 +143,7 @@ public class BlendedTermQueryTests extends ESTestCase { w.addDocument(d); } w.commit(); - DirectoryReader reader = DirectoryReader.open(w, true); + DirectoryReader reader = DirectoryReader.open(w); IndexSearcher searcher = 
setSimilarity(newSearcher(reader)); { String[] fields = new String[]{"username", "song"}; diff --git a/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java b/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java index 6e4d3867fdec..566d2148cae2 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java @@ -19,13 +19,13 @@ package org.elasticsearch.common.geo; -import com.spatial4j.core.exception.InvalidShapeException; -import com.spatial4j.core.shape.Circle; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.ShapeCollection; -import com.spatial4j.core.shape.jts.JtsGeometry; -import com.spatial4j.core.shape.jts.JtsPoint; +import org.locationtech.spatial4j.exception.InvalidShapeException; +import org.locationtech.spatial4j.shape.Circle; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.ShapeCollection; +import org.locationtech.spatial4j.shape.jts.JtsGeometry; +import org.locationtech.spatial4j.shape.jts.JtsPoint; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.GeometryFactory; diff --git a/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java index 06fadffc806d..abbd6ce40aad 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java @@ -19,12 +19,12 @@ package org.elasticsearch.common.geo; -import com.spatial4j.core.exception.InvalidShapeException; -import com.spatial4j.core.shape.Circle; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import 
com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.impl.PointImpl; +import org.locationtech.spatial4j.exception.InvalidShapeException; +import org.locationtech.spatial4j.shape.Circle; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.impl.PointImpl; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.LineString; import com.vividsolutions.jts.geom.Polygon; diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java index 305e57fbaf15..881db868ef90 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.common.geo.builders; -import com.spatial4j.core.shape.Rectangle; +import org.locationtech.spatial4j.shape.Rectangle; import com.vividsolutions.jts.geom.Coordinate; import org.elasticsearch.test.geo.RandomShapeGenerator; diff --git a/core/src/test/java/org/elasticsearch/common/lucene/IndexCacheableQueryTests.java b/core/src/test/java/org/elasticsearch/common/lucene/IndexCacheableQueryTests.java deleted file mode 100644 index a4a5972e45b9..000000000000 --- a/core/src/test/java/org/elasticsearch/common/lucene/IndexCacheableQueryTests.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.lucene; - -import org.apache.lucene.document.Document; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.MultiReader; -import org.apache.lucene.index.RandomIndexWriter; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.Explanation; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.LRUQueryCache; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.QueryCachingPolicy; -import org.apache.lucene.search.QueryUtils; -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.Weight; -import org.apache.lucene.store.Directory; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; -import java.util.Set; - -public class IndexCacheableQueryTests extends ESTestCase { - - static class DummyIndexCacheableQuery extends IndexCacheableQuery { - @Override - public String toString(String field) { - return "DummyIndexCacheableQuery"; - } - - @Override - public Weight doCreateWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - return new Weight(this) { - - @Override - public void extractTerms(Set terms) { - throw new UnsupportedOperationException(); - } - - @Override - public Explanation explain(LeafReaderContext context, int doc) throws IOException { - throw new UnsupportedOperationException(); - } - - @Override - public float getValueForNormalization() throws IOException { - return 0; - } - - @Override - public void 
normalize(float norm, float topLevelBoost) { - } - - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return null; - } - - }; - } - } - - public void testBasics() throws IOException { - DummyIndexCacheableQuery query = new DummyIndexCacheableQuery(); - QueryUtils.check(query); - - Query rewritten = query.rewrite(new MultiReader(new IndexReader[0])); - QueryUtils.check(rewritten); - QueryUtils.checkUnequal(query, rewritten); - - Query rewritten2 = query.rewrite(new MultiReader(new IndexReader[0])); - QueryUtils.check(rewritten2); - QueryUtils.checkUnequal(rewritten, rewritten2); - } - - public void testCache() throws IOException { - Directory dir = newDirectory(); - LRUQueryCache cache = new LRUQueryCache(10000, Long.MAX_VALUE); - QueryCachingPolicy policy = QueryCachingPolicy.ALWAYS_CACHE; - RandomIndexWriter writer = new RandomIndexWriter(getRandom(), dir); - for (int i = 0; i < 10; ++i) { - writer.addDocument(new Document()); - } - - IndexReader reader = writer.getReader(); - IndexSearcher searcher = newSearcher(reader); - reader = searcher.getIndexReader(); // reader might be wrapped - searcher.setQueryCache(cache); - searcher.setQueryCachingPolicy(policy); - - assertEquals(0, cache.getCacheSize()); - DummyIndexCacheableQuery query = new DummyIndexCacheableQuery(); - searcher.count(query); - int expectedCacheSize = reader.leaves().size(); - assertEquals(expectedCacheSize, cache.getCacheSize()); - searcher.count(query); - assertEquals(expectedCacheSize, cache.getCacheSize()); - - writer.addDocument(new Document()); - - IndexReader reader2 = writer.getReader(); - searcher = newSearcher(reader2); - reader2 = searcher.getIndexReader(); // reader might be wrapped - searcher.setQueryCache(cache); - searcher.setQueryCachingPolicy(policy); - - // since the query is only cacheable at the index level, it has to be recomputed on all leaves - expectedCacheSize += reader2.leaves().size(); - searcher.count(query); - 
assertEquals(expectedCacheSize, cache.getCacheSize()); - searcher.count(query); - assertEquals(expectedCacheSize, cache.getCacheSize()); - - reader.close(); - reader2.close(); - writer.close(); - assertEquals(0, cache.getCacheSize()); - dir.close(); - } - -} diff --git a/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index 484b88f096f4..8f8aea578de6 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -84,7 +84,6 @@ public class LuceneTests extends ESTestCase { // now shadow engine should try to be created latch.countDown(); - dir.setEnableVirusScanner(false); IndexWriterConfig iwc = newIndexWriterConfig(); iwc.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE); iwc.setMergePolicy(NoMergePolicy.INSTANCE); @@ -104,7 +103,6 @@ public class LuceneTests extends ESTestCase { public void testCleanIndex() throws IOException { MockDirectoryWrapper dir = newMockDirectory(); - dir.setEnableVirusScanner(false); IndexWriterConfig iwc = newIndexWriterConfig(); iwc.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE); iwc.setMergePolicy(NoMergePolicy.INSTANCE); @@ -130,7 +128,7 @@ public class LuceneTests extends ESTestCase { writer.deleteDocuments(new Term("id", "2")); writer.commit(); - try (DirectoryReader open = DirectoryReader.open(writer, true)) { + try (DirectoryReader open = DirectoryReader.open(writer)) { assertEquals(3, open.numDocs()); assertEquals(1, open.numDeletedDocs()); assertEquals(4, open.maxDoc()); @@ -158,7 +156,6 @@ public class LuceneTests extends ESTestCase { public void testPruneUnreferencedFiles() throws IOException { MockDirectoryWrapper dir = newMockDirectory(); - dir.setEnableVirusScanner(false); IndexWriterConfig iwc = newIndexWriterConfig(); iwc.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE); iwc.setMergePolicy(NoMergePolicy.INSTANCE); @@ -186,7 +183,7 @@ 
public class LuceneTests extends ESTestCase { writer.deleteDocuments(new Term("id", "2")); writer.commit(); - DirectoryReader open = DirectoryReader.open(writer, true); + DirectoryReader open = DirectoryReader.open(writer); assertEquals(3, open.numDocs()); assertEquals(1, open.numDeletedDocs()); assertEquals(4, open.maxDoc()); @@ -215,7 +212,6 @@ public class LuceneTests extends ESTestCase { public void testFiles() throws IOException { MockDirectoryWrapper dir = newMockDirectory(); - dir.setEnableVirusScanner(false); IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(NoMergePolicy.INSTANCE); iwc.setMaxBufferedDocs(2); @@ -279,7 +275,6 @@ public class LuceneTests extends ESTestCase { public void testNumDocs() throws IOException { MockDirectoryWrapper dir = newMockDirectory(); - dir.setEnableVirusScanner(false); IndexWriterConfig iwc = newIndexWriterConfig(); IndexWriter writer = new IndexWriter(dir, iwc); Document doc = new Document(); diff --git a/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java b/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java index 9e7f54b83232..f2dc53e44cd3 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java @@ -152,7 +152,7 @@ public class SimpleAllTests extends ESTestCase { indexWriter.addDocument(doc); - IndexReader reader = DirectoryReader.open(indexWriter, true); + IndexReader reader = DirectoryReader.open(indexWriter); IndexSearcher searcher = new IndexSearcher(reader); Query query = new AllTermQuery(new Term("_all", "else")); @@ -198,7 +198,7 @@ public class SimpleAllTests extends ESTestCase { indexWriter.addDocument(doc); - IndexReader reader = DirectoryReader.open(indexWriter, true); + IndexReader reader = DirectoryReader.open(indexWriter); IndexSearcher searcher = new IndexSearcher(reader); // this one is boosted. 
so the second doc is more relevant @@ -244,7 +244,7 @@ public class SimpleAllTests extends ESTestCase { indexWriter.addDocument(doc); - IndexReader reader = DirectoryReader.open(indexWriter, true); + IndexReader reader = DirectoryReader.open(indexWriter); assertEquals(2, reader.leaves().size()); IndexSearcher searcher = new IndexSearcher(reader); @@ -280,7 +280,7 @@ public class SimpleAllTests extends ESTestCase { indexWriter.addDocument(doc); - IndexReader reader = DirectoryReader.open(indexWriter, true); + IndexReader reader = DirectoryReader.open(indexWriter); IndexSearcher searcher = new IndexSearcher(reader); TopDocs docs = searcher.search(new AllTermQuery(new Term("_all", "else")), 10); @@ -330,7 +330,7 @@ public class SimpleAllTests extends ESTestCase { indexWriter.addDocument(doc); - IndexReader reader = DirectoryReader.open(indexWriter, true); + IndexReader reader = DirectoryReader.open(indexWriter); IndexSearcher searcher = new IndexSearcher(reader); TopDocs docs = searcher.search(new AllTermQuery(new Term("_all", "else")), 10); @@ -368,7 +368,7 @@ public class SimpleAllTests extends ESTestCase { indexWriter.addDocument(doc); - IndexReader reader = DirectoryReader.open(indexWriter, true); + IndexReader reader = DirectoryReader.open(indexWriter); IndexSearcher searcher = new IndexSearcher(reader); TopDocs docs = searcher.search(new MatchAllDocsQuery(), 10); diff --git a/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java b/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java index 7fb3ec0c2e9c..817dabfece3f 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java @@ -55,7 +55,7 @@ public class ESDirectoryReaderTests extends ESTestCase { // open reader ShardId shardId = new ShardId("fake", "_na_", 1); - DirectoryReader ir = 
ElasticsearchDirectoryReader.wrap(DirectoryReader.open(iw, true), shardId); + DirectoryReader ir = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(iw), shardId); assertEquals(2, ir.numDocs()); assertEquals(1, ir.leaves().size()); diff --git a/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java b/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java index 51d2ba77ec57..3d1b0fdb8429 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java @@ -137,7 +137,7 @@ public class FreqTermsEnumTests extends ESTestCase { } // now go over each doc, build the relevant references and filter - reader = DirectoryReader.open(iw, true); + reader = DirectoryReader.open(iw); List filterTerms = new ArrayList<>(); for (int docId = 0; docId < reader.maxDoc(); docId++) { Document doc = reader.document(docId); diff --git a/core/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java b/core/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java index 9098289847ea..23b6939fe7a7 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java @@ -27,15 +27,12 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.Term; -import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; import org.apache.lucene.store.RAMDirectory; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; public class 
MultiPhrasePrefixQueryTests extends ESTestCase { public void testSimple() throws Exception { @@ -43,7 +40,7 @@ public class MultiPhrasePrefixQueryTests extends ESTestCase { Document doc = new Document(); doc.add(new Field("field", "aaa bbb ccc ddd", TextField.TYPE_NOT_STORED)); writer.addDocument(doc); - IndexReader reader = DirectoryReader.open(writer, true); + IndexReader reader = DirectoryReader.open(writer); IndexSearcher searcher = new IndexSearcher(reader); MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery(); @@ -66,22 +63,4 @@ public class MultiPhrasePrefixQueryTests extends ESTestCase { query.add(new Term("field", "xxx")); assertThat(searcher.count(query), equalTo(0)); } - - public void testBoost() throws Exception { - IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); - Document doc = new Document(); - doc.add(new Field("field", "aaa bbb", TextField.TYPE_NOT_STORED)); - writer.addDocument(doc); - doc = new Document(); - doc.add(new Field("field", "ccc ddd", TextField.TYPE_NOT_STORED)); - writer.addDocument(doc); - IndexReader reader = DirectoryReader.open(writer, true); - MultiPhrasePrefixQuery multiPhrasePrefixQuery = new MultiPhrasePrefixQuery(); - multiPhrasePrefixQuery.add(new Term[]{new Term("field", "aaa"), new Term("field", "bb")}); - multiPhrasePrefixQuery.setBoost(randomFloat()); - Query query = multiPhrasePrefixQuery.rewrite(reader); - assertThat(query, instanceOf(BoostQuery.class)); - BoostQuery boostQuery = (BoostQuery) query; - assertThat(boostQuery.getBoost(), equalTo(multiPhrasePrefixQuery.getBoost())); - } } \ No newline at end of file diff --git a/core/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java b/core/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java index b0e2ea873c47..0dcce74c1d2e 100644 --- 
a/core/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java @@ -54,7 +54,7 @@ public class MoreLikeThisQueryTests extends ESTestCase { document.add(new TextField("text", "lucene release", Field.Store.YES)); indexWriter.addDocument(document); - IndexReader reader = DirectoryReader.open(indexWriter, true); + IndexReader reader = DirectoryReader.open(indexWriter); IndexSearcher searcher = new IndexSearcher(reader); MoreLikeThisQuery mltQuery = new MoreLikeThisQuery("lucene", new String[]{"text"}, Lucene.STANDARD_ANALYZER); diff --git a/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionLookupTests.java b/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionLookupTests.java index 1340d11616cd..72b6b2b5eec6 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionLookupTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionLookupTests.java @@ -51,7 +51,7 @@ public class VersionLookupTests extends ESTestCase { doc.add(new Field(UidFieldMapper.NAME, "6", UidFieldMapper.Defaults.FIELD_TYPE)); doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87)); writer.addDocument(doc); - DirectoryReader reader = DirectoryReader.open(writer, false); + DirectoryReader reader = DirectoryReader.open(writer); LeafReaderContext segment = reader.leaves().get(0); PerThreadIDAndVersionLookup lookup = new PerThreadIDAndVersionLookup(segment.reader()); // found doc @@ -79,7 +79,7 @@ public class VersionLookupTests extends ESTestCase { doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87)); writer.addDocument(doc); writer.addDocument(doc); - DirectoryReader reader = DirectoryReader.open(writer, false); + DirectoryReader reader = DirectoryReader.open(writer); LeafReaderContext segment = reader.leaves().get(0); PerThreadIDAndVersionLookup lookup = new 
PerThreadIDAndVersionLookup(segment.reader()); // return the last doc when there are duplicates diff --git a/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java b/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java index 1221445e8a63..7f405ea05310 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java @@ -78,7 +78,7 @@ public class VersionsTests extends ESTestCase { public void testVersions() throws Exception { Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); - DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1)); + DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); MatcherAssert.assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(Versions.NOT_FOUND)); Document doc = new Document(); @@ -145,7 +145,7 @@ public class VersionsTests extends ESTestCase { docs.add(doc); writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs); - DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1)); + DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(5L)); assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(5L)); @@ -170,7 +170,7 @@ public class VersionsTests extends ESTestCase { Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); - DirectoryReader directoryReader = 
ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1)); + DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); MatcherAssert.assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(Versions.NOT_FOUND)); Document doc = new Document(); @@ -305,7 +305,7 @@ public class VersionsTests extends ESTestCase { doc.add(new Field(UidFieldMapper.NAME, "6", UidFieldMapper.Defaults.FIELD_TYPE)); doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87)); writer.addDocument(doc); - DirectoryReader reader = DirectoryReader.open(writer, false); + DirectoryReader reader = DirectoryReader.open(writer); // should increase cache size by 1 assertEquals(87, Versions.loadVersion(reader, new Term(UidFieldMapper.NAME, "6"))); assertEquals(size+1, Versions.lookupStates.size()); @@ -330,7 +330,7 @@ public class VersionsTests extends ESTestCase { doc.add(new Field(UidFieldMapper.NAME, "6", UidFieldMapper.Defaults.FIELD_TYPE)); doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87)); writer.addDocument(doc); - DirectoryReader reader = DirectoryReader.open(writer, false); + DirectoryReader reader = DirectoryReader.open(writer); assertEquals(87, Versions.loadVersion(reader, new Term(UidFieldMapper.NAME, "6"))); assertEquals(size+1, Versions.lookupStates.size()); // now wrap the reader diff --git a/core/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java b/core/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java index 9c702acb2c4a..1455b397e746 100644 --- a/core/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java +++ b/core/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java @@ -22,7 +22,7 @@ package org.elasticsearch.deps.lucene; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; -import 
org.apache.lucene.document.IntField; +import org.apache.lucene.document.LegacyIntField; import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; @@ -51,7 +51,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.test.ESTestCase; @@ -74,9 +74,9 @@ public class SimpleLuceneTests extends ESTestCase { document.add(new SortedDocValuesField("str", new BytesRef(text))); indexWriter.addDocument(document); } - IndexReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(indexWriter, true)); + IndexReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(indexWriter)); IndexSearcher searcher = new IndexSearcher(reader); - TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), null, 10, new Sort(new SortField("str", SortField.Type.STRING))); + TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField("str", SortField.Type.STRING))); for (int i = 0; i < 10; i++) { FieldDoc fieldDoc = (FieldDoc) docs.scoreDocs[i]; assertThat((BytesRef) fieldDoc.fields[0], equalTo(new BytesRef(new String(new char[]{(char) (97 + i), (char) (97 + i)})))); @@ -89,10 +89,10 @@ public class SimpleLuceneTests extends ESTestCase { Document document = new Document(); document.add(new TextField("_id", "1", Field.Store.YES)); - document.add(new IntField("test", 2, IntField.TYPE_STORED)); + document.add(new LegacyIntField("test", 2, LegacyIntField.TYPE_STORED)); indexWriter.addDocument(document); - IndexReader reader = DirectoryReader.open(indexWriter, true); + IndexReader reader = DirectoryReader.open(indexWriter); IndexSearcher searcher = new IndexSearcher(reader); 
TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); Document doc = searcher.doc(topDocs.scoreDocs[0].doc); @@ -100,7 +100,7 @@ public class SimpleLuceneTests extends ESTestCase { assertThat(f.stringValue(), equalTo("2")); BytesRefBuilder bytes = new BytesRefBuilder(); - NumericUtils.intToPrefixCoded(2, 0, bytes); + LegacyNumericUtils.intToPrefixCoded(2, 0, bytes); topDocs = searcher.search(new TermQuery(new Term("test", bytes.get())), 1); doc = searcher.doc(topDocs.scoreDocs[0].doc); f = doc.getField("test"); @@ -123,7 +123,7 @@ public class SimpleLuceneTests extends ESTestCase { document.add(new TextField("#id", "1", Field.Store.YES)); indexWriter.addDocument(document); - IndexReader reader = DirectoryReader.open(indexWriter, true); + IndexReader reader = DirectoryReader.open(indexWriter); IndexSearcher searcher = new IndexSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); final ArrayList fieldsOrder = new ArrayList<>(); @@ -162,7 +162,7 @@ public class SimpleLuceneTests extends ESTestCase { indexWriter.addDocument(document); } - IndexReader reader = DirectoryReader.open(indexWriter, true); + IndexReader reader = DirectoryReader.open(indexWriter); IndexSearcher searcher = new IndexSearcher(reader); TermQuery query = new TermQuery(new Term("value", "value")); TopDocs topDocs = searcher.search(query, 100); @@ -179,7 +179,7 @@ public class SimpleLuceneTests extends ESTestCase { public void testNRTSearchOnClosedWriter() throws Exception { Directory dir = new RAMDirectory(); IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); - DirectoryReader reader = DirectoryReader.open(indexWriter, true); + DirectoryReader reader = DirectoryReader.open(indexWriter); for (int i = 0; i < 100; i++) { Document document = new Document(); @@ -205,26 +205,26 @@ public class SimpleLuceneTests extends ESTestCase { IndexWriter indexWriter = new IndexWriter(dir, new 
IndexWriterConfig(Lucene.STANDARD_ANALYZER)); Document doc = new Document(); - FieldType type = IntField.TYPE_NOT_STORED; - IntField field = new IntField("int1", 1, type); + FieldType type = LegacyIntField.TYPE_NOT_STORED; + LegacyIntField field = new LegacyIntField("int1", 1, type); doc.add(field); - type = new FieldType(IntField.TYPE_NOT_STORED); + type = new FieldType(LegacyIntField.TYPE_NOT_STORED); type.setIndexOptions(IndexOptions.DOCS_AND_FREQS); type.freeze(); - field = new IntField("int1", 1, type); + field = new LegacyIntField("int1", 1, type); doc.add(field); - field = new IntField("int2", 1, type); + field = new LegacyIntField("int2", 1, type); doc.add(field); - field = new IntField("int2", 1, type); + field = new LegacyIntField("int2", 1, type); doc.add(field); indexWriter.addDocument(doc); - IndexReader reader = DirectoryReader.open(indexWriter, true); + IndexReader reader = DirectoryReader.open(indexWriter); LeafReader atomicReader = SlowCompositeReaderWrapper.wrap(reader); Terms terms = atomicReader.terms("int1"); diff --git a/core/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java b/core/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java index 66dc05426783..fbb5115903c1 100644 --- a/core/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java +++ b/core/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.deps.lucene; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; +import org.apache.lucene.document.FieldType; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; @@ -53,10 +54,14 @@ public class VectorHighlighterTests extends ESTestCase { Document document = new Document(); document.add(new TextField("_id", "1", Field.Store.YES)); - document.add(new Field("content", "the big bad dog", Field.Store.YES, 
Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS)); + FieldType vectorsType = new FieldType(TextField.TYPE_STORED); + vectorsType.setStoreTermVectors(true); + vectorsType.setStoreTermVectorPositions(true); + vectorsType.setStoreTermVectorOffsets(true); + document.add(new Field("content", "the big bad dog", vectorsType)); indexWriter.addDocument(document); - IndexReader reader = DirectoryReader.open(indexWriter, true); + IndexReader reader = DirectoryReader.open(indexWriter); IndexSearcher searcher = new IndexSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); @@ -75,10 +80,14 @@ public class VectorHighlighterTests extends ESTestCase { Document document = new Document(); document.add(new TextField("_id", "1", Field.Store.YES)); - document.add(new Field("content", "the big bad dog", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS)); + FieldType vectorsType = new FieldType(TextField.TYPE_STORED); + vectorsType.setStoreTermVectors(true); + vectorsType.setStoreTermVectorPositions(true); + vectorsType.setStoreTermVectorOffsets(true); + document.add(new Field("content", "the big bad dog", vectorsType)); indexWriter.addDocument(document); - IndexReader reader = DirectoryReader.open(indexWriter, true); + IndexReader reader = DirectoryReader.open(indexWriter); IndexSearcher searcher = new IndexSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); @@ -87,12 +96,12 @@ public class VectorHighlighterTests extends ESTestCase { FastVectorHighlighter highlighter = new FastVectorHighlighter(); PrefixQuery prefixQuery = new PrefixQuery(new Term("content", "ba")); - assertThat(prefixQuery.getRewriteMethod().getClass().getName(), equalTo(PrefixQuery.CONSTANT_SCORE_FILTER_REWRITE.getClass().getName())); + assertThat(prefixQuery.getRewriteMethod().getClass().getName(), equalTo(PrefixQuery.CONSTANT_SCORE_REWRITE.getClass().getName())); String fragment = 
highlighter.getBestFragment(highlighter.getFieldQuery(prefixQuery), reader, topDocs.scoreDocs[0].doc, "content", 30); assertThat(fragment, nullValue()); - prefixQuery.setRewriteMethod(PrefixQuery.SCORING_BOOLEAN_QUERY_REWRITE); + prefixQuery.setRewriteMethod(PrefixQuery.SCORING_BOOLEAN_REWRITE); Query rewriteQuery = prefixQuery.rewrite(reader); fragment = highlighter.getBestFragment(highlighter.getFieldQuery(rewriteQuery), reader, topDocs.scoreDocs[0].doc, "content", 30); @@ -100,7 +109,7 @@ public class VectorHighlighterTests extends ESTestCase { // now check with the custom field query prefixQuery = new PrefixQuery(new Term("content", "ba")); - assertThat(prefixQuery.getRewriteMethod().getClass().getName(), equalTo(PrefixQuery.CONSTANT_SCORE_FILTER_REWRITE.getClass().getName())); + assertThat(prefixQuery.getRewriteMethod().getClass().getName(), equalTo(PrefixQuery.CONSTANT_SCORE_REWRITE.getClass().getName())); fragment = highlighter.getBestFragment(new CustomFieldQuery(prefixQuery, reader, highlighter), reader, topDocs.scoreDocs[0].doc, "content", 30); assertThat(fragment, notNullValue()); @@ -112,10 +121,14 @@ public class VectorHighlighterTests extends ESTestCase { Document document = new Document(); document.add(new TextField("_id", "1", Field.Store.YES)); - document.add(new Field("content", "the big bad dog", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS)); + FieldType vectorsType = new FieldType(TextField.TYPE_NOT_STORED); + vectorsType.setStoreTermVectors(true); + vectorsType.setStoreTermVectorPositions(true); + vectorsType.setStoreTermVectorOffsets(true); + document.add(new Field("content", "the big bad dog", vectorsType)); indexWriter.addDocument(document); - IndexReader reader = DirectoryReader.open(indexWriter, true); + IndexReader reader = DirectoryReader.open(indexWriter); IndexSearcher searcher = new IndexSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); @@ -133,10 +146,10 @@ 
public class VectorHighlighterTests extends ESTestCase { Document document = new Document(); document.add(new TextField("_id", "1", Field.Store.YES)); - document.add(new Field("content", "the big bad dog", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO)); + document.add(new TextField("content", "the big bad dog", Field.Store.YES)); indexWriter.addDocument(document); - IndexReader reader = DirectoryReader.open(indexWriter, true); + IndexReader reader = DirectoryReader.open(indexWriter); IndexSearcher searcher = new IndexSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); diff --git a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java index 8fd6876b4b2c..e36763665112 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.index; import org.apache.lucene.document.Field.Store; -import org.apache.lucene.document.IntField; +import org.apache.lucene.document.LegacyIntField; import org.apache.lucene.document.StringField; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -40,7 +40,7 @@ import static org.hamcrest.Matchers.not; public class IndexingSlowLogTests extends ESTestCase { public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException { BytesReference source = JsonXContent.contentBuilder().startObject().field("foo", "bar").endObject().bytes(); - ParsedDocument pd = new ParsedDocument(new StringField("uid", "test:id", Store.YES), new IntField("version", 1, Store.YES), "id", + ParsedDocument pd = new ParsedDocument(new StringField("uid", "test:id", Store.YES), new LegacyIntField("version", 1, Store.YES), "id", "test", null, 0, -1, null, source, null); // Turning off document logging doesn't log source[] diff --git 
a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java index 1eb1e93f09ca..43455c2a11d5 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java @@ -28,6 +28,8 @@ import org.apache.lucene.analysis.fa.PersianNormalizationFilter; import org.apache.lucene.analysis.hunspell.Dictionary; import org.apache.lucene.analysis.miscellaneous.KeywordRepeatFilter; import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.SimpleFSDirectory; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.ModuleTestCase; @@ -328,11 +330,14 @@ public class AnalysisModuleTests extends ModuleTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); - AnalysisModule module = new AnalysisModule(new Environment(settings)); + Environment environment = new Environment(settings); + AnalysisModule module = new AnalysisModule(environment); InputStream aff = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.aff"); InputStream dic = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.dic"); - Dictionary dictionary = new Dictionary(aff, dic); - module.registerHunspellDictionary("foo", dictionary); - assertInstanceBinding(module, HunspellService.class, (x) -> x.getDictionary("foo") == dictionary); + try (Directory tmp = new SimpleFSDirectory(environment.tmpFile())) { + Dictionary dictionary = new Dictionary(tmp, "hunspell", aff, dic); + module.registerHunspellDictionary("foo", dictionary); + assertInstanceBinding(module, HunspellService.class, (x) -> x.getDictionary("foo") == dictionary); + 
} } } diff --git a/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java index 9d8efb1de4b5..5e1cf2e81798 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java @@ -23,7 +23,6 @@ import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter; -import org.apache.lucene.analysis.ngram.Lucene43EdgeNGramTokenFilter; import org.apache.lucene.analysis.reverse.ReverseStringFilter; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -120,45 +119,20 @@ public class NGramTokenizerFactoryTests extends ESTokenStreamTestCase { final Index index = new Index("test", "_na_"); final String name = "ngr"; Version v = randomVersion(random()); - if (v.onOrAfter(Version.V_0_90_2)) { - Builder builder = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3); - boolean compatVersion = false; - if ((compatVersion = random().nextBoolean())) { - builder.put("version", "4." 
+ random().nextInt(3)); - } - boolean reverse = random().nextBoolean(); - if (reverse) { - builder.put("side", "back"); - } - Settings settings = builder.build(); - Settings indexSettings = newAnalysisSettingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, v.id).build(); - Tokenizer tokenizer = new MockTokenizer(); - tokenizer.setReader(new StringReader("foo bar")); - TokenStream edgeNGramTokenFilter = new EdgeNGramTokenFilterFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings).create(tokenizer); - if (reverse) { - assertThat(edgeNGramTokenFilter, instanceOf(ReverseStringFilter.class)); - } else if (compatVersion) { - assertThat(edgeNGramTokenFilter, instanceOf(Lucene43EdgeNGramTokenFilter.class)); - } else { - assertThat(edgeNGramTokenFilter, instanceOf(EdgeNGramTokenFilter.class)); - } - + Builder builder = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3); + boolean reverse = random().nextBoolean(); + if (reverse) { + builder.put("side", "back"); + } + Settings settings = builder.build(); + Settings indexSettings = newAnalysisSettingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, v.id).build(); + Tokenizer tokenizer = new MockTokenizer(); + tokenizer.setReader(new StringReader("foo bar")); + TokenStream edgeNGramTokenFilter = new EdgeNGramTokenFilterFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings).create(tokenizer); + if (reverse) { + assertThat(edgeNGramTokenFilter, instanceOf(ReverseStringFilter.class)); } else { - Builder builder = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3); - boolean reverse = random().nextBoolean(); - if (reverse) { - builder.put("side", "back"); - } - Settings settings = builder.build(); - Settings indexSettings = newAnalysisSettingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, v.id).build(); - Tokenizer tokenizer = new MockTokenizer(); - tokenizer.setReader(new StringReader("foo bar")); - TokenStream 
edgeNGramTokenFilter = new EdgeNGramTokenFilterFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings).create(tokenizer); - if (reverse) { - assertThat(edgeNGramTokenFilter, instanceOf(ReverseStringFilter.class)); - } else { - assertThat(edgeNGramTokenFilter, instanceOf(Lucene43EdgeNGramTokenFilter.class)); - } + assertThat(edgeNGramTokenFilter, instanceOf(EdgeNGramTokenFilter.class)); } } } diff --git a/core/src/test/java/org/elasticsearch/index/analysis/NumericAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/NumericAnalyzerTests.java index 89940558d514..10d3d3554dd4 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/NumericAnalyzerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/NumericAnalyzerTests.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.analysis; -import org.apache.lucene.analysis.NumericTokenStream; -import org.apache.lucene.analysis.NumericTokenStream.NumericTermAttribute; +import org.apache.lucene.analysis.LegacyNumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream.LegacyNumericTermAttribute; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.elasticsearch.test.ESTestCase; @@ -37,10 +37,10 @@ public class NumericAnalyzerTests extends ESTestCase { NumericDoubleAnalyzer analyzer = new NumericDoubleAnalyzer(precisionStep); final TokenStream ts1 = analyzer.tokenStream("dummy", String.valueOf(value)); - final NumericTokenStream ts2 = new NumericTokenStream(precisionStep); + final LegacyNumericTokenStream ts2 = new LegacyNumericTokenStream(precisionStep); ts2.setDoubleValue(value); - final NumericTermAttribute numTerm1 = ts1.addAttribute(NumericTermAttribute.class); - final NumericTermAttribute numTerm2 = ts1.addAttribute(NumericTermAttribute.class); + final LegacyNumericTermAttribute numTerm1 = ts1.addAttribute(LegacyNumericTermAttribute.class); + 
final LegacyNumericTermAttribute numTerm2 = ts1.addAttribute(LegacyNumericTermAttribute.class); final PositionIncrementAttribute posInc1 = ts1.addAttribute(PositionIncrementAttribute.class); final PositionIncrementAttribute posInc2 = ts1.addAttribute(PositionIncrementAttribute.class); ts1.reset(); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java index 2804f522afac..d319ab443191 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; -import org.apache.lucene.analysis.core.Lucene43StopFilter; import org.apache.lucene.analysis.core.StopFilter; import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.apache.lucene.search.suggest.analyzing.SuggestStopFilter; @@ -57,14 +56,8 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase { public void testCorrectPositionIncrementSetting() throws IOException { Builder builder = Settings.settingsBuilder().put("index.analysis.filter.my_stop.type", "stop"); - int thingToDo = random().nextInt(3); - if (thingToDo == 0) { + if (random().nextBoolean()) { builder.put("index.analysis.filter.my_stop.version", Version.LATEST); - } else if (thingToDo == 1) { - builder.put("index.analysis.filter.my_stop.version", Version.LUCENE_4_0); - if (random().nextBoolean()) { - builder.put("index.analysis.filter.my_stop.enable_position_increments", true); - } } else { // don't specify } @@ -75,27 +68,7 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase { Tokenizer tokenizer = new WhitespaceTokenizer(); tokenizer.setReader(new StringReader("foo bar")); TokenStream create = tokenFilter.create(tokenizer); - if (thingToDo == 1) { 
- assertThat(create, instanceOf(Lucene43StopFilter.class)); - } else { - assertThat(create, instanceOf(StopFilter.class)); - } - } - - public void testDeprecatedPositionIncrementSettingWithVersions() throws IOException { - Settings settings = Settings.settingsBuilder() - .put("index.analysis.filter.my_stop.type", "stop") - .put("index.analysis.filter.my_stop.enable_position_increments", false) - .put("index.analysis.filter.my_stop.version", "4.3") - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); - AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); - TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop"); - assertThat(tokenFilter, instanceOf(StopTokenFilterFactory.class)); - Tokenizer tokenizer = new WhitespaceTokenizer(); - tokenizer.setReader(new StringReader("foo bar")); - TokenStream create = tokenFilter.create(tokenizer); - assertThat(create, instanceOf(Lucene43StopFilter.class)); + assertThat(create, instanceOf(StopFilter.class)); } public void testThatSuggestStopFilterWorks() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java index a041694dde67..c23875f8a9ac 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java @@ -146,23 +146,4 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase tokenizer.setReader(new StringReader(source)); assertTokenStreamContents(tokenFilter.create(tokenizer), expected); } - - /** Back compat: - * old offset order when doing both parts and concatenation: PowerShot is a synonym of Shot */ - public void testDeprecatedPartsAndCatenate() throws IOException { - AnalysisService analysisService 
= AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") - .put("index.analysis.filter.my_word_delimiter.catenate_words", "true") - .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true") - .put("index.analysis.filter.my_word_delimiter.version", "4.7") - .build()); - TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter"); - String source = "PowerShot"; - String[] expected = new String[]{"Power", "Shot", "PowerShot" }; - Tokenizer tokenizer = new WhitespaceTokenizer(); - tokenizer.setReader(new StringReader(source)); - assertTokenStreamContents(tokenFilter.create(tokenizer), expected); - } - } diff --git a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java index 18714fe61ef4..e82ed61fbed6 100644 --- a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java +++ b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java @@ -87,7 +87,7 @@ public class BitSetFilterCacheTests extends ESTestCase { writer.addDocument(document); writer.commit(); - DirectoryReader reader = DirectoryReader.open(writer, false); + DirectoryReader reader = DirectoryReader.open(writer); reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0)); IndexSearcher searcher = new IndexSearcher(reader); @@ -112,7 +112,7 @@ public class BitSetFilterCacheTests extends ESTestCase { writer.forceMerge(1); reader.close(); - reader = DirectoryReader.open(writer, false); + reader = DirectoryReader.open(writer); reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0)); searcher = new IndexSearcher(reader); @@ -138,7 +138,7 @@ public class BitSetFilterCacheTests extends ESTestCase { 
document.add(new StringField("field", "value", Field.Store.NO)); writer.addDocument(document); writer.commit(); - final DirectoryReader writerReader = DirectoryReader.open(writer, false); + final DirectoryReader writerReader = DirectoryReader.open(writer); final IndexReader reader = ElasticsearchDirectoryReader.wrap(writerReader, new ShardId("test", "_na_", 0)); final AtomicLong stats = new AtomicLong(); @@ -211,7 +211,7 @@ public class BitSetFilterCacheTests extends ESTestCase { newIndexWriterConfig() ); writer.addDocument(new Document()); - DirectoryReader reader = DirectoryReader.open(writer, true); + DirectoryReader reader = DirectoryReader.open(writer); writer.close(); reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test2", "_na_", 0)); diff --git a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 3d912d41c38f..4fb31bb4ea94 100644 --- a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -20,18 +20,12 @@ package org.elasticsearch.index.codec; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene40.Lucene40Codec; -import org.apache.lucene.codecs.lucene41.Lucene41Codec; -import org.apache.lucene.codecs.lucene410.Lucene410Codec; -import org.apache.lucene.codecs.lucene42.Lucene42Codec; -import org.apache.lucene.codecs.lucene45.Lucene45Codec; -import org.apache.lucene.codecs.lucene46.Lucene46Codec; -import org.apache.lucene.codecs.lucene49.Lucene49Codec; import org.apache.lucene.codecs.lucene50.Lucene50Codec; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode; import org.apache.lucene.codecs.lucene53.Lucene53Codec; import org.apache.lucene.codecs.lucene54.Lucene54Codec; +import org.apache.lucene.codecs.lucene60.Lucene60Codec; import 
org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -64,16 +58,10 @@ public class CodecTests extends ESTestCase { public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Lucene54Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Lucene60Codec.class)); + assertThat(codecService.codec("Lucene54"), instanceOf(Lucene54Codec.class)); assertThat(codecService.codec("Lucene53"), instanceOf(Lucene53Codec.class)); assertThat(codecService.codec("Lucene50"), instanceOf(Lucene50Codec.class)); - assertThat(codecService.codec("Lucene410"), instanceOf(Lucene410Codec.class)); - assertThat(codecService.codec("Lucene49"), instanceOf(Lucene49Codec.class)); - assertThat(codecService.codec("Lucene46"), instanceOf(Lucene46Codec.class)); - assertThat(codecService.codec("Lucene45"), instanceOf(Lucene45Codec.class)); - assertThat(codecService.codec("Lucene40"), instanceOf(Lucene40Codec.class)); - assertThat(codecService.codec("Lucene41"), instanceOf(Lucene41Codec.class)); - assertThat(codecService.codec("Lucene42"), instanceOf(Lucene42Codec.class)); } public void testDefault() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/engine/CommitStatsTests.java b/core/src/test/java/org/elasticsearch/index/engine/CommitStatsTests.java deleted file mode 100644 index 8d9c313a9a2f..000000000000 --- a/core/src/test/java/org/elasticsearch/index/engine/CommitStatsTests.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -package org.elasticsearch.index.engine; - -import org.apache.lucene.index.SegmentInfos; -import org.elasticsearch.common.io.stream.InputStreamStreamInput; -import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; -import org.elasticsearch.test.ESTestCase; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; - -import static org.elasticsearch.test.VersionUtils.randomVersion; - - -public class CommitStatsTests extends ESTestCase { - public void testStreamingWithNullId() throws IOException { - SegmentInfos segmentInfos = new SegmentInfos(); - CommitStats commitStats = new CommitStats(segmentInfos); - org.elasticsearch.Version targetNodeVersion = randomVersion(random()); - - ByteArrayOutputStream outBuffer = new ByteArrayOutputStream(); - OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer); - out.setVersion(targetNodeVersion); - commitStats.writeTo(out); - - ByteArrayInputStream inBuffer = new ByteArrayInputStream(outBuffer.toByteArray()); - InputStreamStreamInput in = new InputStreamStreamInput(inBuffer); - in.setVersion(targetNodeVersion); - CommitStats readCommitStats = CommitStats.readCommitStatsFrom(in); - assertNull(readCommitStats.getId()); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java 
b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java index 37e530cc7f4e..b6ae9948675a 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java @@ -170,7 +170,7 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes assertValues(bytesValues, 1, one()); assertValues(bytesValues, 2, three()); - IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true)); + IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.MIN, null)))); assertThat(topDocs.totalHits, equalTo(3)); assertThat(topDocs.scoreDocs.length, equalTo(3)); @@ -226,7 +226,7 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes fillExtendedMvSet(); IndexFieldData indexFieldData = getForField("value"); - IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true)); + IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.MIN, null)))); assertThat(topDocs.totalHits, equalTo(8)); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index 5c2295457554..6f8b5a45df0f 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -150,7 +150,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase { if (readerContext != null) { 
readerContext.reader().close(); } - topLevelReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1)); + topLevelReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); LeafReader reader = SlowCompositeReaderWrapper.wrap(topLevelReader); readerContext = reader.getContext(); return readerContext; diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java index 31a17a684eec..15e4790ca9d8 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java @@ -265,7 +265,7 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI final IndexFieldData indexFieldData = getForField("value"); final String missingValue = values[1]; - IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true)); + IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); XFieldComparatorSource comparator = indexFieldData.comparatorSource(missingValue, MultiValueMode.MIN, null); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomBoolean() ? numDocs : randomIntBetween(10, numDocs), new Sort(new SortField("value", comparator, reverse))); assertEquals(numDocs, topDocs.totalHits); @@ -319,7 +319,7 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI } } final IndexFieldData indexFieldData = getForField("value"); - IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true)); + IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); XFieldComparatorSource comparator = indexFieldData.comparatorSource(first ? 
"_first" : "_last", MultiValueMode.MIN, null); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomBoolean() ? numDocs : randomIntBetween(10, numDocs), new Sort(new SortField("value", comparator, reverse))); assertEquals(numDocs, topDocs.totalHits); @@ -387,7 +387,7 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI writer.commit(); } } - DirectoryReader directoryReader = DirectoryReader.open(writer, true); + DirectoryReader directoryReader = DirectoryReader.open(writer); directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(indexService.index(), 0)); IndexSearcher searcher = new IndexSearcher(directoryReader); IndexFieldData fieldData = getForField("text"); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java index 26ea97dbf15a..7ad8653260ed 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java @@ -125,7 +125,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase { duelFieldDataBytes(random, context, leftFieldData, rightFieldData, pre); duelFieldDataBytes(random, context, rightFieldData, leftFieldData, pre); - DirectoryReader perSegment = DirectoryReader.open(writer, true); + DirectoryReader perSegment = DirectoryReader.open(writer); CompositeReaderContext composite = perSegment.getContext(); List leaves = composite.leaves(); for (LeafReaderContext atomicReaderContext : leaves) { @@ -203,7 +203,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase { duelFieldDataLong(random, context, leftFieldData, rightFieldData); duelFieldDataLong(random, context, rightFieldData, leftFieldData); - DirectoryReader perSegment = DirectoryReader.open(writer, true); + DirectoryReader perSegment = DirectoryReader.open(writer); 
CompositeReaderContext composite = perSegment.getContext(); List leaves = composite.leaves(); for (LeafReaderContext atomicReaderContext : leaves) { @@ -283,7 +283,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase { duelFieldDataDouble(random, context, leftFieldData, rightFieldData); duelFieldDataDouble(random, context, rightFieldData, leftFieldData); - DirectoryReader perSegment = DirectoryReader.open(writer, true); + DirectoryReader perSegment = DirectoryReader.open(writer); CompositeReaderContext composite = perSegment.getContext(); List leaves = composite.leaves(); for (LeafReaderContext atomicReaderContext : leaves) { @@ -341,7 +341,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase { duelFieldDataBytes(random, context, leftFieldData, rightFieldData, pre); duelFieldDataBytes(random, context, rightFieldData, leftFieldData, pre); - DirectoryReader perSegment = DirectoryReader.open(writer, true); + DirectoryReader perSegment = DirectoryReader.open(writer); CompositeReaderContext composite = perSegment.getContext(); List leaves = composite.leaves(); for (LeafReaderContext atomicReaderContext : leaves) { @@ -449,7 +449,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase { duelFieldDataGeoPoint(random, context, leftFieldData, rightFieldData, precision); duelFieldDataGeoPoint(random, context, rightFieldData, leftFieldData, precision); - DirectoryReader perSegment = DirectoryReader.open(writer, true); + DirectoryReader perSegment = DirectoryReader.open(writer); CompositeReaderContext composite = perSegment.getContext(); List leaves = composite.leaves(); for (LeafReaderContext atomicReaderContext : leaves) { diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java index 101e73683533..2d204d1003a1 100644 --- 
a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java @@ -114,7 +114,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase { Document doc = new Document(); doc.add(new StringField("s", "thisisastring", Store.NO)); writer.addDocument(doc); - DirectoryReader open = DirectoryReader.open(writer, true); + DirectoryReader open = DirectoryReader.open(writer); final boolean wrap = randomBoolean(); final IndexReader reader = wrap ? ElasticsearchDirectoryReader.wrap(open, new ShardId("test", "_na_", 1)) : open; final AtomicInteger onCacheCalled = new AtomicInteger(); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java index 1e0d8ecdf00f..9e1b5d9d1672 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java @@ -165,7 +165,7 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase { public void testSorting() throws Exception { IndexFieldData indexFieldData = getForField(parentType); - IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true)); + IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); IndexFieldData.XFieldComparatorSource comparator = indexFieldData.comparatorSource("_last", MultiValueMode.MIN, null); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField(ParentFieldMapper.joinField(parentType), comparator, false))); @@ -211,7 +211,7 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase { public void testThreads() throws Exception { final ParentChildIndexFieldData indexFieldData = getForField(childType); - final DirectoryReader reader = 
DirectoryReader.open(writer, true); + final DirectoryReader reader = DirectoryReader.open(writer); final IndexParentChildFieldData global = indexFieldData.loadGlobal(reader); final AtomicReference error = new AtomicReference<>(); final int numThreads = scaledRandomIntBetween(3, 8); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java index 7bed3ce091f4..74fc98fddbee 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java @@ -85,7 +85,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase { try (Directory dir = new RAMDirectory(); IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(getRandom())))) { w.addDocuments(doc.docs()); - try (DirectoryReader reader = DirectoryReader.open(w, true)) { + try (DirectoryReader reader = DirectoryReader.open(w)) { final LeafReader leaf = reader.leaves().get(0).reader(); // boolean fields are indexed and have doc values by default assertEquals(new BytesRef("T"), leaf.terms("field").iterator().next()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java index 3056b63b4c0f..4f4bbc656995 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java @@ -19,11 +19,11 @@ package org.elasticsearch.index.mapper.date; -import org.apache.lucene.analysis.NumericTokenStream.NumericTermAttribute; +import org.apache.lucene.analysis.LegacyNumericTokenStream.LegacyNumericTermAttribute; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.index.DocValuesType; import 
org.apache.lucene.index.IndexableField; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.util.Constants; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; @@ -189,7 +189,7 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { TokenStream tokenStream = doc.rootDoc().getField(fieldA).tokenStream(defaultMapper.mappers().indexAnalyzer(), null); tokenStream.reset(); - NumericTermAttribute nta = tokenStream.addAttribute(NumericTermAttribute.class); + LegacyNumericTermAttribute nta = tokenStream.addAttribute(LegacyNumericTermAttribute.class); List values = new ArrayList<>(); while(tokenStream.incrementToken()) { values.add(nta.getRawValue()); @@ -197,7 +197,7 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { tokenStream = doc.rootDoc().getField(fieldB).tokenStream(defaultMapper.mappers().indexAnalyzer(), null); tokenStream.reset(); - nta = tokenStream.addAttribute(NumericTermAttribute.class); + nta = tokenStream.addAttribute(LegacyNumericTermAttribute.class); int pos = 0; while(tokenStream.incrementToken()) { assertThat(values.get(pos++), equalTo(nta.getRawValue())); @@ -256,10 +256,10 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { .bytes()); assertThat(((LongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(10).millis(), DateTimeZone.UTC).getMillis()))); - NumericRangeQuery rangeQuery; + LegacyNumericRangeQuery rangeQuery; try { SearchContext.setCurrent(new TestSearchContext(null)); - rangeQuery = (NumericRangeQuery) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("10:00:00", "11:00:00", true, true).rewrite(null); + rangeQuery = (LegacyNumericRangeQuery) 
defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("10:00:00", "11:00:00", true, true).rewrite(null); } finally { SearchContext.removeCurrent(); } @@ -282,10 +282,10 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { .bytes()); assertThat(((LongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(34).millis(), DateTimeZone.UTC).getMillis()))); - NumericRangeQuery rangeQuery; + LegacyNumericRangeQuery rangeQuery; try { SearchContext.setCurrent(new TestSearchContext(null)); - rangeQuery = (NumericRangeQuery) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true).rewrite(null); + rangeQuery = (LegacyNumericRangeQuery) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true).rewrite(null); } finally { SearchContext.removeCurrent(); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java index 03c14ee1a45c..8c25713ce3d5 100755 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.mapper.externalvalues; -import com.spatial4j.core.shape.Point; +import org.locationtech.spatial4j.shape.Point; import org.apache.lucene.document.Field; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java b/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java index d171430dfff6..05677d0ed8f5 100644 --- 
a/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java @@ -64,7 +64,7 @@ public class DoubleIndexingDocTests extends ESSingleNodeTestCase { writer.addDocument(doc.rootDoc()); writer.addDocument(doc.rootDoc()); - IndexReader reader = DirectoryReader.open(writer, true); + IndexReader reader = DirectoryReader.open(writer); IndexSearcher searcher = new IndexSearcher(reader); TopDocs topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field1").fieldType().termQuery("value1", null), 10); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java index 0cd6fa0e1c91..9923846da0e9 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java @@ -76,7 +76,7 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase { // Indexing a doc in the old way FieldType fieldType = new FieldType(); fieldType.setStored(true); - fieldType.setNumericType(FieldType.NumericType.INT); + fieldType.setNumericType(FieldType.LegacyNumericType.INT); Document doc2 = new Document(); doc2.add(new StoredField("field1", new BytesRef(Numbers.intToBytes(1)))); doc2.add(new StoredField("field2", new BytesRef(Numbers.floatToBytes(1.1f)))); @@ -85,7 +85,7 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase { doc2.add(new StoredField("field3", new BytesRef(Numbers.longToBytes(3L)))); writer.addDocument(doc2); - DirectoryReader reader = DirectoryReader.open(writer, true); + DirectoryReader reader = DirectoryReader.open(writer); IndexSearcher searcher = new IndexSearcher(reader); Set fields = new HashSet<>(Arrays.asList("field1", "field2", "field3")); diff --git 
a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java index bf21f2fd6d32..09804f829193 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.mapper.numeric; -import org.apache.lucene.analysis.NumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.DocValuesType; @@ -623,8 +623,8 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { // check the tokenstream actually used by the indexer TokenStream ts = field.tokenStream(null, null); - assertThat(ts, instanceOf(NumericTokenStream.class)); - assertEquals(expected, ((NumericTokenStream)ts).getPrecisionStep()); + assertThat(ts, instanceOf(LegacyNumericTokenStream.class)); + assertEquals(expected, ((LegacyNumericTokenStream)ts).getPrecisionStep()); } public void testTermVectorsBackCompat() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java index 7ccad1ffd2a1..b14d5f507765 100644 --- a/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java @@ -22,7 +22,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.index.Term; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.FuzzyQuery; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.collect.Tuple; import 
org.elasticsearch.common.unit.Fuzziness; @@ -60,7 +60,7 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase numericRangeQuery = (NumericRangeQuery) query; + LegacyNumericRangeQuery numericRangeQuery = (LegacyNumericRangeQuery) query; assertTrue(numericRangeQuery.includesMin()); assertTrue(numericRangeQuery.includesMax()); diff --git a/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java index a4af84a8f794..238a186394dd 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java @@ -25,9 +25,9 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -134,7 +134,7 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase 0); Query query = queryStringQuery("12~0.2").defaultField(INT_FIELD_NAME).toQuery(createShardContext()); - NumericRangeQuery fuzzyQuery = (NumericRangeQuery) query; + LegacyNumericRangeQuery fuzzyQuery = (LegacyNumericRangeQuery) query; assertThat(fuzzyQuery.getMin().longValue(), equalTo(12L)); assertThat(fuzzyQuery.getMax().longValue(), equalTo(12L)); } diff --git a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java index fbb708a5d970..9f99b85a294d 100644 --- 
a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermRangeQuery; import org.elasticsearch.ElasticsearchParseException; @@ -118,8 +118,8 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase 0); Query parsedQuery = rangeQuery(INT_FIELD_NAME).from(23).to(54).includeLower(true).includeUpper(false).toQuery(createShardContext()); // since age is automatically registered in data, we encode it as numeric - assertThat(parsedQuery, instanceOf(NumericRangeQuery.class)); - NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery; + assertThat(parsedQuery, instanceOf(LegacyNumericRangeQuery.class)); + LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) parsedQuery; assertThat(rangeQuery.getField(), equalTo(INT_FIELD_NAME)); assertThat(rangeQuery.getMin().intValue(), equalTo(23)); assertThat(rangeQuery.getMax().intValue(), equalTo(54)); @@ -220,15 +220,15 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase files = si.files(); - for (String file : files) { - if (!IndexFileNames.parseSegmentName(file).equals(si.name)) { - throw new IllegalArgumentException("invalid files: expected segment=" + si.name + ", got=" + files); - } - } - output.writeStringSet(files); - output.writeStringStringMap(si.getAttributes()); - CodecUtil.writeFooter(output); - success = true; - } finally { - if (!success) { - // TODO: are we doing this outside of the tracking wrapper? why must SIWriter cleanup like this? 
- IOUtils.deleteFilesIgnoringExceptions(si.dir, fileName); - } - } - } - }; - } - } - public void testNewChecksums() throws IOException { final ShardId shardId = new ShardId("index", "_na_", 1); DirectoryService directoryService = new LuceneManagedDirectoryService(random()); @@ -381,7 +320,7 @@ public class StoreTests extends ESTestCase { } } if (random().nextBoolean()) { - DirectoryReader.open(writer, random().nextBoolean()).close(); // flush + DirectoryReader.open(writer).close(); // flush } Store.MetadataSnapshot metadata; // check before we committed @@ -472,32 +411,12 @@ public class StoreTests extends ESTestCase { } - final Adler32 adler32 = new Adler32(); final long luceneChecksum; try (IndexInput indexInput = dir.openInput("lucene_checksum.bin", IOContext.DEFAULT)) { assertEquals(luceneFileLength, indexInput.length()); luceneChecksum = CodecUtil.retrieveChecksum(indexInput); } - { // positive check - StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength, Store.digestToString(luceneChecksum), Version.LUCENE_4_8_0); - assertTrue(Store.checkIntegrityNoException(lucene, dir)); - } - - { // negative check - wrong checksum - StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength, Store.digestToString(luceneChecksum + 1), Version.LUCENE_4_8_0); - assertFalse(Store.checkIntegrityNoException(lucene, dir)); - } - - { // negative check - wrong length - StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength + 1, Store.digestToString(luceneChecksum), Version.LUCENE_4_8_0); - assertFalse(Store.checkIntegrityNoException(lucene, dir)); - } - - { // negative check - wrong file - StoreFileMetaData lucene = new StoreFileMetaData("legacy.bin", luceneFileLength, Store.digestToString(luceneChecksum), Version.LUCENE_4_8_0); - assertFalse(Store.checkIntegrityNoException(lucene, dir)); - } dir.close(); } @@ -600,8 +519,6 @@ public class StoreTests extends ESTestCase { dir = 
StoreTests.newDirectory(random); if (dir instanceof MockDirectoryWrapper) { ((MockDirectoryWrapper) dir).setPreventDoubleWrite(preventDoubleWrite); - // TODO: fix this test to handle virus checker - ((MockDirectoryWrapper) dir).setEnableVirusScanner(false); } this.random = random; } @@ -859,28 +776,6 @@ public class StoreTests extends ESTestCase { IOUtils.close(store); } - public void testCleanUpWithLegacyChecksums() throws IOException { - Map metaDataMap = new HashMap<>(); - metaDataMap.put("segments_1", new StoreFileMetaData("segments_1", 50, "foobar", Version.LUCENE_4_8_0, new BytesRef(new byte[]{1}))); - metaDataMap.put("_0_1.del", new StoreFileMetaData("_0_1.del", 42, "foobarbaz", Version.LUCENE_4_8_0, new BytesRef())); - Store.MetadataSnapshot snapshot = new Store.MetadataSnapshot(unmodifiableMap(metaDataMap), emptyMap(), 0); - - final ShardId shardId = new ShardId("index", "_na_", 1); - DirectoryService directoryService = new LuceneManagedDirectoryService(random()); - Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); - for (String file : metaDataMap.keySet()) { - try (IndexOutput output = store.directory().createOutput(file, IOContext.DEFAULT)) { - BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024)); - output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length); - CodecUtil.writeFooter(output); - } - } - - store.verifyAfterCleanup(snapshot, snapshot); - deleteContent(store.directory()); - IOUtils.close(store); - } - public void testOnCloseCallback() throws IOException { final ShardId shardId = new ShardId(new Index(randomRealisticUnicodeOfCodepointLengthBetween(1, 10), "_na_"), randomIntBetween(0, 100)); DirectoryService directoryService = new LuceneManagedDirectoryService(random()); diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java index 
d6e248f1c94b..5a4aa2e6b24c 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java @@ -84,7 +84,7 @@ public class IndicesQueryCacheTests extends ESTestCase { Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig()); w.addDocument(new Document()); - DirectoryReader r = DirectoryReader.open(w, false); + DirectoryReader r = DirectoryReader.open(w); w.close(); ShardId shard = new ShardId("index", "_na_", 0); r = ElasticsearchDirectoryReader.wrap(r, shard); @@ -154,7 +154,7 @@ public class IndicesQueryCacheTests extends ESTestCase { Directory dir1 = newDirectory(); IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig()); w1.addDocument(new Document()); - DirectoryReader r1 = DirectoryReader.open(w1, false); + DirectoryReader r1 = DirectoryReader.open(w1); w1.close(); ShardId shard1 = new ShardId("index", "_na_", 0); r1 = ElasticsearchDirectoryReader.wrap(r1, shard1); @@ -164,7 +164,7 @@ public class IndicesQueryCacheTests extends ESTestCase { Directory dir2 = newDirectory(); IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig()); w2.addDocument(new Document()); - DirectoryReader r2 = DirectoryReader.open(w2, false); + DirectoryReader r2 = DirectoryReader.open(w2); w2.close(); ShardId shard2 = new ShardId("index", "_na_", 1); r2 = ElasticsearchDirectoryReader.wrap(r2, shard2); @@ -279,7 +279,7 @@ public class IndicesQueryCacheTests extends ESTestCase { Directory dir1 = newDirectory(); IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig()); w1.addDocument(new Document()); - DirectoryReader r1 = DirectoryReader.open(w1, false); + DirectoryReader r1 = DirectoryReader.open(w1); w1.close(); ShardId shard1 = new ShardId("index", "_na_", 0); r1 = ElasticsearchDirectoryReader.wrap(r1, shard1); @@ -289,7 +289,7 @@ public class IndicesQueryCacheTests extends ESTestCase { Directory dir2 = newDirectory(); IndexWriter 
w2 = new IndexWriter(dir2, newIndexWriterConfig()); w2.addDocument(new Document()); - DirectoryReader r2 = DirectoryReader.open(w2, false); + DirectoryReader r2 = DirectoryReader.open(w2); w2.close(); ShardId shard2 = new ShardId("index", "_na_", 1); r2 = ElasticsearchDirectoryReader.wrap(r2, shard2); diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java index bd48a388f34c..e36f1bca49ba 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java @@ -54,7 +54,7 @@ public class IndicesRequestCacheTests extends ESTestCase { IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); writer.addDocument(newDoc(0, "foo")); - DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); AtomicBoolean indexShard = new AtomicBoolean(true); @@ -107,7 +107,7 @@ public class IndicesRequestCacheTests extends ESTestCase { IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); writer.addDocument(newDoc(0, "foo")); - DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0); @@ -144,12 +144,12 @@ public class IndicesRequestCacheTests extends ESTestCase { IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); writer.addDocument(newDoc(0, "foo")); - DirectoryReader reader = 
ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "bar", 1)); + DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "bar")); - DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "bar", 1)); + DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0); // initial cache @@ -237,13 +237,13 @@ public class IndicesRequestCacheTests extends ESTestCase { IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); writer.addDocument(newDoc(0, "foo")); - DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "bar")); - DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0); @@ -263,18 +263,18 @@ public class IndicesRequestCacheTests extends ESTestCase { IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); writer.addDocument(newDoc(0, "foo")); - DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, 
true), + DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "bar")); - DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "baz")); - DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TestEntity thirddEntity = new TestEntity(requestCacheStats, thirdReader, indexShard, 0); @@ -299,18 +299,18 @@ public class IndicesRequestCacheTests extends ESTestCase { IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); writer.addDocument(newDoc(0, "foo")); - DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "bar")); - DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0); writer.updateDocument(new Term("id", 
"0"), newDoc(0, "baz")); - DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); AtomicBoolean differentIdentity = new AtomicBoolean(true); TestEntity thirddEntity = new TestEntity(requestCacheStats, thirdReader, differentIdentity, 0); diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index e29ad3e081aa..467aa4d3309a 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.indices.stats; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; -import org.apache.lucene.util.Version; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.indices.stats.CommonStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; @@ -542,7 +541,6 @@ public class IndexStatsIT extends ESIntegTestCase { assertThat(stats.getTotal().getSegments(), notNullValue()); assertThat(stats.getTotal().getSegments().getCount(), equalTo((long) test1.totalNumShards)); - assumeTrue("test doesn't work with 4.6.0", org.elasticsearch.Version.CURRENT.luceneVersion != Version.LUCENE_4_6_0); assertThat(stats.getTotal().getSegments().getMemoryInBytes(), greaterThan(0L)); } diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorQueryTests.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorQueryTests.java index 170b0be30dfc..8bef91385670 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorQueryTests.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorQueryTests.java @@ -147,7 +147,9 @@ public class PercolatorQueryTests extends ESTestCase { 
indexWriter.close(); directoryReader = DirectoryReader.open(directory); - IndexSearcher shardSearcher = newSearcher(directoryReader); + // don't use newSearcher, which randomizes similarity. if it gets classic sim, the test eats it, + // as the score becomes 1 due to querynorm. + IndexSearcher shardSearcher = new IndexSearcher(directoryReader); MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer()); diff --git a/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java b/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java index 60810ee4df66..7587866b1446 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java @@ -21,7 +21,7 @@ package org.elasticsearch.search.fetch.innerhits; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; -import org.apache.lucene.document.IntField; +import org.apache.lucene.document.LegacyIntField; import org.apache.lucene.document.StringField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; @@ -66,7 +66,7 @@ public class NestedChildrenFilterTests extends ESTestCase { Document parenDoc = new Document(); parenDoc.add(new StringField("type", "parent", Field.Store.NO)); - parenDoc.add(new IntField("num_child_docs", numChildDocs, Field.Store.YES)); + parenDoc.add(new LegacyIntField("num_child_docs", numChildDocs, Field.Store.YES)); docs.add(parenDoc); writer.addDocuments(docs); } diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index e96b4d69b00f..175adc27892a 100644 --- 
a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -31,8 +31,10 @@ import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; +import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.SearchHits; @@ -50,6 +52,7 @@ import java.util.Locale; import static org.elasticsearch.client.Requests.indexRequest; import static org.elasticsearch.client.Requests.searchRequest; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.index.query.QueryBuilders.boostingQuery; import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; @@ -74,6 +77,8 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { return pluginList(InternalSettingsPlugin.class); // uses index.version.created } + private final QueryBuilder baseQuery = constantScoreQuery(termQuery("test", "value")); + public void testDistanceScoreGeoLinGaussExp() throws Exception { assertAcked(prepareCreate("test").addMapping( "type1", @@ -117,7 +122,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ActionFuture response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( - 
searchSource().query(constantScoreQuery(termQuery("test", "value"))))); + searchSource().query(baseQuery))); SearchResponse sr = response.actionGet(); SearchHits sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2))); @@ -125,7 +130,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), gaussDecayFunction("loc", lonlat, "1000km"))))); + functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km"))))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2))); @@ -136,7 +141,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( - searchSource().query(constantScoreQuery(termQuery("test", "value"))))); + searchSource().query(baseQuery))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2))); @@ -144,7 +149,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), linearDecayFunction("loc", lonlat, "1000km"))))); + functionScoreQuery(baseQuery, linearDecayFunction("loc", lonlat, "1000km"))))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2))); @@ -155,7 +160,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( - searchSource().query(constantScoreQuery(termQuery("test", "value"))))); + searchSource().query(baseQuery))); sr = response.actionGet(); sh = sr.getHits(); 
assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2))); @@ -163,7 +168,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), exponentialDecayFunction("loc", lonlat, "1000km"))))); + functionScoreQuery(baseQuery, exponentialDecayFunction("loc", lonlat, "1000km"))))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2))); @@ -314,30 +319,30 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { .setSource( jsonBuilder().startObject().field("test", "value").startObject("loc").field("lat", 20).field("lon", 11).endObject() .endObject()).setRefresh(true).get(); - + FunctionScoreQueryBuilder baseQuery = functionScoreQuery(constantScoreQuery(termQuery("test", "value")), ScoreFunctionBuilders.weightFactorFunction(randomIntBetween(1, 10))); GeoPoint point = new GeoPoint(20, 11); ActionFuture response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("loc", point, "1000km")).boostMode( - CombineFunction.MULTIPLY)))); + functionScoreQuery(baseQuery, gaussDecayFunction("loc", point, "1000km")).boostMode( + CombineFunction.REPLACE)))); SearchResponse sr = response.actionGet(); SearchHits sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).score(), closeTo(0.30685282, 1.e-5)); + assertThat((double) sh.getAt(0).score(), closeTo(1.0, 1.e-5)); + // this is equivalent to new GeoPoint(20, 11); just flipped so scores must be same float[] coords = { 11, 20 }; - response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - 
functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("loc", coords, "1000km")).boostMode( - CombineFunction.MULTIPLY)))); + functionScoreQuery(baseQuery, gaussDecayFunction("loc", coords, "1000km")).boostMode( + CombineFunction.REPLACE)))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).score(), closeTo(0.30685282, 1.e-5)); + assertThat((double) sh.getAt(0).score(), closeTo(1.0f, 1.e-5)); } public void testCombineModes() throws Exception { @@ -348,26 +353,25 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ensureYellow(); client().prepareIndex().setType("type1").setId("1").setIndex("test") - .setSource(jsonBuilder().startObject().field("test", "value").field("num", 1.0).endObject()).setRefresh(true).get(); - - // function score should return 0.5 for this function - + .setSource(jsonBuilder().startObject().field("test", "value value").field("num", 1.0).endObject()).setRefresh(true).get(); + FunctionScoreQueryBuilder baseQuery = functionScoreQuery(constantScoreQuery(termQuery("test", "value")), ScoreFunctionBuilders.weightFactorFunction(2)); + // decay score should return 0.5 for this function and baseQuery should return 2.0f as it's score ActionFuture response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost( - 2.0f).boostMode(CombineFunction.MULTIPLY)))); + functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)) + .boostMode(CombineFunction.MULTIPLY)))); SearchResponse sr = response.actionGet(); SearchHits sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).score(), closeTo(0.153426408, 1.e-5)); + assertThat((double) 
sh.getAt(0).score(), closeTo(1.0, 1.e-5)); response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost( - 2.0f).boostMode(CombineFunction.REPLACE)))); + functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)) + .boostMode(CombineFunction.REPLACE)))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); @@ -377,48 +381,48 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost( - 2.0f).boostMode(CombineFunction.SUM)))); + functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)) + .boostMode(CombineFunction.SUM)))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).score(), closeTo(0.30685282 + 0.5, 1.e-5)); + assertThat((double) sh.getAt(0).score(), closeTo(2.0 + 0.5, 1.e-5)); logger.info("--> Hit[0] {} Explanation:\n {}", sr.getHits().getAt(0).id(), sr.getHits().getAt(0).explanation()); response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost( - 2.0f).boostMode(CombineFunction.AVG)))); + functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)) + .boostMode(CombineFunction.AVG)))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).score(), closeTo((0.30685282 + 0.5) / 2, 
1.e-5)); + assertThat((double) sh.getAt(0).score(), closeTo((2.0 + 0.5) / 2, 1.e-5)); response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost( - 2.0f).boostMode(CombineFunction.MIN)))); - sr = response.actionGet(); - sh = sr.getHits(); - assertThat(sh.getTotalHits(), equalTo((long) (1))); - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).score(), closeTo(0.30685282, 1.e-5)); - - response = client().search( - searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( - searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost( - 2.0f).boostMode(CombineFunction.MAX)))); + functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)) + .boostMode(CombineFunction.MIN)))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).score(), closeTo(0.5, 1.e-5)); + response = client().search( + searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( + searchSource().query( + functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)) + .boostMode(CombineFunction.MAX)))); + sr = response.actionGet(); + sh = sr.getHits(); + assertThat(sh.getTotalHits(), equalTo((long) (1))); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat((double) sh.getAt(0).score(), closeTo(2.0, 1.e-5)); + } public void testExceptionThrownIfScaleLE0() throws Exception { @@ -509,7 +513,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ActionFuture response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), new 
FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ + functionScoreQuery(baseQuery, new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ new FunctionScoreQueryBuilder.FilterFunctionBuilder(linearDecayFunction("num1", "2013-05-28", "+3d")), new FunctionScoreQueryBuilder.FilterFunctionBuilder(linearDecayFunction("num2", "0.0", "1")) }).scoreMode(FiltersFunctionScoreQuery.ScoreMode.MULTIPLY)))); @@ -733,7 +737,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ActionFuture response = client().search( searchRequest().source( - searchSource().query(constantScoreQuery(termQuery("test", "value"))))); + searchSource().query(baseQuery))); SearchResponse sr = response.actionGet(); assertSearchHits(sr, "1", "2"); SearchHits sh = sr.getHits(); @@ -745,7 +749,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MIN))))); + functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MIN))))); sr = response.actionGet(); assertSearchHits(sr, "1", "2"); sh = sr.getHits(); @@ -755,7 +759,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MAX))))); + functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MAX))))); sr = response.actionGet(); assertSearchHits(sr, "1", "2"); sh = sr.getHits(); @@ -784,7 +788,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), 
linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.SUM))))); + functionScoreQuery(baseQuery, linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.SUM))))); sr = response.actionGet(); assertSearchHits(sr, "1", "2"); sh = sr.getHits(); @@ -795,7 +799,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.AVG))))); + functionScoreQuery(baseQuery, linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.AVG))))); sr = response.actionGet(); assertSearchHits(sr, "1", "2"); sh = sr.getHits(); diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java index 8a060af2ab03..8f04bd727565 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java @@ -19,10 +19,10 @@ package org.elasticsearch.search.geo; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.exception.InvalidShapeException; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.exception.InvalidShapeException; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java index 7afbeaa9abfd..e41e3c178c58 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java 
+++ b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.search.geo; -import com.spatial4j.core.shape.Rectangle; +import org.locationtech.spatial4j.shape.Rectangle; import com.vividsolutions.jts.geom.Coordinate; import org.elasticsearch.action.get.GetResponse; diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java index 9f898a47c06c..f34d5b33c9dc 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java @@ -268,78 +268,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { equalTo("Test: http://www.facebook.com http://elasticsearch.org http://xing.com http://cnn.com http://quora.com")); } - public void testNgramHighlightingPreLucene42() throws IOException { - assertAcked(prepareCreate("test") - .addMapping("test", - "name", "type=text,analyzer=name_index_analyzer,search_analyzer=name_search_analyzer," + randomStoreField() + "term_vector=with_positions_offsets", - "name2", "type=text,analyzer=name2_index_analyzer,search_analyzer=name_search_analyzer," + randomStoreField() + "term_vector=with_positions_offsets") - .setSettings(settingsBuilder() - .put(indexSettings()) - .put("analysis.filter.my_ngram.max_gram", 20) - .put("analysis.filter.my_ngram.version", "4.1") - .put("analysis.filter.my_ngram.min_gram", 1) - .put("analysis.filter.my_ngram.type", "ngram") - .put("analysis.tokenizer.my_ngramt.max_gram", 20) - .put("analysis.tokenizer.my_ngramt.version", "4.1") - .put("analysis.tokenizer.my_ngramt.min_gram", 1) - .put("analysis.tokenizer.my_ngramt.type", "ngram") - .put("analysis.analyzer.name_index_analyzer.tokenizer", "my_ngramt") - .put("analysis.analyzer.name2_index_analyzer.tokenizer", "whitespace") - 
.putArray("analysis.analyzer.name2_index_analyzer.filter", "lowercase", "my_ngram") - .put("analysis.analyzer.name_search_analyzer.tokenizer", "whitespace") - .put("analysis.analyzer.name_search_analyzer.filter", "lowercase"))); - ensureYellow(); - client().prepareIndex("test", "test", "1") - .setSource("name", "logicacmg ehemals avinci - the know how company", - "name2", "logicacmg ehemals avinci - the know how company").get(); - client().prepareIndex("test", "test", "2") - .setSource("name", "avinci, unilog avinci, logicacmg, logica", - "name2", "avinci, unilog avinci, logicacmg, logica").get(); - refresh(); - - SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name", "logica m"))) - .highlighter(new HighlightBuilder().field("name")).get(); - assertHighlight(search, 0, "name", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - assertHighlight(search, 1, "name", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - - search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name", "logica ma"))) - .highlighter(new HighlightBuilder().field("name")).get(); - assertHighlight(search, 0, "name", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - assertHighlight(search, 1, "name", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - - search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name", "logica"))) - .highlighter(new HighlightBuilder().field("name")).get(); - assertHighlight(search, 0, "name", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - assertHighlight(search, 0, "name", 0, anyOf(equalTo("logicacmg ehemals 
avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - - search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name2", "logica m"))) - .highlighter(new HighlightBuilder().field("name2")).get(); - assertHighlight(search, 0, "name2", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - assertHighlight(search, 1, "name2", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - - search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name2", "logica ma"))) - .highlighter(new HighlightBuilder().field("name2")).get(); - assertHighlight(search, 0, "name2", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - assertHighlight(search, 1, "name2", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - - search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name2", "logica"))) - .highlighter(new HighlightBuilder().field("name2")).get(); - assertHighlight(search, 0, "name2", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - assertHighlight(search, 1, "name2", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - } - public void testNgramHighlighting() throws IOException { assertAcked(prepareCreate("test") .addMapping("test", diff --git a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index f65b17288aeb..e0bc26c9296c 100644 --- a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ 
b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -180,7 +180,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { // the doc id is the tie-breaker } assertThat(topNIds, empty()); - assertThat(searchResponse.getHits().hits()[0].getScore(), equalTo(searchResponse.getHits().hits()[1].getScore())); + assertThat(searchResponse.getHits().hits()[0].getScore(), greaterThan(searchResponse.getHits().hits()[1].getScore())); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 079363719f15..44b8636d51a5 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -375,9 +375,9 @@ public class SearchQueryIT extends ESIntegTestCase { // try the same with multi match query searchResponse = client().prepareSearch().setQuery(multiMatchQuery("the quick brown", "field1", "field2").cutoffFrequency(3).operator(Operator.AND)).get(); assertHitCount(searchResponse, 3L); - assertFirstHit(searchResponse, hasId("3")); // better score due to different query stats - assertSecondHit(searchResponse, hasId("1")); - assertThirdHit(searchResponse, hasId("2")); + assertFirstHit(searchResponse, hasId("1")); + assertSecondHit(searchResponse, hasId("2")); + assertThirdHit(searchResponse, hasId("3")); } public void testCommonTermsQueryStackedTokens() throws Exception { @@ -467,9 +467,9 @@ public class SearchQueryIT extends ESIntegTestCase { // try the same with multi match query searchResponse = client().prepareSearch().setQuery(multiMatchQuery("the fast brown", "field1", "field2").cutoffFrequency(3).operator(Operator.AND)).get(); assertHitCount(searchResponse, 3L); - 
assertFirstHit(searchResponse, hasId("3")); // better score due to different query stats - assertSecondHit(searchResponse, hasId("1")); - assertThirdHit(searchResponse, hasId("2")); + assertFirstHit(searchResponse, hasId("1")); + assertSecondHit(searchResponse, hasId("2")); + assertThirdHit(searchResponse, hasId("3")); } public void testQueryStringAnalyzedWildcard() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java index 812928dee284..2143c7be9e0e 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java @@ -96,7 +96,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase { writer.addDocument(doc); } - DirectoryReader ir = DirectoryReader.open(writer, false); + DirectoryReader ir = DirectoryReader.open(writer); WordScorer wordScorer = new LaplaceScorer(ir, MultiFields.getTerms(ir, "body_ngram"), "body_ngram", 0.95d, new BytesRef(" "), 0.5f); NoisyChannelSpellChecker suggester = new NoisyChannelSpellChecker(); @@ -238,7 +238,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase { writer.addDocument(doc); } - DirectoryReader ir = DirectoryReader.open(writer, false); + DirectoryReader ir = DirectoryReader.open(writer); LaplaceScorer wordScorer = new LaplaceScorer(ir, MultiFields.getTerms(ir, "body_ngram"), "body_ngram", 0.95d, new BytesRef(" "), 0.5f); NoisyChannelSpellChecker suggester = new NoisyChannelSpellChecker(); DirectSpellChecker spellchecker = new DirectSpellChecker(); @@ -321,7 +321,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase { writer.addDocument(doc); } - DirectoryReader ir = DirectoryReader.open(writer, false); + DirectoryReader ir = DirectoryReader.open(writer); WordScorer wordScorer = new 
LinearInterpoatingScorer(ir, MultiFields.getTerms(ir, "body_ngram"), "body_ngram", 0.85d, new BytesRef(" "), 0.5, 0.4, 0.1); NoisyChannelSpellChecker suggester = new NoisyChannelSpellChecker(); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java index e4a8ae72b911..51152733bf88 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java @@ -132,7 +132,7 @@ public abstract class SmoothingModelTestCase extends ESTestCase { Document doc = new Document(); doc.add(new Field("field", "someText", TextField.TYPE_NOT_STORED)); writer.addDocument(doc); - DirectoryReader ir = DirectoryReader.open(writer, false); + DirectoryReader ir = DirectoryReader.open(writer); WordScorer wordScorer = testModel.buildWordScorerFactory().newScorer(ir, MultiFields.getTerms(ir , "field"), "field", 0.9d, BytesRefs.toBytesRef(" ")); assertWordScorer(wordScorer, testModel); diff --git a/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java b/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java index 4cc7f8f8487d..95984da55f69 100644 --- a/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java +++ b/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java @@ -20,12 +20,12 @@ package org.elasticsearch.test.geo; import com.carrotsearch.randomizedtesting.generators.RandomInts; -import com.spatial4j.core.context.jts.JtsSpatialContext; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.exception.InvalidShapeException; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.impl.Range; +import org.locationtech.spatial4j.context.jts.JtsSpatialContext; +import 
org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.exception.InvalidShapeException; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.impl.Range; import com.vividsolutions.jts.algorithm.ConvexHull; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; @@ -45,7 +45,7 @@ import org.junit.Assert; import java.util.Random; -import static com.spatial4j.core.shape.SpatialRelation.CONTAINS; +import static org.locationtech.spatial4j.shape.SpatialRelation.CONTAINS; /** * Random geoshape generation utilities for randomized {@code geo_shape} type testing diff --git a/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java b/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java index 3400f9637ffa..5fff4a61f86d 100644 --- a/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java +++ b/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java @@ -19,12 +19,12 @@ package org.elasticsearch.test.hamcrest; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.ShapeCollection; -import com.spatial4j.core.shape.impl.GeoCircle; -import com.spatial4j.core.shape.impl.RectangleImpl; -import com.spatial4j.core.shape.jts.JtsGeometry; -import com.spatial4j.core.shape.jts.JtsPoint; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.ShapeCollection; +import org.locationtech.spatial4j.shape.impl.GeoCircle; +import org.locationtech.spatial4j.shape.impl.RectangleImpl; +import org.locationtech.spatial4j.shape.jts.JtsGeometry; +import org.locationtech.spatial4j.shape.jts.JtsPoint; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.LineString; diff --git 
a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java index 5e9bc80b9a9f..5fc24094bd36 100644 --- a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java +++ b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java @@ -243,9 +243,9 @@ public class SimpleValidateQueryIT extends ESIntegTestCase { // fuzzy queries assertExplanation(QueryBuilders.fuzzyQuery("field", "the").fuzziness(Fuzziness.fromEdits(2)), - containsString("field:the field:tree^0.3333333"), true); + containsString("field:the (field:tree)^0.3333333"), true); assertExplanation(QueryBuilders.fuzzyQuery("field", "jump"), - containsString("field:jumps^0.75"), true); + containsString("(field:jumps)^0.75"), true); // more like this queries assertExplanation(QueryBuilders.moreLikeThisQuery(new String[] { "field" }, null, MoreLikeThisQueryBuilder.ids("1")) diff --git a/core/src/test/resources/org/elasticsearch/index/analysis/keep_analysis.json b/core/src/test/resources/org/elasticsearch/index/analysis/keep_analysis.json index 233d6f3e3d7e..0ed95e16332a 100644 --- a/core/src/test/resources/org/elasticsearch/index/analysis/keep_analysis.json +++ b/core/src/test/resources/org/elasticsearch/index/analysis/keep_analysis.json @@ -9,9 +9,7 @@ }, "my_case_sensitive_keep_filter":{ "type":"keep", - "keep_words" : ["Hello", "worlD"], - "enable_position_increments" : false, - "version" : "4.2" + "keep_words" : ["Hello", "worlD"] } } } diff --git a/distribution/licenses/lucene-analyzers-common-5.5.0.jar.sha1 b/distribution/licenses/lucene-analyzers-common-5.5.0.jar.sha1 deleted file mode 100644 index dcdeb2cb4770..000000000000 --- a/distribution/licenses/lucene-analyzers-common-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1e0e8243a4410be20c34683034fafa7bb52e55cc \ No newline at end of file diff --git a/distribution/licenses/lucene-analyzers-common-6.0.0-snapshot-bea235f.jar.sha1 
b/distribution/licenses/lucene-analyzers-common-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..74d21bae9460 --- /dev/null +++ b/distribution/licenses/lucene-analyzers-common-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +3510af19947deadd929123aaf14d69b4bdec759a \ No newline at end of file diff --git a/distribution/licenses/lucene-backward-codecs-5.5.0.jar.sha1 b/distribution/licenses/lucene-backward-codecs-5.5.0.jar.sha1 deleted file mode 100644 index dd5c846363ae..000000000000 --- a/distribution/licenses/lucene-backward-codecs-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -68480974b2f54f519763632a7c1c5d51cbff3805 \ No newline at end of file diff --git a/distribution/licenses/lucene-backward-codecs-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-backward-codecs-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..ee6143bec147 --- /dev/null +++ b/distribution/licenses/lucene-backward-codecs-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +247ad7c17cb7c742d7a9abd5d9980e4fab815178 \ No newline at end of file diff --git a/distribution/licenses/lucene-core-5.5.0.jar.sha1 b/distribution/licenses/lucene-core-5.5.0.jar.sha1 deleted file mode 100644 index 70bd0b63bba9..000000000000 --- a/distribution/licenses/lucene-core-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a74fd869bb5ad7fe6b4cd29df9543a34aea81164 \ No newline at end of file diff --git a/distribution/licenses/lucene-core-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-core-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..2d39f84d21ec --- /dev/null +++ b/distribution/licenses/lucene-core-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +c0712dbec58abad545646edab67d58f7373f5329 \ No newline at end of file diff --git a/distribution/licenses/lucene-grouping-5.5.0.jar.sha1 b/distribution/licenses/lucene-grouping-5.5.0.jar.sha1 deleted file mode 100644 index f905a2081b6f..000000000000 --- 
a/distribution/licenses/lucene-grouping-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -437cacec0cfa349b1dee049a7c0e32df3b8ecc07 \ No newline at end of file diff --git a/distribution/licenses/lucene-grouping-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-grouping-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..a3ce82c8a04f --- /dev/null +++ b/distribution/licenses/lucene-grouping-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +7573e3efb12dd16fdc991edaf408877dab20c030 \ No newline at end of file diff --git a/distribution/licenses/lucene-highlighter-5.5.0.jar.sha1 b/distribution/licenses/lucene-highlighter-5.5.0.jar.sha1 deleted file mode 100644 index 6ea3c5a0c13f..000000000000 --- a/distribution/licenses/lucene-highlighter-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ecdd913cb7c61a5435591f0a7268b01ab3fc782a \ No newline at end of file diff --git a/distribution/licenses/lucene-highlighter-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-highlighter-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..9259a2c66c18 --- /dev/null +++ b/distribution/licenses/lucene-highlighter-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +96ef0a9a43a5fc99d27bb7e7d61517ee4c7e54a4 \ No newline at end of file diff --git a/distribution/licenses/lucene-join-5.5.0.jar.sha1 b/distribution/licenses/lucene-join-5.5.0.jar.sha1 deleted file mode 100644 index 3cc19b170edf..000000000000 --- a/distribution/licenses/lucene-join-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -af4f55e36e3a7d1f4e9ed9efdccf7e22b767d6e8 \ No newline at end of file diff --git a/distribution/licenses/lucene-join-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-join-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..4959f5f163c7 --- /dev/null +++ b/distribution/licenses/lucene-join-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +d93de34947d37e31a337cdfed400333588c378d8 \ No newline at end of file diff --git 
a/distribution/licenses/lucene-memory-5.5.0.jar.sha1 b/distribution/licenses/lucene-memory-5.5.0.jar.sha1 deleted file mode 100644 index 1f4ebc783ee2..000000000000 --- a/distribution/licenses/lucene-memory-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -09a327fe9f20fc7e3912ed213bdd5cb4b6d2a65a \ No newline at end of file diff --git a/distribution/licenses/lucene-memory-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-memory-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..5218d0a019eb --- /dev/null +++ b/distribution/licenses/lucene-memory-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +9c292930b1828e68f06509944a5346c141d56fd4 \ No newline at end of file diff --git a/distribution/licenses/lucene-misc-5.5.0.jar.sha1 b/distribution/licenses/lucene-misc-5.5.0.jar.sha1 deleted file mode 100644 index 76131ae81c57..000000000000 --- a/distribution/licenses/lucene-misc-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -504d855a1a38190622fdf990b2298c067e7d60ca \ No newline at end of file diff --git a/distribution/licenses/lucene-misc-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-misc-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..947722edfd33 --- /dev/null +++ b/distribution/licenses/lucene-misc-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +866ed93f48683e877ffa4d9baa1323dcffbc65d7 \ No newline at end of file diff --git a/distribution/licenses/lucene-queries-5.5.0.jar.sha1 b/distribution/licenses/lucene-queries-5.5.0.jar.sha1 deleted file mode 100644 index 5790b2e47769..000000000000 --- a/distribution/licenses/lucene-queries-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -60ca161c1dd5f127907423b6f039b846fb713de0 \ No newline at end of file diff --git a/distribution/licenses/lucene-queries-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-queries-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..6caf86a6b968 --- /dev/null +++ 
b/distribution/licenses/lucene-queries-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +967d9c2647bdd4d88961747f7436a5a92aa0385b \ No newline at end of file diff --git a/distribution/licenses/lucene-queryparser-5.5.0.jar.sha1 b/distribution/licenses/lucene-queryparser-5.5.0.jar.sha1 deleted file mode 100644 index 8e4a1e66138e..000000000000 --- a/distribution/licenses/lucene-queryparser-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0fddc49725b562fd48dff0cff004336ad2a090a4 \ No newline at end of file diff --git a/distribution/licenses/lucene-queryparser-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-queryparser-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..b3e92d3f168b --- /dev/null +++ b/distribution/licenses/lucene-queryparser-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +981030d83a7504267f3141d7365fad9b46d51465 \ No newline at end of file diff --git a/distribution/licenses/lucene-sandbox-5.5.0.jar.sha1 b/distribution/licenses/lucene-sandbox-5.5.0.jar.sha1 deleted file mode 100644 index 20c2a1c95278..000000000000 --- a/distribution/licenses/lucene-sandbox-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b7da8e187acd6e4d7781ba41fac8b9082dd27409 \ No newline at end of file diff --git a/distribution/licenses/lucene-sandbox-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-sandbox-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..7b5176c4963b --- /dev/null +++ b/distribution/licenses/lucene-sandbox-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +707691b1baf22c29020569f5b875d200a4955411 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-5.5.0.jar.sha1 b/distribution/licenses/lucene-spatial-5.5.0.jar.sha1 deleted file mode 100644 index dd645be87e36..000000000000 --- a/distribution/licenses/lucene-spatial-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c14965bf67179bee93cc8efc58d09a75d230c891 \ No newline at end of file diff --git 
a/distribution/licenses/lucene-spatial-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-spatial-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..9df2a16b886f --- /dev/null +++ b/distribution/licenses/lucene-spatial-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +be9e78130a069983f611f484d5b7b87bda0d6370 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-extras-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-spatial-extras-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..6badc36d3619 --- /dev/null +++ b/distribution/licenses/lucene-spatial-extras-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +edeef6ce8a58d5e6a074bebf545918d04e8579e1 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial3d-5.5.0.jar.sha1 b/distribution/licenses/lucene-spatial3d-5.5.0.jar.sha1 deleted file mode 100644 index c0b9d4ba8388..000000000000 --- a/distribution/licenses/lucene-spatial3d-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3e5ab4ea3e2052166100482f7a56b75bfa4ab0ad \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial3d-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-spatial3d-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..480ae590aedc --- /dev/null +++ b/distribution/licenses/lucene-spatial3d-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +d86a7ba859576bdcee1dacd8f407ccf71f982c60 \ No newline at end of file diff --git a/distribution/licenses/lucene-suggest-5.5.0.jar.sha1 b/distribution/licenses/lucene-suggest-5.5.0.jar.sha1 deleted file mode 100644 index adce0756ecfb..000000000000 --- a/distribution/licenses/lucene-suggest-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -51f9d52332f556976a5099817e35d37c69a24597 \ No newline at end of file diff --git a/distribution/licenses/lucene-suggest-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-suggest-6.0.0-snapshot-bea235f.jar.sha1 new file mode 
100644 index 000000000000..7835298c4a28 --- /dev/null +++ b/distribution/licenses/lucene-suggest-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +a3860de6502576f142dc948eb2005fa4dc0c27c5 \ No newline at end of file diff --git a/distribution/licenses/spatial4j-0.5.jar.sha1 b/distribution/licenses/spatial4j-0.5.jar.sha1 deleted file mode 100644 index 4bcf7a33b152..000000000000 --- a/distribution/licenses/spatial4j-0.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6e16edaf6b1ba76db7f08c2f3723fce3b358ecc3 \ No newline at end of file diff --git a/distribution/licenses/spatial4j-0.6.jar.sha1 b/distribution/licenses/spatial4j-0.6.jar.sha1 new file mode 100644 index 000000000000..740a25b1c901 --- /dev/null +++ b/distribution/licenses/spatial4j-0.6.jar.sha1 @@ -0,0 +1 @@ +21b15310bddcfd8c72611c180f20cf23279809a3 \ No newline at end of file diff --git a/docs/java-api/query-dsl/geo-shape-query.asciidoc b/docs/java-api/query-dsl/geo-shape-query.asciidoc index c753cd72c1a3..e08410acbdb0 100644 --- a/docs/java-api/query-dsl/geo-shape-query.asciidoc +++ b/docs/java-api/query-dsl/geo-shape-query.asciidoc @@ -10,9 +10,9 @@ to your classpath in order to use this type: [source,xml] ----------------------------------------------- - com.spatial4j + org.locationtech.spatial4j spatial4j - 0.4.1 <1> + 0.6 <1> @@ -27,7 +27,7 @@ to your classpath in order to use this type: ----------------------------------------------- -<1> check for updates in http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22com.spatial4j%22%20AND%20a%3A%22spatial4j%22[Maven Central] +<1> check for updates in http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22org.locationtech.spatial4j%22%20AND%20a%3A%22spatial4j%22[Maven Central] <2> check for updates in http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22com.vividsolutions%22%20AND%20a%3A%22jts%22[Maven Central] [source,java] diff --git a/modules/lang-expression/licenses/lucene-expressions-5.5.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-5.5.0.jar.sha1 deleted 
file mode 100644 index 15c992bf4608..000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4766406a2933ac9df62c49d6619caabb9943aba2 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-6.0.0-snapshot-bea235f.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..d9a29f17c503 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +8d11bf581b0afc25f87a57c06834cd85930d2ffa \ No newline at end of file diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java index fffeabcb8077..4689d5fba03e 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java @@ -169,14 +169,21 @@ public class FunctionScoreTests extends ESIntegTestCase { } } + /** make sure min_score works if functions is empty, see https://github.com/elastic/elasticsearch/issues/10253 */ public void testWithEmptyFunctions() throws IOException, ExecutionException, InterruptedException { assertAcked(prepareCreate("test")); ensureYellow(); index("test", "testtype", "1", jsonBuilder().startObject().field("text", "test text").endObject()); refresh(); - // make sure that min_score works if functions is empty, see https://github.com/elastic/elasticsearch/issues/10253 - float termQueryScore = 0.19178301f; + SearchResponse termQuery = client().search( + searchRequest().source( + searchSource().explain(true).query( + termQuery("text", "text")))).get(); + assertSearchResponse(termQuery); + assertThat(termQuery.getHits().totalHits(), equalTo(1L)); + float termQueryScore = termQuery.getHits().getAt(0).getScore(); + for 
(CombineFunction combineFunction : CombineFunction.values()) { testMinScoreApplied(combineFunction, termQueryScore); } diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0.jar.sha1 deleted file mode 100644 index 18440dcdc04a..000000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -69a6e72d322b6643f1b419e6c9cc46623a2404e9 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..538d2ad8216a --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +38fda9b86e4f68eb6c9d31fb636a2540da219927 \ No newline at end of file diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IndexableBinaryStringToolsTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IndexableBinaryStringToolsTests.java index 24890fed5a93..5f3e1644481e 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IndexableBinaryStringToolsTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IndexableBinaryStringToolsTests.java @@ -23,7 +23,6 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.TimeUnits; import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter; import org.junit.BeforeClass; @@ -110,14 +109,14 @@ public class IndexableBinaryStringToolsTests extends LuceneTestCase { int encodedLen1 = IndexableBinaryStringTools.getEncodedLength( 
originalArray1, 0, numBytes1); if (encodedLen1 > encoded1.length) - encoded1 = new char[ArrayUtil.oversize(encodedLen1, RamUsageEstimator.NUM_BYTES_CHAR)]; + encoded1 = new char[ArrayUtil.oversize(encodedLen1, Character.BYTES)]; IndexableBinaryStringTools.encode(originalArray1, 0, numBytes1, encoded1, 0, encodedLen1); int encodedLen2 = IndexableBinaryStringTools.getEncodedLength(original2, 0, numBytes2); if (encodedLen2 > encoded2.length) - encoded2 = new char[ArrayUtil.oversize(encodedLen2, RamUsageEstimator.NUM_BYTES_CHAR)]; + encoded2 = new char[ArrayUtil.oversize(encodedLen2, Character.BYTES)]; IndexableBinaryStringTools.encode(original2, 0, numBytes2, encoded2, 0, encodedLen2); @@ -196,7 +195,7 @@ public class IndexableBinaryStringToolsTests extends LuceneTestCase { int encodedLen = IndexableBinaryStringTools.getEncodedLength(binary, 0, numBytes); if (encoded.length < encodedLen) - encoded = new char[ArrayUtil.oversize(encodedLen, RamUsageEstimator.NUM_BYTES_CHAR)]; + encoded = new char[ArrayUtil.oversize(encodedLen, Character.BYTES)]; IndexableBinaryStringTools.encode(binary, 0, numBytes, encoded, 0, encodedLen); diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0.jar.sha1 deleted file mode 100644 index 832db46564ed..000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e9d68dd5d9fae3349b81de5952d0ee8115c696a4 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..b90115da4abd --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +352fea7a169ada6a7ae18e4ec34559496e09b465 \ No newline at end of file diff 
--git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0.jar.sha1 deleted file mode 100644 index 3436526863d8..000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c4735c43440ebcb20f2b6f49f508fedc12f5366c \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..7cbe648e0bd8 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +445f5ea7822d0dd6b91364ec119cd6cb4635d285 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0.jar.sha1 deleted file mode 100644 index 95b85f7edbd6..000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a31a4d1476d45738a460374d9801dc5ed9b49c1a \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..03c96786de2a --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +0b216b7b9ff583bc1382edc8adfee4d4acd02859 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0.jar.sha1 deleted file mode 100644 index d5a28231e655..000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0.jar.sha1 
+++ /dev/null @@ -1 +0,0 @@ -1a7505d011aca54c004d0fc86a490d5f054bb903 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 000000000000..f27a98f63bac --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +8d161a8c7e5b5b82f64dc5df2ca46197a3716672 \ No newline at end of file diff --git a/plugins/mapper-size/build.gradle b/plugins/mapper-size/build.gradle index 7af65d19ef35..7d5aa1ee2760 100644 --- a/plugins/mapper-size/build.gradle +++ b/plugins/mapper-size/build.gradle @@ -22,3 +22,6 @@ esplugin { classname 'org.elasticsearch.plugin.mapper.MapperSizePlugin' } +// TODO: migrate to points +compileJava.options.compilerArgs << "-Xlint:-deprecation" +compileTestJava.options.compilerArgs << "-Xlint:-deprecation" diff --git a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/SmbDirectoryWrapper.java b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/SmbDirectoryWrapper.java index 88b9d187dcf5..fe2c32723e2c 100644 --- a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/SmbDirectoryWrapper.java +++ b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/SmbDirectoryWrapper.java @@ -60,7 +60,7 @@ public final class SmbDirectoryWrapper extends FilterDirectory { static final int CHUNK_SIZE = 8192; public SmbFSIndexOutput(String name) throws IOException { - super("SmbFSIndexOutput(path=\"" + fsDirectory.getDirectory().resolve(name) + "\")", new FilterOutputStream(Channels.newOutputStream(Files.newByteChannel(fsDirectory.getDirectory().resolve(name), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.READ, StandardOpenOption.WRITE))) { + super("SmbFSIndexOutput(path=\"" + fsDirectory.getDirectory().resolve(name) + "\")", name, new 
FilterOutputStream(Channels.newOutputStream(Files.newByteChannel(fsDirectory.getDirectory().resolve(name), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.READ, StandardOpenOption.WRITE))) { // This implementation ensures, that we never write more than CHUNK_SIZE bytes: @Override public void write(byte[] b, int offset, int length) throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 4a20d3c3fd60..84d887338024 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -631,27 +631,6 @@ public abstract class ESTestCase extends LuceneTestCase { assertEquals(expected.isNativeMethod(), actual.isNativeMethod()); } - /** A runnable that can throw any checked exception. */ - @FunctionalInterface - public interface ThrowingRunnable { - void run() throws Throwable; - } - - /** Checks a specific exception class is thrown by the given runnable, and returns it. 
*/ - public static T expectThrows(Class expectedType, ThrowingRunnable runnable) { - try { - runnable.run(); - } catch (Throwable e) { - if (expectedType.isInstance(e)) { - return expectedType.cast(e); - } - AssertionFailedError assertion = new AssertionFailedError("Unexpected exception type, expected " + expectedType.getSimpleName()); - assertion.initCause(e); - throw assertion; - } - throw new AssertionFailedError("Expected exception " + expectedType.getSimpleName()); - } - protected static long spinForAtLeastOneMillisecond() { long nanosecondsInMillisecond = TimeUnit.NANOSECONDS.convert(1, TimeUnit.MILLISECONDS); // force at least one millisecond to elapse, but ensure the diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java index ef3be122cdb2..c945a308363b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java @@ -173,8 +173,7 @@ public class MockFSDirectoryService extends FsDirectoryService { w.setCheckIndexOnClose(false); // we do this on the index level w.setPreventDoubleWrite(preventDoubleWrite); // TODO: make this test robust to virus scanner - w.setEnableVirusScanner(false); - w.setNoDeleteOpenFile(noDeleteOpenFile); + w.setAssertNoDeleteOpenFile(false); w.setUseSlowOpenClosers(false); LuceneTestCase.closeAfterSuite(new CloseableDirectory(w)); return w; From 93adddc61bd55fbbdc8bb820ab63efdd58f33d26 Mon Sep 17 00:00:00 2001 From: Christopher Taylor Date: Fri, 4 Mar 2016 12:20:54 +0100 Subject: [PATCH 067/320] Document `sum` as supported scoring type the examples all use `sum` for the `"score_mode"` field, but it isn't listed in the list of supported modes. 
--- docs/reference/query-dsl/has-child-query.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/query-dsl/has-child-query.asciidoc b/docs/reference/query-dsl/has-child-query.asciidoc index 24951bbe9302..01c3c35db54e 100644 --- a/docs/reference/query-dsl/has-child-query.asciidoc +++ b/docs/reference/query-dsl/has-child-query.asciidoc @@ -23,7 +23,7 @@ an example: ==== Scoring capabilities The `has_child` also has scoring support. The -supported score modes are `min`, `max`, `total`, `avg` or `none`. The default is +supported score modes are `min`, `max`, `sum`, `avg` or `none`. The default is `none` and yields the same behaviour as in previous versions. If the score mode is set to another value than `none`, the scores of all the matching child documents are aggregated into the associated parent From 050bfc31b672f63ac3d76c49a7c2fc62833c9ab3 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 7 Mar 2016 11:25:52 +0100 Subject: [PATCH 068/320] Don't call IR#leaves() after global field data has been constructed This IR may already be closed and GlobalFieldData is cached and this can cause AlreadyClosedException while checking the assert. 
--- .../fielddata/plain/ParentChildIndexFieldData.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java index 14d0375ba576..f7b258322833 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java @@ -305,13 +305,15 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData leaves; private final AtomicParentChildFieldData[] fielddata; - private final IndexReader reader; private final long ramBytesUsed; private final Map ordinalMapPerType; GlobalFieldData(IndexReader reader, AtomicParentChildFieldData[] fielddata, long ramBytesUsed, Map ordinalMapPerType) { - this.reader = reader; + this.coreCacheKey = reader.getCoreCacheKey(); + this.leaves = reader.leaves(); this.ramBytesUsed = ramBytesUsed; this.fielddata = fielddata; this.ordinalMapPerType = ordinalMapPerType; @@ -329,7 +331,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData Date: Mon, 7 Mar 2016 11:02:16 +0100 Subject: [PATCH 069/320] Remove old and unsupported version constants All version <= 2.0 are not supported anymore. This commit removes all uses of these versions. 
--- .../main/java/org/elasticsearch/Version.java | 395 ------------------ .../cluster/health/ClusterHealthResponse.java | 8 +- .../analysis/PatternAnalyzerProvider.java | 8 +- .../analysis/StandardAnalyzerProvider.java | 10 +- .../StandardHtmlStripAnalyzerProvider.java | 9 +- .../analysis/StemmerTokenFilterFactory.java | 12 +- .../blobstore/BlobStoreRepository.java | 2 +- .../bucket/terms/DoubleTerms.java | 10 +- .../stats/extended/InternalExtendedStats.java | 10 +- .../java/org/elasticsearch/VersionTests.java | 44 +- .../state/ClusterStateRequestTests.java | 8 +- .../admin/indices/upgrade/UpgradeIT.java | 8 +- .../BasicAnalysisBackwardCompatibilityIT.java | 15 +- .../BasicBackwardsCompatibilityIT.java | 4 - .../OldIndexBackwardsCompatibilityIT.java | 2 +- .../bwcompat/RestoreBackwardsCompatIT.java | 13 +- .../MetaDataIndexUpgradeServiceTests.java | 2 +- .../discovery/zen/ZenDiscoveryIT.java | 14 +- .../org/elasticsearch/get/GetActionIT.java | 133 +----- .../index/analysis/AnalysisModuleTests.java | 45 +- .../AnalyzerBackwardsCompatTests.java | 69 --- .../PreBuiltAnalyzerProviderFactoryTests.java | 43 -- .../index/analysis/PreBuiltAnalyzerTests.java | 12 +- ...PreBuiltCharFilterFactoryFactoryTests.java | 45 -- ...reBuiltTokenFilterFactoryFactoryTests.java | 57 --- .../PreBuiltTokenizerFactoryFactoryTests.java | 48 --- .../StemmerTokenFilterFactoryTests.java | 22 +- .../mapper/all/SimpleAllMapperTests.java | 13 - .../SimpleExternalMappingTests.java | 6 +- .../mapper/geo/GeoPointFieldMapperTests.java | 38 +- .../geo/GeohashMappingGeoPointTests.java | 12 +- .../timestamp/TimestampMappingTests.java | 18 +- .../mapper/update/UpdateMappingTests.java | 2 +- .../query/SimpleQueryStringBuilderTests.java | 3 +- .../PreBuiltAnalyzerIntegrationIT.java | 1 - .../plugins/PluginInfoTests.java | 4 +- .../bucket/DateHistogramOffsetIT.java | 7 - .../aggregations/bucket/GeoDistanceIT.java | 2 +- .../aggregations/bucket/GeoHashGridIT.java | 2 +- .../search/innerhits/InnerHitsIT.java | 
161 ------- .../test/rest/test/RestTestParserTests.java | 4 +- .../rest/test/SetupSectionParserTests.java | 6 +- .../rest/test/SkipSectionParserTests.java | 6 +- .../rest/test/TestSectionParserTests.java | 6 +- .../test/test/VersionUtilsTests.java | 24 +- 45 files changed, 122 insertions(+), 1241 deletions(-) delete mode 100644 core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java delete mode 100644 core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerProviderFactoryTests.java delete mode 100644 core/src/test/java/org/elasticsearch/index/analysis/PreBuiltCharFilterFactoryFactoryTests.java delete mode 100644 core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenFilterFactoryFactoryTests.java delete mode 100644 core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenizerFactoryFactoryTests.java diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index 3751896d3616..b725a6464a0e 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -39,209 +39,6 @@ public class Version { // AA values below 50 are beta builds, and below 99 are RC builds, with 99 indicating a release // the (internal) format of the id is there so we can easily do after/before checks on the id - // NOTE: ancient indexes created before 5.0 use this constant for e.g. analysis chain emulation (imperfect) - // its named lucene 3 but also lucene 4 or anything else we no longer support. 
- public static final org.apache.lucene.util.Version LUCENE_3_EMULATION_VERSION = org.apache.lucene.util.Version.LUCENE_5_0_0; - - public static final int V_0_18_0_ID = /*00*/180099; - public static final Version V_0_18_0 = new Version(V_0_18_0_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_18_1_ID = /*00*/180199; - public static final Version V_0_18_1 = new Version(V_0_18_1_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_18_2_ID = /*00*/180299; - public static final Version V_0_18_2 = new Version(V_0_18_2_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_18_3_ID = /*00*/180399; - public static final Version V_0_18_3 = new Version(V_0_18_3_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_18_4_ID = /*00*/180499; - public static final Version V_0_18_4 = new Version(V_0_18_4_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_18_5_ID = /*00*/180599; - public static final Version V_0_18_5 = new Version(V_0_18_5_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_18_6_ID = /*00*/180699; - public static final Version V_0_18_6 = new Version(V_0_18_6_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_18_7_ID = /*00*/180799; - public static final Version V_0_18_7 = new Version(V_0_18_7_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_18_8_ID = /*00*/180899; - public static final Version V_0_18_8 = new Version(V_0_18_8_ID, LUCENE_3_EMULATION_VERSION); - - public static final int V_0_19_0_RC1_ID = /*00*/190051; - public static final Version V_0_19_0_RC1 = new Version(V_0_19_0_RC1_ID, LUCENE_3_EMULATION_VERSION); - - public static final int V_0_19_0_RC2_ID = /*00*/190052; - public static final Version V_0_19_0_RC2 = new Version(V_0_19_0_RC2_ID, LUCENE_3_EMULATION_VERSION); - - public static final int V_0_19_0_RC3_ID = /*00*/190053; - public static final Version V_0_19_0_RC3 = new Version(V_0_19_0_RC3_ID, LUCENE_3_EMULATION_VERSION); - - public static final int V_0_19_0_ID 
= /*00*/190099; - public static final Version V_0_19_0 = new Version(V_0_19_0_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_19_1_ID = /*00*/190199; - public static final Version V_0_19_1 = new Version(V_0_19_1_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_19_2_ID = /*00*/190299; - public static final Version V_0_19_2 = new Version(V_0_19_2_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_19_3_ID = /*00*/190399; - public static final Version V_0_19_3 = new Version(V_0_19_3_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_19_4_ID = /*00*/190499; - public static final Version V_0_19_4 = new Version(V_0_19_4_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_19_5_ID = /*00*/190599; - public static final Version V_0_19_5 = new Version(V_0_19_5_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_19_6_ID = /*00*/190699; - public static final Version V_0_19_6 = new Version(V_0_19_6_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_19_7_ID = /*00*/190799; - public static final Version V_0_19_7 = new Version(V_0_19_7_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_19_8_ID = /*00*/190899; - public static final Version V_0_19_8 = new Version(V_0_19_8_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_19_9_ID = /*00*/190999; - public static final Version V_0_19_9 = new Version(V_0_19_9_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_19_10_ID = /*00*/191099; - public static final Version V_0_19_10 = new Version(V_0_19_10_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_19_11_ID = /*00*/191199; - public static final Version V_0_19_11 = new Version(V_0_19_11_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_19_12_ID = /*00*/191299; - public static final Version V_0_19_12 = new Version(V_0_19_12_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_19_13_ID = /*00*/191399; - public static final Version 
V_0_19_13 = new Version(V_0_19_13_ID, LUCENE_3_EMULATION_VERSION); - - public static final int V_0_20_0_RC1_ID = /*00*/200051; - public static final Version V_0_20_0_RC1 = new Version(V_0_20_0_RC1_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_20_0_ID = /*00*/200099; - public static final Version V_0_20_0 = new Version(V_0_20_0_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_20_1_ID = /*00*/200199; - public static final Version V_0_20_1 = new Version(V_0_20_1_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_20_2_ID = /*00*/200299; - public static final Version V_0_20_2 = new Version(V_0_20_2_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_20_3_ID = /*00*/200399; - public static final Version V_0_20_3 = new Version(V_0_20_3_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_20_4_ID = /*00*/200499; - public static final Version V_0_20_4 = new Version(V_0_20_4_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_20_5_ID = /*00*/200599; - public static final Version V_0_20_5 = new Version(V_0_20_5_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_20_6_ID = /*00*/200699; - public static final Version V_0_20_6 = new Version(V_0_20_6_ID, LUCENE_3_EMULATION_VERSION); - - public static final int V_0_90_0_Beta1_ID = /*00*/900001; - public static final Version V_0_90_0_Beta1 = new Version(V_0_90_0_Beta1_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_90_0_RC1_ID = /*00*/900051; - public static final Version V_0_90_0_RC1 = new Version(V_0_90_0_RC1_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_90_0_RC2_ID = /*00*/900052; - public static final Version V_0_90_0_RC2 = new Version(V_0_90_0_RC2_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_90_0_ID = /*00*/900099; - public static final Version V_0_90_0 = new Version(V_0_90_0_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_90_1_ID = /*00*/900199; - public static final Version 
V_0_90_1 = new Version(V_0_90_1_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_90_2_ID = /*00*/900299; - public static final Version V_0_90_2 = new Version(V_0_90_2_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_90_3_ID = /*00*/900399; - public static final Version V_0_90_3 = new Version(V_0_90_3_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_90_4_ID = /*00*/900499; - public static final Version V_0_90_4 = new Version(V_0_90_4_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_90_5_ID = /*00*/900599; - public static final Version V_0_90_5 = new Version(V_0_90_5_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_90_6_ID = /*00*/900699; - public static final Version V_0_90_6 = new Version(V_0_90_6_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_90_7_ID = /*00*/900799; - public static final Version V_0_90_7 = new Version(V_0_90_7_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_90_8_ID = /*00*/900899; - public static final Version V_0_90_8 = new Version(V_0_90_8_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_90_9_ID = /*00*/900999; - public static final Version V_0_90_9 = new Version(V_0_90_9_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_90_10_ID = /*00*/901099; - public static final Version V_0_90_10 = new Version(V_0_90_10_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_90_11_ID = /*00*/901199; - public static final Version V_0_90_11 = new Version(V_0_90_11_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_90_12_ID = /*00*/901299; - public static final Version V_0_90_12 = new Version(V_0_90_12_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_0_90_13_ID = /*00*/901399; - public static final Version V_0_90_13 = new Version(V_0_90_13_ID, LUCENE_3_EMULATION_VERSION); - - public static final int V_1_0_0_Beta1_ID = 1000001; - public static final Version V_1_0_0_Beta1 = new Version(V_1_0_0_Beta1_ID, 
LUCENE_3_EMULATION_VERSION); - public static final int V_1_0_0_Beta2_ID = 1000002; - public static final Version V_1_0_0_Beta2 = new Version(V_1_0_0_Beta2_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_0_0_RC1_ID = 1000051; - public static final Version V_1_0_0_RC1 = new Version(V_1_0_0_RC1_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_0_0_RC2_ID = 1000052; - public static final Version V_1_0_0_RC2 = new Version(V_1_0_0_RC2_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_0_0_ID = 1000099; - public static final Version V_1_0_0 = new Version(V_1_0_0_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_0_1_ID = 1000199; - public static final Version V_1_0_1 = new Version(V_1_0_1_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_0_2_ID = 1000299; - public static final Version V_1_0_2 = new Version(V_1_0_2_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_0_3_ID = 1000399; - public static final Version V_1_0_3 = new Version(V_1_0_3_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_1_0_ID = 1010099; - public static final Version V_1_1_0 = new Version(V_1_1_0_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_1_1_ID = 1010199; - public static final Version V_1_1_1 = new Version(V_1_1_1_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_1_2_ID = 1010299; - public static final Version V_1_1_2 = new Version(V_1_1_2_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_2_0_ID = 1020099; - public static final Version V_1_2_0 = new Version(V_1_2_0_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_2_1_ID = 1020199; - public static final Version V_1_2_1 = new Version(V_1_2_1_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_2_2_ID = 1020299; - public static final Version V_1_2_2 = new Version(V_1_2_2_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_2_3_ID = 1020399; - public static final Version V_1_2_3 = new 
Version(V_1_2_3_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_2_4_ID = 1020499; - public static final Version V_1_2_4 = new Version(V_1_2_4_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_3_0_ID = 1030099; - public static final Version V_1_3_0 = new Version(V_1_3_0_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_3_1_ID = 1030199; - public static final Version V_1_3_1 = new Version(V_1_3_1_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_3_2_ID = 1030299; - public static final Version V_1_3_2 = new Version(V_1_3_2_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_3_3_ID = 1030399; - public static final Version V_1_3_3 = new Version(V_1_3_3_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_3_4_ID = 1030499; - public static final Version V_1_3_4 = new Version(V_1_3_4_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_3_5_ID = 1030599; - public static final Version V_1_3_5 = new Version(V_1_3_5_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_3_6_ID = 1030699; - public static final Version V_1_3_6 = new Version(V_1_3_6_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_3_7_ID = 1030799; - public static final Version V_1_3_7 = new Version(V_1_3_7_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_3_8_ID = 1030899; - public static final Version V_1_3_8 = new Version(V_1_3_8_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_3_9_ID = 1030999; - public static final Version V_1_3_9 = new Version(V_1_3_9_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_4_0_Beta1_ID = 1040001; - public static final Version V_1_4_0_Beta1 = new Version(V_1_4_0_Beta1_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_4_0_ID = 1040099; - public static final Version V_1_4_0 = new Version(V_1_4_0_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_4_1_ID = 1040199; - public static final Version V_1_4_1 = new 
Version(V_1_4_1_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_4_2_ID = 1040299; - public static final Version V_1_4_2 = new Version(V_1_4_2_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_4_3_ID = 1040399; - public static final Version V_1_4_3 = new Version(V_1_4_3_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_4_4_ID = 1040499; - public static final Version V_1_4_4 = new Version(V_1_4_4_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_4_5_ID = 1040599; - public static final Version V_1_4_5 = new Version(V_1_4_5_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_5_0_ID = 1050099; - public static final Version V_1_5_0 = new Version(V_1_5_0_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_5_1_ID = 1050199; - public static final Version V_1_5_1 = new Version(V_1_5_1_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_5_2_ID = 1050299; - public static final Version V_1_5_2 = new Version(V_1_5_2_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_6_0_ID = 1060099; - public static final Version V_1_6_0 = new Version(V_1_6_0_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_6_1_ID = 1060199; - public static final Version V_1_6_1 = new Version(V_1_6_1_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_6_2_ID = 1060299; - public static final Version V_1_6_2 = new Version(V_1_6_2_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_7_0_ID = 1070099; - public static final Version V_1_7_0 = new Version(V_1_7_0_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_7_1_ID = 1070199; - public static final Version V_1_7_1 = new Version(V_1_7_1_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_7_2_ID = 1070299; - public static final Version V_1_7_2 = new Version(V_1_7_2_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_7_3_ID = 1070399; - public static final Version V_1_7_3 = new Version(V_1_7_3_ID, 
LUCENE_3_EMULATION_VERSION); - public static final int V_1_7_4_ID = 1070499; - public static final Version V_1_7_4 = new Version(V_1_7_4_ID, LUCENE_3_EMULATION_VERSION); - public static final int V_1_7_5_ID = 1070599; - public static final Version V_1_7_5 = new Version(V_1_7_5_ID, LUCENE_3_EMULATION_VERSION); public static final int V_2_0_0_beta1_ID = 2000001; public static final Version V_2_0_0_beta1 = new Version(V_2_0_0_beta1_ID, org.apache.lucene.util.Version.LUCENE_5_2_1); @@ -304,198 +101,6 @@ public class Version { return V_2_0_0_beta2; case V_2_0_0_beta1_ID: return V_2_0_0_beta1; - case V_1_7_5_ID: - return V_1_7_5; - case V_1_7_4_ID: - return V_1_7_4; - case V_1_7_3_ID: - return V_1_7_3; - case V_1_7_2_ID: - return V_1_7_2; - case V_1_7_1_ID: - return V_1_7_1; - case V_1_7_0_ID: - return V_1_7_0; - case V_1_6_2_ID: - return V_1_6_2; - case V_1_6_1_ID: - return V_1_6_1; - case V_1_6_0_ID: - return V_1_6_0; - case V_1_5_2_ID: - return V_1_5_2; - case V_1_5_1_ID: - return V_1_5_1; - case V_1_5_0_ID: - return V_1_5_0; - case V_1_4_5_ID: - return V_1_4_5; - case V_1_4_4_ID: - return V_1_4_4; - case V_1_4_3_ID: - return V_1_4_3; - case V_1_4_2_ID: - return V_1_4_2; - case V_1_4_1_ID: - return V_1_4_1; - case V_1_4_0_ID: - return V_1_4_0; - case V_1_4_0_Beta1_ID: - return V_1_4_0_Beta1; - case V_1_3_9_ID: - return V_1_3_9; - case V_1_3_8_ID: - return V_1_3_8; - case V_1_3_7_ID: - return V_1_3_7; - case V_1_3_6_ID: - return V_1_3_6; - case V_1_3_5_ID: - return V_1_3_5; - case V_1_3_4_ID: - return V_1_3_4; - case V_1_3_3_ID: - return V_1_3_3; - case V_1_3_2_ID: - return V_1_3_2; - case V_1_3_1_ID: - return V_1_3_1; - case V_1_3_0_ID: - return V_1_3_0; - case V_1_2_4_ID: - return V_1_2_4; - case V_1_2_3_ID: - return V_1_2_3; - case V_1_2_2_ID: - return V_1_2_2; - case V_1_2_1_ID: - return V_1_2_1; - case V_1_2_0_ID: - return V_1_2_0; - case V_1_1_2_ID: - return V_1_1_2; - case V_1_1_1_ID: - return V_1_1_1; - case V_1_1_0_ID: - return V_1_1_0; - case V_1_0_3_ID: - 
return V_1_0_3; - case V_1_0_2_ID: - return V_1_0_2; - case V_1_0_1_ID: - return V_1_0_1; - case V_1_0_0_ID: - return V_1_0_0; - case V_1_0_0_RC2_ID: - return V_1_0_0_RC2; - case V_1_0_0_RC1_ID: - return V_1_0_0_RC1; - case V_1_0_0_Beta2_ID: - return V_1_0_0_Beta2; - case V_1_0_0_Beta1_ID: - return V_1_0_0_Beta1; - case V_0_90_13_ID: - return V_0_90_13; - case V_0_90_12_ID: - return V_0_90_12; - case V_0_90_11_ID: - return V_0_90_11; - case V_0_90_10_ID: - return V_0_90_10; - case V_0_90_9_ID: - return V_0_90_9; - case V_0_90_8_ID: - return V_0_90_8; - case V_0_90_7_ID: - return V_0_90_7; - case V_0_90_6_ID: - return V_0_90_6; - case V_0_90_5_ID: - return V_0_90_5; - case V_0_90_4_ID: - return V_0_90_4; - case V_0_90_3_ID: - return V_0_90_3; - case V_0_90_2_ID: - return V_0_90_2; - case V_0_90_1_ID: - return V_0_90_1; - case V_0_90_0_ID: - return V_0_90_0; - case V_0_90_0_RC2_ID: - return V_0_90_0_RC2; - case V_0_90_0_RC1_ID: - return V_0_90_0_RC1; - case V_0_90_0_Beta1_ID: - return V_0_90_0_Beta1; - case V_0_20_6_ID: - return V_0_20_6; - case V_0_20_5_ID: - return V_0_20_5; - case V_0_20_4_ID: - return V_0_20_4; - case V_0_20_3_ID: - return V_0_20_3; - case V_0_20_2_ID: - return V_0_20_2; - case V_0_20_1_ID: - return V_0_20_1; - case V_0_20_0_ID: - return V_0_20_0; - case V_0_20_0_RC1_ID: - return V_0_20_0_RC1; - case V_0_19_0_RC1_ID: - return V_0_19_0_RC1; - case V_0_19_0_RC2_ID: - return V_0_19_0_RC2; - case V_0_19_0_RC3_ID: - return V_0_19_0_RC3; - case V_0_19_0_ID: - return V_0_19_0; - case V_0_19_1_ID: - return V_0_19_1; - case V_0_19_2_ID: - return V_0_19_2; - case V_0_19_3_ID: - return V_0_19_3; - case V_0_19_4_ID: - return V_0_19_4; - case V_0_19_5_ID: - return V_0_19_5; - case V_0_19_6_ID: - return V_0_19_6; - case V_0_19_7_ID: - return V_0_19_7; - case V_0_19_8_ID: - return V_0_19_8; - case V_0_19_9_ID: - return V_0_19_9; - case V_0_19_10_ID: - return V_0_19_10; - case V_0_19_11_ID: - return V_0_19_11; - case V_0_19_12_ID: - return V_0_19_12; - case 
V_0_19_13_ID: - return V_0_19_13; - case V_0_18_0_ID: - return V_0_18_0; - case V_0_18_1_ID: - return V_0_18_1; - case V_0_18_2_ID: - return V_0_18_2; - case V_0_18_3_ID: - return V_0_18_3; - case V_0_18_4_ID: - return V_0_18_4; - case V_0_18_5_ID: - return V_0_18_5; - case V_0_18_6_ID: - return V_0_18_6; - case V_0_18_7_ID: - return V_0_18_7; - case V_0_18_8_ID: - return V_0_18_8; default: return new Version(id, org.apache.lucene.util.Version.LATEST); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java index ccae17b1eebe..946897a2c978 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java @@ -197,9 +197,7 @@ public class ClusterHealthResponse extends ActionResponse implements StatusToXCo numberOfPendingTasks = in.readInt(); timedOut = in.readBoolean(); numberOfInFlightFetch = in.readInt(); - if (in.getVersion().onOrAfter(Version.V_1_7_0)) { - delayedUnassignedShards= in.readInt(); - } + delayedUnassignedShards= in.readInt(); taskMaxWaitingTime = TimeValue.readTimeValue(in); } @@ -212,9 +210,7 @@ public class ClusterHealthResponse extends ActionResponse implements StatusToXCo out.writeInt(numberOfPendingTasks); out.writeBoolean(timedOut); out.writeInt(numberOfInFlightFetch); - if (out.getVersion().onOrAfter(Version.V_1_7_0)) { - out.writeInt(delayedUnassignedShards); - } + out.writeInt(delayedUnassignedShards); taskMaxWaitingTime.writeTo(out); } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzerProvider.java index 74150c13bf63..f00988f4ad2b 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzerProvider.java +++ 
b/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzerProvider.java @@ -40,13 +40,7 @@ public class PatternAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final StandardAnalyzer standardAnalyzer; - private final Version esVersion; public StandardAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); - this.esVersion = indexSettings.getIndexVersionCreated(); - final CharArraySet defaultStopwords; - if (esVersion.onOrAfter(Version.V_1_0_0_Beta1)) { - defaultStopwords = CharArraySet.EMPTY_SET; - } else { - defaultStopwords = StopAnalyzer.ENGLISH_STOP_WORDS_SET; - } - + final CharArraySet defaultStopwords = CharArraySet.EMPTY_SET; CharArraySet stopWords = Analysis.parseStopWords(env, settings, defaultStopwords); int maxTokenLength = settings.getAsInt("max_token_length", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); standardAnalyzer = new StandardAnalyzer(stopWords); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzerProvider.java index b2e95737ee1e..a3c65b0a17bd 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzerProvider.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzerProvider.java @@ -32,17 +32,10 @@ import org.elasticsearch.index.IndexSettings; public class StandardHtmlStripAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final StandardHtmlStripAnalyzer analyzer; - private final Version esVersion; public StandardHtmlStripAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); - this.esVersion = indexSettings.getIndexVersionCreated(); - final CharArraySet defaultStopwords; - if (esVersion.onOrAfter(Version.V_1_0_0_RC1)) { - defaultStopwords = 
CharArraySet.EMPTY_SET; - } else { - defaultStopwords = StopAnalyzer.ENGLISH_STOP_WORDS_SET; - } + final CharArraySet defaultStopwords = CharArraySet.EMPTY_SET; CharArraySet stopWords = Analysis.parseStopWords(env, settings, defaultStopwords); analyzer = new StandardHtmlStripAnalyzer(stopWords); analyzer.setVersion(version); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactory.java index 1154f9b0f798..317b3e07850a 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactory.java @@ -122,11 +122,7 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { // English stemmers } else if ("english".equalsIgnoreCase(language)) { - if (indexVersion.onOrAfter(Version.V_1_3_0)) { - return new PorterStemFilter(tokenStream); - } else { - return new SnowballFilter(tokenStream, new EnglishStemmer()); - } + return new PorterStemFilter(tokenStream); } else if ("light_english".equalsIgnoreCase(language) || "lightEnglish".equalsIgnoreCase(language) || "kstem".equalsIgnoreCase(language)) { return new KStemFilter(tokenStream); @@ -135,11 +131,7 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { } else if ("porter".equalsIgnoreCase(language)) { return new PorterStemFilter(tokenStream); } else if ("porter2".equalsIgnoreCase(language)) { - if (indexVersion.onOrAfter(Version.V_1_3_0)) { - return new SnowballFilter(tokenStream, new EnglishStemmer()); - } else { - return new SnowballFilter(tokenStream, new PorterStemmer()); - } + return new SnowballFilter(tokenStream, new EnglishStemmer()); } else if ("minimal_english".equalsIgnoreCase(language) || "minimalEnglish".equalsIgnoreCase(language)) { return new EnglishMinimalStemFilter(tokenStream); } else if ("possessive_english".equalsIgnoreCase(language) || 
"possessiveEnglish".equalsIgnoreCase(language)) { diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 552e6aaf2e4c..a6ea381adb40 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -458,7 +458,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent @Override protected void doReadFrom(StreamInput in) throws IOException { - if (in.getVersion().onOrAfter(Version.V_1_4_0_Beta1)) { - this.docCountError = in.readLong(); - } else { - this.docCountError = -1; - } + this.docCountError = in.readLong(); this.order = InternalOrder.Streams.readOrder(in); this.formatter = ValueFormatterStreams.readOptional(in); this.requiredSize = readSize(in); @@ -218,9 +214,7 @@ public class DoubleTerms extends InternalTerms @Override protected void doWriteTo(StreamOutput out) throws IOException { - if (out.getVersion().onOrAfter(Version.V_1_4_0_Beta1)) { - out.writeLong(docCountError); - } + out.writeLong(docCountError); InternalOrder.Streams.writeOrder(order, out); ValueFormatterStreams.writeOptional(formatter, out); writeSize(requiredSize, out); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/InternalExtendedStats.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/InternalExtendedStats.java index 543c5907070e..9fac5809cefc 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/InternalExtendedStats.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/InternalExtendedStats.java @@ -158,19 +158,13 @@ public class InternalExtendedStats extends InternalStats implements ExtendedStat @Override public void readOtherStatsFrom(StreamInput in) 
throws IOException { sumOfSqrs = in.readDouble(); - if (in.getVersion().onOrAfter(Version.V_1_4_3)) { - sigma = in.readDouble(); - } else { - sigma = 2.0; - } + sigma = in.readDouble(); } @Override protected void writeOtherStatsTo(StreamOutput out) throws IOException { out.writeDouble(sumOfSqrs); - if (out.getVersion().onOrAfter(Version.V_1_4_3)) { - out.writeDouble(sigma); - } + out.writeDouble(sigma); } diff --git a/core/src/test/java/org/elasticsearch/VersionTests.java b/core/src/test/java/org/elasticsearch/VersionTests.java index 4669f5bc718c..05dabb2d8ffc 100644 --- a/core/src/test/java/org/elasticsearch/VersionTests.java +++ b/core/src/test/java/org/elasticsearch/VersionTests.java @@ -31,8 +31,8 @@ import java.util.HashMap; import java.util.Locale; import java.util.Map; -import static org.elasticsearch.Version.V_0_20_0; -import static org.elasticsearch.Version.V_0_90_0; +import static org.elasticsearch.Version.V_2_2_0; +import static org.elasticsearch.Version.V_5_0_0; import static org.elasticsearch.test.VersionUtils.randomVersion; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.containsString; @@ -42,21 +42,21 @@ import static org.hamcrest.Matchers.sameInstance; public class VersionTests extends ESTestCase { public void testVersionComparison() throws Exception { - assertThat(V_0_20_0.before(V_0_90_0), is(true)); - assertThat(V_0_20_0.before(V_0_20_0), is(false)); - assertThat(V_0_90_0.before(V_0_20_0), is(false)); + assertThat(V_2_2_0.before(V_5_0_0), is(true)); + assertThat(V_2_2_0.before(V_2_2_0), is(false)); + assertThat(V_5_0_0.before(V_2_2_0), is(false)); - assertThat(V_0_20_0.onOrBefore(V_0_90_0), is(true)); - assertThat(V_0_20_0.onOrBefore(V_0_20_0), is(true)); - assertThat(V_0_90_0.onOrBefore(V_0_20_0), is(false)); + assertThat(V_2_2_0.onOrBefore(V_5_0_0), is(true)); + assertThat(V_2_2_0.onOrBefore(V_2_2_0), is(true)); + assertThat(V_5_0_0.onOrBefore(V_2_2_0), is(false)); - assertThat(V_0_20_0.after(V_0_90_0), 
is(false)); - assertThat(V_0_20_0.after(V_0_20_0), is(false)); - assertThat(V_0_90_0.after(V_0_20_0), is(true)); + assertThat(V_2_2_0.after(V_5_0_0), is(false)); + assertThat(V_2_2_0.after(V_2_2_0), is(false)); + assertThat(V_5_0_0.after(V_2_2_0), is(true)); - assertThat(V_0_20_0.onOrAfter(V_0_90_0), is(false)); - assertThat(V_0_20_0.onOrAfter(V_0_20_0), is(true)); - assertThat(V_0_90_0.onOrAfter(V_0_20_0), is(true)); + assertThat(V_2_2_0.onOrAfter(V_5_0_0), is(false)); + assertThat(V_2_2_0.onOrAfter(V_2_2_0), is(true)); + assertThat(V_5_0_0.onOrAfter(V_2_2_0), is(true)); } public void testVersionConstantPresent() { @@ -127,29 +127,27 @@ public class VersionTests extends ESTestCase { public void testIndexCreatedVersion() { // an actual index has a IndexMetaData.SETTING_INDEX_UUID - final Version version = randomFrom(Version.V_0_18_0, Version.V_0_90_13, Version.V_1_3_0); + final Version version = randomFrom(Version.V_2_0_0, Version.V_2_3_0, Version.V_5_0_0); assertEquals(version, Version.indexCreated(Settings.builder().put(IndexMetaData.SETTING_INDEX_UUID, "foo").put(IndexMetaData.SETTING_VERSION_CREATED, version).build())); } public void testMinCompatVersion() { assertThat(Version.V_2_0_0_beta1.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0_beta1)); - assertThat(Version.V_1_3_0.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0)); - assertThat(Version.V_1_2_0.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0)); - assertThat(Version.V_1_2_3.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0)); - assertThat(Version.V_1_0_0_RC2.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0_RC2)); + assertThat(Version.V_2_1_0.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0)); + assertThat(Version.V_2_2_0.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0)); + assertThat(Version.V_2_3_0.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0)); + assertThat(Version.V_5_0_0.minimumCompatibilityVersion(), equalTo(Version.V_5_0_0)); } public void 
testToString() { // with 2.0.beta we lowercase assertEquals("2.0.0-beta1", Version.V_2_0_0_beta1.toString()); - assertEquals("1.4.0.Beta1", Version.V_1_4_0_Beta1.toString()); - assertEquals("1.4.0", Version.V_1_4_0.toString()); + assertEquals("5.0.0", Version.V_5_0_0.toString()); + assertEquals("2.3.0", Version.V_2_3_0.toString()); } public void testIsBeta() { assertTrue(Version.V_2_0_0_beta1.isBeta()); - assertTrue(Version.V_1_4_0_Beta1.isBeta()); - assertFalse(Version.V_1_4_0.isBeta()); } public void testParseVersion() { diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java index a2d838bc3fda..fc04de812549 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java @@ -55,13 +55,7 @@ public class ClusterStateRequestTests extends ESTestCase { assertThat(deserializedCSRequest.nodes(), equalTo(clusterStateRequest.nodes())); assertThat(deserializedCSRequest.blocks(), equalTo(clusterStateRequest.blocks())); assertThat(deserializedCSRequest.indices(), equalTo(clusterStateRequest.indices())); - - if (testVersion.onOrAfter(Version.V_1_5_0)) { - assertOptionsMatch(deserializedCSRequest.indicesOptions(), clusterStateRequest.indicesOptions()); - } else { - // versions before V_1_5_0 use IndicesOptions.lenientExpandOpen() - assertOptionsMatch(deserializedCSRequest.indicesOptions(), IndicesOptions.lenientExpandOpen()); - } + assertOptionsMatch(deserializedCSRequest.indicesOptions(), clusterStateRequest.indicesOptions()); } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java index 9d8002210e7f..baca9508a8bb 100644 --- 
a/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java @@ -98,13 +98,7 @@ public class UpgradeIT extends ESBackcompatTestCase { } indexRandom(true, docs); ensureGreen(indexName); - if (globalCompatibilityVersion().before(Version.V_1_4_0_Beta1)) { - // before 1.4 and the wait_if_ongoing flag, flushes could fail randomly, so we - // need to continue to try flushing until all shards succeed - assertTrue(awaitBusy(() -> flush(indexName).getFailedShards() == 0)); - } else { - assertEquals(0, flush(indexName).getFailedShards()); - } + assertEquals(0, flush(indexName).getFailedShards()); // index more docs that won't be flushed numDocs = scaledRandomIntBetween(100, 1000); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java index 9a0316050b1d..9ea9b340c203 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java @@ -104,17 +104,8 @@ public class BasicAnalysisBackwardCompatibilityIT extends ESBackcompatTestCase { } private String randomAnalyzer() { - while(true) { - PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(getRandom(), PreBuiltAnalyzers.values()); - if (preBuiltAnalyzers == PreBuiltAnalyzers.SORANI && compatibilityVersion().before(Version.V_1_3_0)) { - continue; // SORANI was added in 1.3.0 - } - if (preBuiltAnalyzers == PreBuiltAnalyzers.LITHUANIAN && compatibilityVersion().before(Version.V_2_1_0)) { - continue; // LITHUANIAN was added in 2.1.0 - } - return preBuiltAnalyzers.name().toLowerCase(Locale.ROOT); - } - + PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(getRandom(), PreBuiltAnalyzers.values()); + return 
preBuiltAnalyzers.name().toLowerCase(Locale.ROOT); } private static final class InputOutput { @@ -127,7 +118,5 @@ public class BasicAnalysisBackwardCompatibilityIT extends ESBackcompatTestCase { this.input = input; this.field = field; } - - } } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java index 5b7c4fa37ba3..7e46825398b6 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java @@ -188,10 +188,6 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { docs[i] = client().prepareIndex("test", "type1", id).setSource("field1", English.intToEnglish(numDocs + i)); } indexRandom(true, docs); - if (compatibilityVersion().before(Version.V_1_3_0)) { - // issue another refresh through a new node to side step issue #6545 - assertNoFailures(backwardsCluster().internalCluster().dataNodeClient().admin().indices().prepareRefresh().setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().get()); - } numDocs *= 2; } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index b13cee985657..8e3dbd5f563d 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -446,7 +446,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { // #10067: create-bwc-index.py deleted any doc with long_sort:[10-20] void assertDeleteByQueryWorked(String indexName, Version version) throws Exception { - if (version.onOrBefore(Version.V_1_0_0_Beta2) || version.onOrAfter(Version.V_2_0_0_beta1)) { + if (version.onOrAfter(Version.V_2_0_0_beta1)) { // TODO: 
remove this once #10262 is fixed return; } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java index ec73edd493f2..483040209d08 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java @@ -194,14 +194,11 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { assertThat(template.settings().getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, -1), equalTo(1)); assertThat(template.mappings().size(), equalTo(1)); assertThat(template.mappings().get("type1").string(), equalTo("{\"type1\":{\"_source\":{\"enabled\":false}}}")); - if (Version.fromString(version).onOrAfter(Version.V_1_1_0)) { - // Support for aliases in templates was added in v1.1.0 - assertThat(template.aliases().size(), equalTo(3)); - assertThat(template.aliases().get("alias1"), notNullValue()); - assertThat(template.aliases().get("alias2").filter().string(), containsString(version)); - assertThat(template.aliases().get("alias2").indexRouting(), equalTo("kimchy")); - assertThat(template.aliases().get("{index}-alias"), notNullValue()); - } + assertThat(template.aliases().size(), equalTo(3)); + assertThat(template.aliases().get("alias1"), notNullValue()); + assertThat(template.aliases().get("alias2").filter().string(), containsString(version)); + assertThat(template.aliases().get("alias2").indexRouting(), equalTo("kimchy")); + assertThat(template.aliases().get("{index}-alias"), notNullValue()); logger.info("--> cleanup"); cluster().wipeIndices(restoreInfo.indices().toArray(new String[restoreInfo.indices().size()])); diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java index a43da9e53fac..0d8784834fa6 100644 --- 
a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java @@ -83,7 +83,7 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_CREATION_DATE, 1) .put(IndexMetaData.SETTING_INDEX_UUID, "BOOM") - .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_0_18_1_ID) + .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_2_0_0_beta1) .put(indexSettings) .build(); IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build(); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java index ee92945c4ff5..9ad10cc38886 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java @@ -275,10 +275,10 @@ public class ZenDiscoveryIT extends ESIntegTestCase { Settings nodeSettings = Settings.settingsBuilder() .put("discovery.type", "zen") // <-- To override the local setting if set externally .build(); - String nodeName = internalCluster().startNode(nodeSettings, Version.V_2_0_0_beta1); + String nodeName = internalCluster().startNode(nodeSettings, Version.V_5_0_0); ZenDiscovery zenDiscovery = (ZenDiscovery) internalCluster().getInstance(Discovery.class, nodeName); ClusterService clusterService = internalCluster().getInstance(ClusterService.class, nodeName); - DiscoveryNode node = new DiscoveryNode("_node_id", new InetSocketTransportAddress(InetAddress.getByName("0.0.0.0"), 0), Version.V_1_6_0); + DiscoveryNode node = new DiscoveryNode("_node_id", new InetSocketTransportAddress(InetAddress.getByName("0.0.0.0"), 0), Version.V_2_0_0); final AtomicReference holder = new AtomicReference<>(); zenDiscovery.handleJoinRequest(node, clusterService.state(), new 
MembershipAction.JoinCallback() { @Override @@ -292,16 +292,16 @@ public class ZenDiscoveryIT extends ESIntegTestCase { }); assertThat(holder.get(), notNullValue()); - assertThat(holder.get().getMessage(), equalTo("Can't handle join request from a node with a version [1.6.0] that is lower than the minimum compatible version [" + Version.V_2_0_0_beta1.minimumCompatibilityVersion() + "]")); + assertThat(holder.get().getMessage(), equalTo("Can't handle join request from a node with a version [2.0.0] that is lower than the minimum compatible version [" + Version.V_5_0_0.minimumCompatibilityVersion() + "]")); } public void testJoinElectedMaster_incompatibleMinVersion() { - ElectMasterService electMasterService = new ElectMasterService(Settings.EMPTY, Version.V_2_0_0_beta1); + ElectMasterService electMasterService = new ElectMasterService(Settings.EMPTY, Version.V_5_0_0); - DiscoveryNode node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_2_0_0_beta1); + DiscoveryNode node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_5_0_0); assertThat(electMasterService.electMaster(Collections.singletonList(node)), sameInstance(node)); - node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_1_6_0); - assertThat("Can't join master because version 1.6.0 is lower than the minimum compatable version 2.0.0 can support", electMasterService.electMaster(Collections.singletonList(node)), nullValue()); + node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_2_0_0); + assertThat("Can't join master because version 2.0.0 is lower than the minimum compatable version 5.0.0 can support", electMasterService.electMaster(Collections.singletonList(node)), nullValue()); } public void testDiscoveryStats() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/get/GetActionIT.java b/core/src/test/java/org/elasticsearch/get/GetActionIT.java index 68a4df685be8..c797a5a1968b 100644 
--- a/core/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/core/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -65,11 +65,6 @@ import static org.hamcrest.Matchers.startsWith; public class GetActionIT extends ESIntegTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(InternalSettingsPlugin.class); // uses index.version.created - } - public void testSimpleGet() { assertAcked(prepareCreate("test") .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1)) @@ -324,128 +319,6 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2")); } - public void testThatGetFromTranslogShouldWorkWithExcludeBackcompat() throws Exception { - String index = "test"; - String type = "type1"; - - String mapping = jsonBuilder() - .startObject() - .startObject(type) - .startObject("_source") - .array("excludes", "excluded") - .endObject() - .endObject() - .endObject() - .string(); - - assertAcked(prepareCreate(index) - .addMapping(type, mapping) - .setSettings("index.refresh_interval", -1, IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)); - - client().prepareIndex(index, type, "1") - .setSource(jsonBuilder().startObject().field("field", "1", "2").field("excluded", "should not be seen").endObject()) - .get(); - - GetResponse responseBeforeFlush = client().prepareGet(index, type, "1").get(); - client().admin().indices().prepareFlush(index).get(); - GetResponse responseAfterFlush = client().prepareGet(index, type, "1").get(); - - assertThat(responseBeforeFlush.isExists(), is(true)); - assertThat(responseAfterFlush.isExists(), is(true)); - assertThat(responseBeforeFlush.getSourceAsMap(), hasKey("field")); - assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("excluded"))); - assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString())); - } - - public void 
testThatGetFromTranslogShouldWorkWithIncludeBackcompat() throws Exception { - String index = "test"; - String type = "type1"; - - String mapping = jsonBuilder() - .startObject() - .startObject(type) - .startObject("_source") - .array("includes", "included") - .endObject() - .endObject() - .endObject() - .string(); - - assertAcked(prepareCreate(index) - .addMapping(type, mapping) - .setSettings("index.refresh_interval", -1, IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)); - - client().prepareIndex(index, type, "1") - .setSource(jsonBuilder().startObject().field("field", "1", "2").field("included", "should be seen").endObject()) - .get(); - - GetResponse responseBeforeFlush = client().prepareGet(index, type, "1").get(); - flush(); - GetResponse responseAfterFlush = client().prepareGet(index, type, "1").get(); - - assertThat(responseBeforeFlush.isExists(), is(true)); - assertThat(responseAfterFlush.isExists(), is(true)); - assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("field"))); - assertThat(responseBeforeFlush.getSourceAsMap(), hasKey("included")); - assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString())); - } - - @SuppressWarnings("unchecked") - public void testThatGetFromTranslogShouldWorkWithIncludeExcludeAndFieldsBackcompat() throws Exception { - String index = "test"; - String type = "type1"; - - String mapping = jsonBuilder() - .startObject() - .startObject(type) - .startObject("_source") - .array("includes", "included") - .array("excludes", "excluded") - .endObject() - .endObject() - .endObject() - .string(); - - assertAcked(prepareCreate(index) - .addMapping(type, mapping) - .setSettings("index.refresh_interval", -1, IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)); - - client().prepareIndex(index, type, "1") - .setSource(jsonBuilder().startObject() - .field("field", "1", "2") - .startObject("included").field("field", "should be seen").field("field2", "extra field to 
remove").endObject() - .startObject("excluded").field("field", "should not be seen").field("field2", "should not be seen").endObject() - .endObject()) - .get(); - - GetResponse responseBeforeFlush = client().prepareGet(index, type, "1").setFields("_source", "included.field", "excluded.field").get(); - assertThat(responseBeforeFlush.isExists(), is(true)); - assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("excluded"))); - assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("field"))); - assertThat(responseBeforeFlush.getSourceAsMap(), hasKey("included")); - - // now tests that extra source filtering works as expected - GetResponse responseBeforeFlushWithExtraFilters = client().prepareGet(index, type, "1").setFields("included.field", "excluded.field") - .setFetchSource(new String[]{"field", "*.field"}, new String[]{"*.field2"}).get(); - assertThat(responseBeforeFlushWithExtraFilters.isExists(), is(true)); - assertThat(responseBeforeFlushWithExtraFilters.getSourceAsMap(), not(hasKey("excluded"))); - assertThat(responseBeforeFlushWithExtraFilters.getSourceAsMap(), not(hasKey("field"))); - assertThat(responseBeforeFlushWithExtraFilters.getSourceAsMap(), hasKey("included")); - assertThat((Map) responseBeforeFlushWithExtraFilters.getSourceAsMap().get("included"), hasKey("field")); - assertThat((Map) responseBeforeFlushWithExtraFilters.getSourceAsMap().get("included"), not(hasKey("field2"))); - - flush(); - GetResponse responseAfterFlush = client().prepareGet(index, type, "1").setFields("_source", "included.field", "excluded.field").get(); - GetResponse responseAfterFlushWithExtraFilters = client().prepareGet(index, type, "1").setFields("included.field", "excluded.field") - .setFetchSource("*.field", "*.field2").get(); - - assertThat(responseAfterFlush.isExists(), is(true)); - assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString())); - - assertThat(responseAfterFlushWithExtraFilters.isExists(), is(true)); - 
assertThat(responseBeforeFlushWithExtraFilters.getSourceAsString(), is(responseAfterFlushWithExtraFilters.getSourceAsString())); - } - public void testGetWithVersion() { assertAcked(prepareCreate("test").addAlias(new Alias("alias")) .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1))); @@ -1006,8 +879,7 @@ public class GetActionIT extends ESIntegTestCase { String createIndexSource = "{\n" + " \"settings\": {\n" + " \"index.translog.flush_threshold_size\": \"1pb\",\n" + - " \"refresh_interval\": \"-1\",\n" + - " \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" + + " \"refresh_interval\": \"-1\"\n" + " },\n" + " \"mappings\": {\n" + " \"doc\": {\n" + @@ -1058,8 +930,7 @@ public class GetActionIT extends ESIntegTestCase { String createIndexSource = "{\n" + " \"settings\": {\n" + " \"index.translog.flush_threshold_size\": \"1pb\",\n" + - " \"refresh_interval\": \"-1\",\n" + - " \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" + + " \"refresh_interval\": \"-1\"\n" + " },\n" + " \"mappings\": {\n" + " \"doc\": {\n" + diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java index 43455c2a11d5..6468fae93973 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java @@ -108,7 +108,7 @@ public class AnalysisModuleTests extends ModuleTestCase { Settings settings2 = settingsBuilder() .loadFromStream(yaml, getClass().getResourceAsStream(yaml)) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0) .build(); AnalysisRegistry newRegistry = getNewRegistry(settings2); AnalysisService analysisService2 = 
getAnalysisService(newRegistry, settings2); @@ -121,8 +121,8 @@ public class AnalysisModuleTests extends ModuleTestCase { // analysis service has the expected version assertThat(analysisService2.analyzer("standard").analyzer(), is(instanceOf(StandardAnalyzer.class))); - assertEquals(Version.V_0_90_0.luceneVersion, analysisService2.analyzer("standard").analyzer().getVersion()); - assertEquals(Version.V_0_90_0.luceneVersion, analysisService2.analyzer("thai").analyzer().getVersion()); + assertEquals(Version.V_2_0_0.luceneVersion, analysisService2.analyzer("standard").analyzer().getVersion()); + assertEquals(Version.V_2_0_0.luceneVersion, analysisService2.analyzer("thai").analyzer().getVersion()); assertThat(analysisService2.analyzer("custom7").analyzer(), is(instanceOf(StandardAnalyzer.class))); assertEquals(org.apache.lucene.util.Version.fromBits(3,6,0), analysisService2.analyzer("custom7").analyzer().getVersion()); @@ -270,45 +270,6 @@ public class AnalysisModuleTests extends ModuleTestCase { } } - public void testBackwardCompatible() throws IOException { - Settings settings = settingsBuilder() - .put("index.analysis.analyzer.custom1.tokenizer", "standard") - .put("index.analysis.analyzer.custom1.position_offset_gap", "128") - .put("index.analysis.analyzer.custom2.tokenizer", "standard") - .put("index.analysis.analyzer.custom2.position_increment_gap", "256") - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, - Version.V_1_7_1)) - .build(); - AnalysisService analysisService = getAnalysisService(settings); - - Analyzer custom1 = analysisService.analyzer("custom1").analyzer(); - assertThat(custom1, instanceOf(CustomAnalyzer.class)); - assertThat(custom1.getPositionIncrementGap("custom1"), equalTo(128)); - - Analyzer custom2 = analysisService.analyzer("custom2").analyzer(); - assertThat(custom2, instanceOf(CustomAnalyzer.class)); - 
assertThat(custom2.getPositionIncrementGap("custom2"), equalTo(256)); - } - - public void testWithBothSettings() throws IOException { - Settings settings = settingsBuilder() - .put("index.analysis.analyzer.custom.tokenizer", "standard") - .put("index.analysis.analyzer.custom.position_offset_gap", "128") - .put("index.analysis.analyzer.custom.position_increment_gap", "256") - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, - Version.V_1_7_1)) - .build(); - try { - getAnalysisService(settings); - fail("Analyzer has both position_offset_gap and position_increment_gap should fail"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("Custom Analyzer [custom] defined both [position_offset_gap] and [position_increment_gap]" + - ", use only [position_increment_gap]")); - } - } - public void testDeprecatedPositionOffsetGap() throws IOException { Settings settings = settingsBuilder() .put("index.analysis.analyzer.custom.tokenizer", "standard") diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java deleted file mode 100644 index a163d9e42b42..000000000000 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.analysis; - -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.test.ESTokenStreamTestCase; - -import java.io.IOException; - -import static com.carrotsearch.randomizedtesting.RandomizedTest.scaledRandomIntBetween; -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED; - -public class AnalyzerBackwardsCompatTests extends ESTokenStreamTestCase { - - private void assertNoStopwordsAfter(org.elasticsearch.Version noStopwordVersion, String type) throws IOException { - final int iters = scaledRandomIntBetween(10, 100); - org.elasticsearch.Version version = org.elasticsearch.Version.CURRENT; - for (int i = 0; i < iters; i++) { - Settings.Builder builder = Settings.settingsBuilder().put("index.analysis.filter.my_stop.type", "stop"); - if (version.onOrAfter(noStopwordVersion)) { - if (random().nextBoolean()) { - builder.put(SETTING_VERSION_CREATED, version); - } - } else { - builder.put(SETTING_VERSION_CREATED, version); - } - builder.put("index.analysis.analyzer.foo.type", type); - builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()); - AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(builder.build()); - NamedAnalyzer analyzer = analysisService.analyzer("foo"); - assertNotNull(analyzer); - if (version.onOrAfter(noStopwordVersion)) { - assertAnalyzesTo(analyzer, "this is bogus", new String[]{"this", "is", "bogus"}); - } else { - 
assertAnalyzesTo(analyzer, "this is bogus", new String[]{"bogus"}); - } - version = randomVersion(); - } - } - - public void testPatternAnalyzer() throws IOException { - assertNoStopwordsAfter(org.elasticsearch.Version.V_1_0_0_RC1, "pattern"); - } - - public void testStandardHTMLStripAnalyzer() throws IOException { - assertNoStopwordsAfter(org.elasticsearch.Version.V_1_0_0_RC1, "standard_html_strip"); - } - - public void testStandardAnalyzer() throws IOException { - assertNoStopwordsAfter(org.elasticsearch.Version.V_1_0_0_Beta1, "standard"); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerProviderFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerProviderFactoryTests.java deleted file mode 100644 index 2cb8f99e7b83..000000000000 --- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerProviderFactoryTests.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.analysis; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.indices.analysis.PreBuiltAnalyzers; -import org.elasticsearch.test.ESTestCase; - -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; - -/** - * - */ -public class PreBuiltAnalyzerProviderFactoryTests extends ESTestCase { - public void testVersioningInFactoryProvider() throws Exception { - PreBuiltAnalyzerProviderFactory factory = new PreBuiltAnalyzerProviderFactory("default", AnalyzerScope.INDEX, PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT)); - - AnalyzerProvider former090AnalyzerProvider = factory.create("default", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0).build()); - AnalyzerProvider currentAnalyzerProviderReference = factory.create("default", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()); - - // would love to access the version inside of the lucene analyzer, but that is not possible... 
- assertThat(currentAnalyzerProviderReference, is(not(former090AnalyzerProvider))); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index fbb69ea1eb0a..13f30f288ccc 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -59,20 +59,14 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase { public void testThatInstancesAreTheSameAlwaysForKeywordAnalyzer() { assertThat(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.CURRENT), - is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.V_0_18_0))); + is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.V_2_0_0))); } public void testThatInstancesAreCachedAndReused() { assertThat(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT), is(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT))); - assertThat(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_0_18_0), - is(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_0_18_0))); - } - - public void testThatInstancesWithSameLuceneVersionAreReused() { - // both are lucene 4.4 and should return the same instance - assertThat(PreBuiltAnalyzers.CATALAN.getAnalyzer(Version.V_0_90_4), - is(PreBuiltAnalyzers.CATALAN.getAnalyzer(Version.V_0_90_5))); + assertThat(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_0_0), + is(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_2_0))); } public void testThatAnalyzersAreUsedInMapping() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltCharFilterFactoryFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltCharFilterFactoryFactoryTests.java deleted file mode 100644 index 39de728a4842..000000000000 --- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltCharFilterFactoryFactoryTests.java +++ /dev/null @@ -1,45 +0,0 @@ 
-/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.analysis; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.indices.analysis.PreBuiltCharFilters; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; - -import static org.hamcrest.CoreMatchers.is; - -/** - * - */ -public class PreBuiltCharFilterFactoryFactoryTests extends ESTestCase { - public void testThatDifferentVersionsCanBeLoaded() throws IOException { - PreBuiltCharFilterFactoryFactory factory = new PreBuiltCharFilterFactoryFactory(PreBuiltCharFilters.HTML_STRIP.getCharFilterFactory(Version.CURRENT)); - - CharFilterFactory former090TokenizerFactory = factory.get(null, null, "html_strip", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0).build()); - CharFilterFactory former090TokenizerFactoryCopy = factory.get(null, null, "html_strip", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0).build()); - CharFilterFactory currentTokenizerFactory = factory.get(null, null, "html_strip", 
Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()); - - assertThat(currentTokenizerFactory, is(former090TokenizerFactory)); - assertThat(currentTokenizerFactory, is(former090TokenizerFactoryCopy)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenFilterFactoryFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenFilterFactoryFactoryTests.java deleted file mode 100644 index 670df0699264..000000000000 --- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenFilterFactoryFactoryTests.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.analysis; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.indices.analysis.PreBuiltTokenFilters; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.not; - -/** - * - */ -public class PreBuiltTokenFilterFactoryFactoryTests extends ESTestCase { - public void testThatCachingWorksForCachingStrategyOne() throws IOException { - PreBuiltTokenFilterFactoryFactory factory = new PreBuiltTokenFilterFactoryFactory(PreBuiltTokenFilters.WORD_DELIMITER.getTokenFilterFactory(Version.CURRENT)); - - TokenFilterFactory former090TokenizerFactory = factory.get(null, null, "word_delimiter", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_1).build()); - TokenFilterFactory former090TokenizerFactoryCopy = factory.get(null, null, "word_delimiter", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_2).build()); - TokenFilterFactory currentTokenizerFactory = factory.get(null, null, "word_delimiter", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()); - - assertThat(currentTokenizerFactory, is(former090TokenizerFactory)); - assertThat(currentTokenizerFactory, is(former090TokenizerFactoryCopy)); - } - - public void testThatDifferentVersionsCanBeLoaded() throws IOException { - PreBuiltTokenFilterFactoryFactory factory = new PreBuiltTokenFilterFactoryFactory(PreBuiltTokenFilters.STOP.getTokenFilterFactory(Version.CURRENT)); - - TokenFilterFactory former090TokenizerFactory = factory.get(null, null, "stop", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_1).build()); - TokenFilterFactory former090TokenizerFactoryCopy = factory.get(null, null, "stop", 
Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_2).build()); - TokenFilterFactory currentTokenizerFactory = factory.get(null, null, "stop", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()); - - assertThat(currentTokenizerFactory, is(not(former090TokenizerFactory))); - assertThat(former090TokenizerFactory, is(former090TokenizerFactoryCopy)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenizerFactoryFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenizerFactoryFactoryTests.java deleted file mode 100644 index 162dbb364249..000000000000 --- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenizerFactoryFactoryTests.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.analysis; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.indices.analysis.PreBuiltTokenizers; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.not; - -/** - * - */ -public class PreBuiltTokenizerFactoryFactoryTests extends ESTestCase { - public void testThatDifferentVersionsCanBeLoaded() throws IOException { - PreBuiltTokenizerFactoryFactory factory = new PreBuiltTokenizerFactoryFactory(PreBuiltTokenizers.STANDARD.getTokenizerFactory(Version.CURRENT)); - - // different es versions, same lucene version, thus cached - TokenizerFactory former090TokenizerFactory = factory.get(null, null, "standard", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_1).build()); - TokenizerFactory former090TokenizerFactoryCopy = factory.get(null, null, "standard", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_2).build()); - TokenizerFactory currentTokenizerFactory = factory.get(null, null, "standard", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()); - - assertThat(currentTokenizerFactory, is(not(former090TokenizerFactory))); - assertThat(currentTokenizerFactory, is(not(former090TokenizerFactoryCopy))); - assertThat(former090TokenizerFactory, is(former090TokenizerFactoryCopy)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java index 37844dce69db..f0a6077b4979 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java 
@@ -40,10 +40,9 @@ import static org.hamcrest.Matchers.instanceOf; * */ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase { - public void testEnglishBackwardsCompatibility() throws IOException { + public void testEnglishFilterFactory() throws IOException { int iters = scaledRandomIntBetween(20, 100); for (int i = 0; i < iters; i++) { - Version v = VersionUtils.randomVersion(random()); Settings settings = Settings.settingsBuilder() .put("index.analysis.filter.my_english.type", "stemmer") @@ -61,19 +60,13 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase { tokenizer.setReader(new StringReader("foo bar")); TokenStream create = tokenFilter.create(tokenizer); NamedAnalyzer analyzer = analysisService.analyzer("my_english"); - - if (v.onOrAfter(Version.V_1_3_0)) { - assertThat(create, instanceOf(PorterStemFilter.class)); - assertAnalyzesTo(analyzer, "consolingly", new String[]{"consolingli"}); - } else { - assertThat(create, instanceOf(SnowballFilter.class)); - assertAnalyzesTo(analyzer, "consolingly", new String[]{"consol"}); - } + assertThat(create, instanceOf(PorterStemFilter.class)); + assertAnalyzesTo(analyzer, "consolingly", new String[]{"consolingli"}); } } - public void testPorter2BackwardsCompatibility() throws IOException { + public void testPorter2FilterFactory() throws IOException { int iters = scaledRandomIntBetween(20, 100); for (int i = 0; i < iters; i++) { @@ -95,12 +88,7 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase { TokenStream create = tokenFilter.create(tokenizer); NamedAnalyzer analyzer = analysisService.analyzer("my_porter2"); assertThat(create, instanceOf(SnowballFilter.class)); - - if (v.onOrAfter(Version.V_1_3_0)) { - assertAnalyzesTo(analyzer, "possibly", new String[]{"possibl"}); - } else { - assertAnalyzesTo(analyzer, "possibly", new String[]{"possibli"}); - } + assertAnalyzesTo(analyzer, "possibly", new String[]{"possibl"}); } } diff --git 
a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index 53d5e1744eb1..191ce5d477eb 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -433,19 +433,6 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values")); } - - mapping = jsonBuilder().startObject().startObject("type") - .startObject("_all") - .startObject("fielddata") - .field("format", "doc_values") - .endObject().endObject().endObject().endObject().string(); - Settings legacySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - try { - createIndex("test_old", legacySettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - fail(); - } catch (MapperParsingException e) { - assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values")); - } } public void testAutoBoost() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java index 558e3bc83fb8..9d6236234af1 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java @@ -56,7 +56,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase { } public void testExternalValues() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = 
VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); IndexService indexService = createIndex("test", settings); MapperRegistry mapperRegistry = new MapperRegistry( @@ -101,7 +101,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase { } public void testExternalValuesWithMultifield() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); IndexService indexService = createIndex("test", settings); Map mapperParsers = new HashMap<>(); @@ -159,7 +159,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase { } public void testExternalValuesWithMultifieldTwoLevels() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); IndexService indexService = createIndex("test", settings); Map mapperParsers = new HashMap<>(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java index ed6c574a865e..6b9282e2704c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java @@ -66,7 +66,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", 
"geo_point").field("lat_lon", true).endObject().endObject() .endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -96,7 +96,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .field("geohash", true).endObject().endObject() .endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -116,7 +116,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("geohash", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -136,7 +136,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { 
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("geohash", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -156,7 +156,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -172,7 +172,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } public void testNormalizeLatLonValuesDefault() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); // default to normalize XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); @@ -222,7 +222,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } public void 
testValidateLatLonValues() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true); if (version.before(Version.V_2_2_0)) { @@ -285,7 +285,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } public void testNoValidateLatLonValues() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true); if (version.before(Version.V_2_2_0)) { @@ -332,7 +332,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("store", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -359,7 +359,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("store", 
true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -395,7 +395,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -419,7 +419,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", true).endObject().endObject() .endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -445,7 +445,7 @@ public class GeoPointFieldMapperTests 
extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("store", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -481,7 +481,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject() .endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -506,7 +506,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("point").field("match", "point*").startObject("mapping").field("type", "geo_point") .field("lat_lon", true).endObject().endObject().endObject().endArray().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = 
createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -530,7 +530,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("store", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -556,7 +556,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("store", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -699,7 +699,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().endObject().string(); // create index and add a test point (dr5regy6rc6z) - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = 
Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test").setSettings(settings) .addMapping("pin", mapping); @@ -724,7 +724,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().endObject().endObject().string(); // create index and add a test point (dr5regy6rc6z) - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test").setSettings(settings) .addMapping("pin", mapping); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java index 5de6c517ab2f..bd23817ba50a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java @@ -57,7 +57,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false) .endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ 
-81,7 +81,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false).endObject().endObject() .endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -105,7 +105,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true) .endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -126,7 +126,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true) .field("geohash_precision", 10).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); 
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point"); @@ -140,7 +140,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).field("geohash_precision", "5m").endObject().endObject() .endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point"); @@ -154,7 +154,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").endObject().endObject() .endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index ed58bb63b650..d5efd6dcfc36 100644 --- 
a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -414,27 +414,11 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { assertThat(request.timestamp(), is("1433239200000")); } - public void testThatIndicesBefore2xMustSupportUnixTimestampsInAnyDateFormat() throws Exception { + public void testThatIndicesAfter2_0DontSupportUnixTimestampsInAnyDateFormat() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", true).field("format", "dateOptionalTime").endObject() .endObject().endObject().string(); - BytesReference source = XContentFactory.jsonBuilder().startObject().field("field", "value").endObject().bytes(); - - // - // test with older versions - Settings oldSettings = settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, randomVersionBetween(random(), Version.V_0_90_0, Version.V_1_6_0)).build(); - DocumentMapper docMapper = createIndex("old-index", oldSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); - - // both index request are successfully processed - IndexRequest oldIndexDateIndexRequest = new IndexRequest("old-index", "type", "1").source(source).timestamp("1970-01-01"); - oldIndexDateIndexRequest.process(metaData, new MappingMetaData(docMapper), true, "old-index"); - IndexRequest oldIndexTimestampIndexRequest = new IndexRequest("old-index", "type", "1").source(source).timestamp("1234567890"); - oldIndexTimestampIndexRequest.process(metaData, new MappingMetaData(docMapper), true, "old-index"); - - // // test with 2.x DocumentMapper currentMapper = createIndex("new-index").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); MetaData newMetaData 
= client().admin().cluster().prepareState().get().getState().getMetaData(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java index ab0182aa0efe..2e2f5f2446fd 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java @@ -256,7 +256,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { } public void testTimestampParsing() throws IOException { - IndexService indexService = createIndex("test", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build()); + IndexService indexService = createIndex("test"); XContentBuilder indexMapping = XContentFactory.jsonBuilder(); boolean enabled = randomBoolean(); indexMapping.startObject() diff --git a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java index c5cfa7ebd367..560ee1881f00 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java @@ -249,8 +249,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase 0 || shardContext.indexVersionCreated().before(Version.V_1_4_0_Beta1)) { + if (getCurrentTypes().length > 0) { Query luceneQuery = queryBuilder.toQuery(shardContext); assertThat(luceneQuery, instanceOf(TermQuery.class)); TermQuery termQuery = (TermQuery) luceneQuery; diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java index 09371c38dabc..4597765c11cc 100644 --- 
a/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java @@ -43,7 +43,6 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) -@ESBackcompatTestCase.CompatibilityVersion(version = Version.V_1_2_0_ID) // we throw an exception if we create an index with _field_names that is 1.3 public class PreBuiltAnalyzerIntegrationIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java index 37a0f4e358e0..04bff31057d5 100644 --- a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java @@ -176,12 +176,12 @@ public class PluginInfoTests extends ESTestCase { "description", "fake desc", "name", "my_plugin", "version", "1.0", - "elasticsearch.version", Version.V_1_7_0.toString()); + "elasticsearch.version", Version.V_2_0_0.toString()); try { PluginInfo.readFromProperties(pluginDir); fail("expected old elasticsearch version exception"); } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("Was designed for version [1.7.0]")); + assertTrue(e.getMessage().contains("Was designed for version [2.0.0]")); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java index cc96555c3729..2200e0e30ca9 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -65,13 +65,6 @@ public class DateHistogramOffsetIT extends 
ESIntegTestCase { return Collections.singleton(AssertingLocalTransport.TestPlugin.class); } - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put(AssertingLocalTransport.ASSERTING_TRANSPORT_MIN_VERSION_KEY.getKey(), Version.V_1_4_0_Beta1).build(); - } - @Before public void beforeEachTest() throws IOException { prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java index 8312f4aca048..6d2d11e2799d 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java @@ -67,7 +67,7 @@ public class GeoDistanceIT extends ESIntegTestCase { return pluginList(InternalSettingsPlugin.class); // uses index.version.created } - private Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + private Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); private IndexRequestBuilder indexCity(String idx, String name, String... 
latLons) throws Exception { XContentBuilder source = jsonBuilder().startObject().field("city", name); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java index 22413a7b319d..5aa7ba44466f 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java @@ -63,7 +63,7 @@ public class GeoHashGridIT extends ESIntegTestCase { return pluginList(InternalSettingsPlugin.class); // uses index.version.created } - private Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + private Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); static ObjectIntMap expectedDocCountsForGeoHash = null; static ObjectIntMap multiValuedExpectedDocCountsForGeoHash = null; diff --git a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java index fad1cc3a0ef8..abd73a36adf4 100644 --- a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java @@ -20,15 +20,12 @@ package org.elasticsearch.search.innerhits; import org.apache.lucene.util.ArrayUtil; -import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.health.ClusterHealthStatus; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.HasChildQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; 
import org.elasticsearch.index.query.support.QueryInnerHits; import org.elasticsearch.plugins.Plugin; @@ -73,10 +70,6 @@ import static org.hamcrest.Matchers.nullValue; /** */ public class InnerHitsIT extends ESIntegTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(MockScriptEngine.TestPlugin.class, InternalSettingsPlugin.class); - } public void testSimpleNested() throws Exception { assertAcked(prepareCreate("articles").addMapping("article", jsonBuilder().startObject().startObject("article").startObject("properties") @@ -753,160 +746,6 @@ public class InnerHitsIT extends ESIntegTestCase { assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue()); } - public void testNestedInnerHitsWithStoredFieldsAndNoSourceBackcompat() throws Exception { - assertAcked(prepareCreate("articles") - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id) - .addMapping("article", jsonBuilder().startObject() - .startObject("_source").field("enabled", false).endObject() - .startObject("properties") - .startObject("comments") - .field("type", "nested") - .startObject("properties") - .startObject("message").field("type", "text").field("store", true).endObject() - .endObject() - .endObject() - .endObject() - .endObject() - ) - ); - - List requests = new ArrayList<>(); - requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject() - .field("title", "quick brown fox") - .startObject("comments").field("message", "fox eat quick").endObject() - .endObject())); - indexRandom(true, requests); - - SearchResponse response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, new InnerHitsBuilder.InnerHit().field("comments.message")))) - .get(); - assertNoFailures(response); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).id(), 
equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue()); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).fields().get("comments.message").getValue(), equalTo("fox eat quick")); - } - - public void testNestedInnerHitsWithHighlightOnStoredFieldBackcompat() throws Exception { - assertAcked(prepareCreate("articles") - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id) - .addMapping("article", jsonBuilder().startObject() - .startObject("_source").field("enabled", false).endObject() - .startObject("properties") - .startObject("comments") - .field("type", "nested") - .startObject("properties") - .startObject("message").field("type", "text").field("store", true).endObject() - .endObject() - .endObject() - .endObject() - .endObject() - ) - ); - - List requests = new ArrayList<>(); - requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject() - .field("title", "quick brown fox") - .startObject("comments").field("message", "fox eat quick").endObject() - .endObject())); - indexRandom(true, requests); - InnerHitsBuilder.InnerHit builder = new InnerHitsBuilder.InnerHit(); - builder.highlighter(new HighlightBuilder().field("comments.message")); - SearchResponse response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, builder))) - .get(); - 
assertNoFailures(response); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue()); - assertThat(String.valueOf(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).highlightFields().get("comments.message").getFragments()[0]), equalTo("fox eat quick")); - } - - public void testNestedInnerHitsWithExcludeSourceBackcompat() throws Exception { - assertAcked(prepareCreate("articles").setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id) - .addMapping("article", jsonBuilder().startObject() - .startObject("_source").field("excludes", new String[]{"comments"}).endObject() - .startObject("properties") - .startObject("comments") - .field("type", "nested") - .startObject("properties") - .startObject("message").field("type", "text").field("store", true).endObject() - .endObject() - .endObject() - .endObject() - .endObject() - ) - ); - - List requests = new ArrayList<>(); - requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject() - .field("title", "quick brown fox") - .startObject("comments").field("message", "fox eat quick").endObject() - .endObject())); - indexRandom(true, requests); - InnerHitsBuilder.InnerHit builder = new InnerHitsBuilder.InnerHit(); - builder.field("comments.message"); - builder.setFetchSource(true); - SearchResponse response = 
client().prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, builder))) - .get(); - assertNoFailures(response); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue()); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).fields().get("comments.message").getValue(), equalTo("fox eat quick")); - } - - public void testNestedInnerHitsHiglightWithExcludeSourceBackcompat() throws Exception { - assertAcked(prepareCreate("articles").setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id) - .addMapping("article", jsonBuilder().startObject() - .startObject("_source").field("excludes", new String[]{"comments"}).endObject() - .startObject("properties") - .startObject("comments") - .field("type", "nested") - .startObject("properties") - .startObject("message").field("type", "text").field("store", true).endObject() - .endObject() - .endObject() - .endObject() - .endObject() - ) - ); - - List requests = new ArrayList<>(); - requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject() - .field("title", "quick brown fox") - .startObject("comments").field("message", "fox eat quick").endObject() - .endObject())); - indexRandom(true, requests); - InnerHitsBuilder.InnerHit builder = new 
InnerHitsBuilder.InnerHit(); - builder.highlighter(new HighlightBuilder().field("comments.message")); - SearchResponse response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, builder))) - .get(); - assertNoFailures(response); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue()); - assertThat(String.valueOf(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).highlightFields().get("comments.message").getFragments()[0]), equalTo("fox eat quick")); - } - public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { assertAcked(prepareCreate("articles") .addMapping("article", jsonBuilder().startObject() diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java index e15b62147cfb..298f230d64ab 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java @@ -74,7 +74,7 @@ public class RestTestParserTests extends ESTestCase { "\"Get type mapping - pre 1.0\":\n" + "\n" + " - skip:\n" + - " version: \"0.90.9 - \"\n" + + " version: \"2.0.0 - \"\n" + " 
reason: \"for newer versions the index name is always returned\"\n" + "\n" + " - do:\n" + @@ -121,7 +121,7 @@ public class RestTestParserTests extends ESTestCase { assertThat(restTestSuite.getTestSections().get(1).getName(), equalTo("Get type mapping - pre 1.0")); assertThat(restTestSuite.getTestSections().get(1).getSkipSection().isEmpty(), equalTo(false)); assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getReason(), equalTo("for newer versions the index name is always returned")); - assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getLowerVersion(), equalTo(Version.V_0_90_9)); + assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getLowerVersion(), equalTo(Version.V_2_0_0)); assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getUpperVersion(), equalTo(Version.CURRENT)); assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().size(), equalTo(3)); assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().get(0), instanceOf(DoSection.class)); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java index 9dd388056d5b..b3fe1f0f23bc 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java @@ -57,7 +57,7 @@ public class SetupSectionParserTests extends AbstractParserTestCase { public void testParseSetupAndSkipSectionNoSkip() throws Exception { parser = YamlXContent.yamlXContent.createParser( " - skip:\n" + - " version: \"0.90.0 - 0.90.7\"\n" + + " version: \"2.0.0 - 2.3.0\"\n" + " reason: \"Update doesn't return metadata fields, waiting for #3259\"\n" + " - do:\n" + " index1:\n" + @@ -79,8 +79,8 @@ public class SetupSectionParserTests extends AbstractParserTestCase { assertThat(setupSection, 
notNullValue()); assertThat(setupSection.getSkipSection().isEmpty(), equalTo(false)); assertThat(setupSection.getSkipSection(), notNullValue()); - assertThat(setupSection.getSkipSection().getLowerVersion(), equalTo(Version.V_0_90_0)); - assertThat(setupSection.getSkipSection().getUpperVersion(), equalTo(Version.V_0_90_7)); + assertThat(setupSection.getSkipSection().getLowerVersion(), equalTo(Version.V_2_0_0)); + assertThat(setupSection.getSkipSection().getUpperVersion(), equalTo(Version.V_2_3_0)); assertThat(setupSection.getSkipSection().getReason(), equalTo("Update doesn't return metadata fields, waiting for #3259")); assertThat(setupSection.getDoSections().size(), equalTo(2)); assertThat(setupSection.getDoSections().get(0).getApiCallSection().getApi(), equalTo("index1")); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java index 5864e78134dd..39b0f284b5ea 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java @@ -34,7 +34,7 @@ import static org.hamcrest.Matchers.nullValue; public class SkipSectionParserTests extends AbstractParserTestCase { public void testParseSkipSectionVersionNoFeature() throws Exception { parser = YamlXContent.yamlXContent.createParser( - "version: \" - 0.90.2\"\n" + + "version: \" - 2.1.0\"\n" + "reason: Delete ignores the parent param" ); @@ -44,7 +44,7 @@ public class SkipSectionParserTests extends AbstractParserTestCase { assertThat(skipSection, notNullValue()); assertThat(skipSection.getLowerVersion(), equalTo(VersionUtils.getFirstVersion())); - assertThat(skipSection.getUpperVersion(), equalTo(Version.V_0_90_2)); + assertThat(skipSection.getUpperVersion(), equalTo(Version.V_2_1_0)); assertThat(skipSection.getFeatures().size(), equalTo(0)); 
assertThat(skipSection.getReason(), equalTo("Delete ignores the parent param")); } @@ -144,4 +144,4 @@ public class SkipSectionParserTests extends AbstractParserTestCase { assertThat(e.getMessage(), is("version or features is mandatory within skip section")); } } -} \ No newline at end of file +} diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java index c157610b6454..d034ae56a716 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java @@ -70,7 +70,7 @@ public class TestSectionParserTests extends AbstractParserTestCase { String yaml = "\"First test section\": \n" + " - skip:\n" + - " version: \"0.90.0 - 0.90.7\"\n" + + " version: \"2.0.0 - 2.2.0\"\n" + " reason: \"Update doesn't return metadata fields, waiting for #3259\"\n" + " - do :\n" + " catch: missing\n" + @@ -87,8 +87,8 @@ public class TestSectionParserTests extends AbstractParserTestCase { assertThat(testSection, notNullValue()); assertThat(testSection.getName(), equalTo("First test section")); assertThat(testSection.getSkipSection(), notNullValue()); - assertThat(testSection.getSkipSection().getLowerVersion(), equalTo(Version.V_0_90_0)); - assertThat(testSection.getSkipSection().getUpperVersion(), equalTo(Version.V_0_90_7)); + assertThat(testSection.getSkipSection().getLowerVersion(), equalTo(Version.V_2_0_0)); + assertThat(testSection.getSkipSection().getUpperVersion(), equalTo(Version.V_2_2_0)); assertThat(testSection.getSkipSection().getReason(), equalTo("Update doesn't return metadata fields, waiting for #3259")); assertThat(testSection.getExecutableSections().size(), equalTo(2)); DoSection doSection = (DoSection)testSection.getExecutableSections().get(0); diff --git 
a/test/framework/src/test/java/org/elasticsearch/test/test/VersionUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/VersionUtilsTests.java index cc2f613eb27f..ea1929a55b06 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/VersionUtilsTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/VersionUtilsTests.java @@ -32,7 +32,7 @@ public class VersionUtilsTests extends ESTestCase { assertTrue(allVersions.get(i).before(allVersions.get(j))); } } - + public void testRandomVersionBetween() { // full range Version got = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), Version.CURRENT); @@ -46,34 +46,34 @@ public class VersionUtilsTests extends ESTestCase { assertTrue(got.onOrBefore(Version.CURRENT)); // sub range - got = VersionUtils.randomVersionBetween(random(), Version.V_0_90_12, Version.V_1_4_5); - assertTrue(got.onOrAfter(Version.V_0_90_12)); - assertTrue(got.onOrBefore(Version.V_1_4_5)); + got = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0); + assertTrue(got.onOrAfter(Version.V_2_0_0)); + assertTrue(got.onOrBefore(Version.V_5_0_0)); // unbounded lower - got = VersionUtils.randomVersionBetween(random(), null, Version.V_1_4_5); + got = VersionUtils.randomVersionBetween(random(), null, Version.V_5_0_0); assertTrue(got.onOrAfter(VersionUtils.getFirstVersion())); - assertTrue(got.onOrBefore(Version.V_1_4_5)); + assertTrue(got.onOrBefore(Version.V_5_0_0)); got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.allVersions().get(0)); assertTrue(got.onOrAfter(VersionUtils.getFirstVersion())); assertTrue(got.onOrBefore(VersionUtils.allVersions().get(0))); // unbounded upper - got = VersionUtils.randomVersionBetween(random(), Version.V_0_90_12, null); - assertTrue(got.onOrAfter(Version.V_0_90_12)); + got = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, null); + assertTrue(got.onOrAfter(Version.V_2_0_0)); 
assertTrue(got.onOrBefore(Version.CURRENT)); got = VersionUtils.randomVersionBetween(random(), VersionUtils.getPreviousVersion(), null); assertTrue(got.onOrAfter(VersionUtils.getPreviousVersion())); assertTrue(got.onOrBefore(Version.CURRENT)); - + // range of one got = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getFirstVersion()); assertEquals(got, VersionUtils.getFirstVersion()); got = VersionUtils.randomVersionBetween(random(), Version.CURRENT, Version.CURRENT); assertEquals(got, Version.CURRENT); - got = VersionUtils.randomVersionBetween(random(), Version.V_1_2_4, Version.V_1_2_4); - assertEquals(got, Version.V_1_2_4); - + got = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.V_5_0_0); + assertEquals(got, Version.V_5_0_0); + // implicit range of one got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.getFirstVersion()); assertEquals(got, VersionUtils.getFirstVersion()); From a29f734b75be8a2badf341598a80f6a5a86a9b0d Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 7 Mar 2016 11:55:56 +0100 Subject: [PATCH 070/320] Only assert that analyzers are the same if lucene version is the same --- .../index/analysis/PreBuiltAnalyzerTests.java | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index 13f30f288ccc..06a242c8277e 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -63,10 +63,14 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase { } public void testThatInstancesAreCachedAndReused() { - assertThat(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT), - is(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT))); - 
assertThat(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_0_0), - is(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_2_0))); + assertSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT), + PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT)); + // same lucene version should be cached + assertSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_0_0), + PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_0_1)); + + assertNotSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_0_0), + PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_2_0)); } public void testThatAnalyzersAreUsedInMapping() throws IOException { From 46c295d029b72be9988c2458437a784e229fabe1 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 7 Mar 2016 12:03:05 +0100 Subject: [PATCH 071/320] use true/false rather than yes/no in tests --- core/src/test/java/org/elasticsearch/get/GetActionIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/get/GetActionIT.java b/core/src/test/java/org/elasticsearch/get/GetActionIT.java index c797a5a1968b..64d293e8bd0b 100644 --- a/core/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/core/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -875,7 +875,7 @@ public class GetActionIT extends ESIntegTestCase { void indexSingleDocumentWithStringFieldsGeneratedFromText(boolean stored, boolean sourceEnabled) { - String storedString = stored ? "yes" : "no"; + String storedString = stored ? "true" : "false"; String createIndexSource = "{\n" + " \"settings\": {\n" + " \"index.translog.flush_threshold_size\": \"1pb\",\n" + @@ -926,7 +926,7 @@ public class GetActionIT extends ESIntegTestCase { } void indexSingleDocumentWithNumericFieldsGeneratedFromText(boolean stored, boolean sourceEnabled) { - String storedString = stored ? "yes" : "no"; + String storedString = stored ? 
"true" : "false"; String createIndexSource = "{\n" + " \"settings\": {\n" + " \"index.translog.flush_threshold_size\": \"1pb\",\n" + From 7e4c4cd8d9138fad13de701d81ed96f160fa4e90 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 7 Mar 2016 12:17:32 +0100 Subject: [PATCH 072/320] bring back accidentially removed MockScriptEngine plugin --- .../java/org/elasticsearch/search/innerhits/InnerHitsIT.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java index abd73a36adf4..084e07e03893 100644 --- a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java @@ -70,6 +70,10 @@ import static org.hamcrest.Matchers.nullValue; /** */ public class InnerHitsIT extends ESIntegTestCase { + @Override + protected Collection> nodePlugins() { + return pluginList(MockScriptEngine.TestPlugin.class); + } public void testSimpleNested() throws Exception { assertAcked(prepareCreate("articles").addMapping("article", jsonBuilder().startObject().startObject("article").startObject("properties") From d9ddd3fa458819031563237965c084ea756d391f Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 7 Mar 2016 12:23:30 +0100 Subject: [PATCH 073/320] Remove leniency from segments info integrity checks Closes #16973 --- .../main/java/org/elasticsearch/common/lucene/Lucene.java | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 39f34ad867e5..54e5738e78c5 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -235,11 +235,7 @@ public class Lucene { @Override protected Object doBody(String segmentFileName) throws 
IOException { try (IndexInput input = directory.openInput(segmentFileName, IOContext.READ)) { - final int format = input.readInt(); - if (format == CodecUtil.CODEC_MAGIC) { - CodecUtil.checksumEntireFile(input); - } - // legacy.... + CodecUtil.checksumEntireFile(input); } return null; } From f96900013c5a00d8e641c3d213096849e1dd6905 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 7 Mar 2016 12:44:53 +0100 Subject: [PATCH 074/320] Remove bw compat from murmur3 mapper --- .../mapper/murmur3/Murmur3FieldMapper.java | 24 ++++-------- .../murmur3/Murmur3FieldMapperTests.java | 39 ------------------- 2 files changed, 8 insertions(+), 55 deletions(-) diff --git a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java index ce78c75d783f..76ffaa16ae6a 100644 --- a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java +++ b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java @@ -72,12 +72,10 @@ public class Murmur3FieldMapper extends LongFieldMapper { @Override protected void setupFieldType(BuilderContext context) { super.setupFieldType(context); - if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0_beta1)) { - fieldType.setIndexOptions(IndexOptions.NONE); - defaultFieldType.setIndexOptions(IndexOptions.NONE); - fieldType.setHasDocValues(true); - defaultFieldType.setHasDocValues(true); - } + fieldType.setIndexOptions(IndexOptions.NONE); + defaultFieldType.setIndexOptions(IndexOptions.NONE); + fieldType.setHasDocValues(true); + defaultFieldType.setHasDocValues(true); } @Override @@ -97,17 +95,11 @@ public class Murmur3FieldMapper extends LongFieldMapper { Builder builder = new Builder(name); // tweaking these settings is no longer allowed, the entire purpose of murmur3 fields is to store a hash - if 
(parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) { - if (node.get("doc_values") != null) { - throw new MapperParsingException("Setting [doc_values] cannot be modified for field [" + name + "]"); - } - if (node.get("index") != null) { - throw new MapperParsingException("Setting [index] cannot be modified for field [" + name + "]"); - } + if (node.get("doc_values") != null) { + throw new MapperParsingException("Setting [doc_values] cannot be modified for field [" + name + "]"); } - - if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - builder.indexOptions(IndexOptions.DOCS); + if (node.get("index") != null) { + throw new MapperParsingException("Setting [index] cannot be modified for field [" + name + "]"); } parseNumberField(builder, name, node, parserContext); diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java index 072c0db3e591..621ce4b8a1f5 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java @@ -44,11 +44,6 @@ import java.util.Collections; public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - MapperRegistry mapperRegistry; IndexService indexService; DocumentMapperParser parser; @@ -131,38 +126,4 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { assertTrue(e.getMessage().contains("Setting [index] cannot be modified")); } } - - public void testDocValuesSettingBackcompat() throws Exception { - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - indexService = 
createIndex("test_bwc", settings); - parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(), - indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "murmur3") - .field("doc_values", false) - .endObject().endObject().endObject().endObject().string(); - - DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); - Murmur3FieldMapper mapper = (Murmur3FieldMapper)docMapper.mappers().getMapper("field"); - assertFalse(mapper.fieldType().hasDocValues()); - } - - public void testIndexSettingBackcompat() throws Exception { - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - indexService = createIndex("test_bwc", settings); - parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(), - indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "murmur3") - .field("index", "not_analyzed") - .endObject().endObject().endObject().endObject().string(); - - DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); - Murmur3FieldMapper mapper = (Murmur3FieldMapper)docMapper.mappers().getMapper("field"); - assertEquals(IndexOptions.DOCS, mapper.fieldType().indexOptions()); - } - - // TODO: add more tests } From fdfb0e56f686054617b235358856004e719c9e1c Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 7 Mar 2016 12:48:02 +0100 Subject: [PATCH 075/320] Remove bw compat from size mapper --- .../mapper/murmur3/Murmur3FieldMapper.java | 1 - 
.../murmur3/Murmur3FieldMapperTests.java | 6 --- .../index/mapper/size/SizeFieldMapper.java | 5 --- .../index/mapper/size/SizeMappingTests.java | 41 ------------------- 4 files changed, 53 deletions(-) diff --git a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java index 76ffaa16ae6a..802ca1d7653c 100644 --- a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java +++ b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java @@ -26,7 +26,6 @@ import java.util.Map; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.hash.MurmurHash3; import org.elasticsearch.common.settings.Settings; diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java index 621ce4b8a1f5..16865eb98b64 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java @@ -22,10 +22,7 @@ package org.elasticsearch.index.mapper.murmur3; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import 
org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; @@ -33,13 +30,10 @@ import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.indices.mapper.MapperRegistry; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; import org.junit.Before; import java.util.Arrays; -import java.util.Collection; import java.util.Collections; public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { diff --git a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java index 984e83a438e3..cfc7e29486cd 100644 --- a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java +++ b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.size; import org.apache.lucene.document.Field; -import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -39,7 +38,6 @@ import java.util.List; import java.util.Map; import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseStore; public class SizeFieldMapper extends MetadataFieldMapper { @@ -94,9 +92,6 @@ public class SizeFieldMapper extends MetadataFieldMapper { if (fieldName.equals("enabled")) { builder.enabled(lenientNodeBooleanValue(fieldNode) ? 
EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED); iterator.remove(); - } else if (fieldName.equals("store") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - builder.store(parseStore(fieldName, fieldNode.toString(), parserContext)); - iterator.remove(); } } return builder; diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java index d6b64df9e5d4..174520cfada5 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java @@ -19,30 +19,20 @@ package org.elasticsearch.index.mapper.size; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.indices.IndicesModule; -import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; import org.junit.Before; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -55,15 
+45,9 @@ public class SizeMappingTests extends ESSingleNodeTestCase { MapperService mapperService; DocumentMapperParser parser; - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); // uses index.version.created - } - @Before public void before() { indexService = createIndex("test"); - Map metadataMappers = new HashMap<>(); IndicesModule indices = new IndicesModule(); indices.registerMetadataMapper(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser()); mapperService = new MapperService(indexService.getIndexSettings(), indexService.analysisService(), indexService.similarityService(), indices.getMapperRegistry(), indexService::newQueryShardContext); @@ -87,31 +71,6 @@ public class SizeMappingTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue()); } - public void testSizeEnabledAndStoredBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_size").field("enabled", true).field("store", "yes").endObject() - .endObject().endObject().string(); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - - indexService = createIndex("test2", indexSettings); - MapperRegistry mapperRegistry = new MapperRegistry( - Collections.emptyMap(), - Collections.singletonMap(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser())); - parser = new DocumentMapperParser(indexService.getIndexSettings(), mapperService, - indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext); - DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); - - BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); - ParsedDocument doc = 
docMapper.parse(SourceToParse.source(source).type("type").id("1")); - - assertThat(doc.rootDoc().getField("_size").fieldType().stored(), equalTo(true)); - assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue()); - } - public void testSizeDisabled() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", false).endObject() From 82d01e431557296ab279c0936a145a2047c2920f Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 29 Feb 2016 17:41:54 +0100 Subject: [PATCH 076/320] Added ingest info to node info API, which contains a list of available processors. Internally the put pipeline API uses this information in node info API to validate if all specified processors in a pipeline exist on all nodes in the cluster. --- .../admin/cluster/node/info/NodeInfo.java | 24 +++- .../cluster/node/info/NodesInfoRequest.java | 21 ++++ .../node/info/NodesInfoRequestBuilder.java | 8 ++ .../cluster/node/info/NodesInfoResponse.java | 3 + .../node/info/TransportNodesInfoAction.java | 4 +- .../stats/TransportClusterStatsAction.java | 2 +- .../ingest/PutPipelineTransportAction.java | 37 +++++- .../elasticsearch/ingest/IngestService.java | 15 +++ .../elasticsearch/ingest/PipelineStore.java | 38 ++++-- .../ingest/ProcessorsRegistry.java | 1 + .../ingest/core/CompoundProcessor.java | 21 ++++ .../elasticsearch/ingest/core/IngestInfo.java | 100 ++++++++++++++++ .../elasticsearch/ingest/core/Pipeline.java | 8 ++ .../ingest/core/ProcessorInfo.java | 81 +++++++++++++ .../node/service/NodeService.java | 8 +- .../node/info/RestNodesInfoAction.java | 3 +- ...gestProcessorNotInstalledOnAllNodesIT.java | 112 ++++++++++++++++++ .../ingest/PipelineStoreTests.java | 43 +++++++ .../ingest/core/PipelineFactoryTests.java | 12 ++ .../nodesinfo/NodeInfoStreamingTests.java | 5 +- docs/reference/cluster/nodes-info.asciidoc | 70 ++++++++++- 
docs/reference/ingest/ingest-node.asciidoc | 14 +++ .../test/ingest_attachment/10_basic.yaml | 10 +- .../test/ingest_geoip/10_basic.yaml | 10 +- .../rest-api-spec/api/nodes.info.json | 2 +- .../rest-api-spec/test/ingest/10_crud.yaml | 25 ++++ 26 files changed, 649 insertions(+), 28 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/ingest/core/IngestInfo.java create mode 100644 core/src/main/java/org/elasticsearch/ingest/core/ProcessorInfo.java create mode 100644 core/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java index 1fa64d5e7b7f..11c542863b5a 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.http.HttpInfo; +import org.elasticsearch.ingest.core.IngestInfo; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.monitor.os.OsInfo; import org.elasticsearch.monitor.process.ProcessInfo; @@ -74,12 +75,15 @@ public class NodeInfo extends BaseNodeResponse { @Nullable private PluginsAndModules plugins; - NodeInfo() { + @Nullable + private IngestInfo ingest; + + public NodeInfo() { } public NodeInfo(Version version, Build build, DiscoveryNode node, @Nullable Map serviceAttributes, @Nullable Settings settings, @Nullable OsInfo os, @Nullable ProcessInfo process, @Nullable JvmInfo jvm, @Nullable ThreadPoolInfo threadPool, - @Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsAndModules plugins) { + @Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable 
PluginsAndModules plugins, @Nullable IngestInfo ingest) { super(node); this.version = version; this.build = build; @@ -92,6 +96,7 @@ public class NodeInfo extends BaseNodeResponse { this.transport = transport; this.http = http; this.plugins = plugins; + this.ingest = ingest; } /** @@ -176,6 +181,11 @@ public class NodeInfo extends BaseNodeResponse { return this.plugins; } + @Nullable + public IngestInfo getIngest() { + return ingest; + } + public static NodeInfo readNodeInfo(StreamInput in) throws IOException { NodeInfo nodeInfo = new NodeInfo(); nodeInfo.readFrom(in); @@ -220,6 +230,10 @@ public class NodeInfo extends BaseNodeResponse { plugins = new PluginsAndModules(); plugins.readFrom(in); } + if (in.readBoolean()) { + ingest = new IngestInfo(); + ingest.readFrom(in); + } } @Override @@ -285,5 +299,11 @@ public class NodeInfo extends BaseNodeResponse { out.writeBoolean(true); plugins.writeTo(out); } + if (ingest == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + ingest.writeTo(out); + } } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java index 46a36f1d8a31..66c5cfd65d42 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java @@ -38,6 +38,7 @@ public class NodesInfoRequest extends BaseNodesRequest { private boolean transport = true; private boolean http = true; private boolean plugins = true; + private boolean ingest = true; public NodesInfoRequest() { } @@ -62,6 +63,7 @@ public class NodesInfoRequest extends BaseNodesRequest { transport = false; http = false; plugins = false; + ingest = false; return this; } @@ -77,6 +79,7 @@ public class NodesInfoRequest extends BaseNodesRequest { transport = true; http = true; plugins = true; + ingest = true; return 
this; } @@ -202,6 +205,22 @@ public class NodesInfoRequest extends BaseNodesRequest { return plugins; } + /** + * Should information about ingest be returned + * @param ingest true if you want info + */ + public NodesInfoRequest ingest(boolean ingest) { + this.ingest = ingest; + return this; + } + + /** + * @return true if information about ingest is requested + */ + public boolean ingest() { + return ingest; + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); @@ -213,6 +232,7 @@ public class NodesInfoRequest extends BaseNodesRequest { transport = in.readBoolean(); http = in.readBoolean(); plugins = in.readBoolean(); + ingest = in.readBoolean(); } @Override @@ -226,5 +246,6 @@ public class NodesInfoRequest extends BaseNodesRequest { out.writeBoolean(transport); out.writeBoolean(http); out.writeBoolean(plugins); + out.writeBoolean(ingest); } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequestBuilder.java index d73b3d47dfb8..fc484012379b 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequestBuilder.java @@ -110,4 +110,12 @@ public class NodesInfoRequestBuilder extends NodesOperationRequestBuilder implements To if (nodeInfo.getPlugins() != null) { nodeInfo.getPlugins().toXContent(builder, params); } + if (nodeInfo.getIngest() != null) { + nodeInfo.getIngest().toXContent(builder, params); + } builder.endObject(); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java index 2a763910dddf..b14450f9eb19 100644 --- 
a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java @@ -80,7 +80,7 @@ public class TransportNodesInfoAction extends TransportNodesAction shardsStats = new ArrayList<>(); for (IndexService indexService : indicesService) { diff --git a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java index 31a911207ab4..aafd9ee75a43 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java @@ -20,6 +20,10 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; @@ -27,24 +31,32 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.PipelineStore; +import org.elasticsearch.ingest.core.IngestInfo; import org.elasticsearch.node.service.NodeService; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.TransportService; +import java.util.HashMap; +import java.util.Map; + public class PutPipelineTransportAction extends TransportMasterNodeAction { private final PipelineStore pipelineStore; private final ClusterService clusterService; + private final TransportNodesInfoAction nodesInfoAction; @Inject public PutPipelineTransportAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, NodeService nodeService) { + IndexNameExpressionResolver indexNameExpressionResolver, NodeService nodeService, + TransportNodesInfoAction nodesInfoAction) { super(settings, PutPipelineAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, PutPipelineRequest::new); this.clusterService = clusterService; + this.nodesInfoAction = nodesInfoAction; this.pipelineStore = nodeService.getIngestService().getPipelineStore(); } @@ -60,7 +72,28 @@ public class PutPipelineTransportAction extends TransportMasterNodeAction listener) throws Exception { - pipelineStore.put(clusterService, request, listener); + NodesInfoRequest nodesInfoRequest = new NodesInfoRequest(); + nodesInfoRequest.clear(); + nodesInfoRequest.ingest(true); + nodesInfoAction.execute(nodesInfoRequest, new ActionListener() { + @Override + public void onResponse(NodesInfoResponse nodeInfos) { + try { + Map ingestInfos = new HashMap<>(); + for (NodeInfo nodeInfo : nodeInfos) { + ingestInfos.put(nodeInfo.getNode(), nodeInfo.getIngest()); + } + pipelineStore.put(clusterService, ingestInfos, request, listener); + } catch (Exception e) { + onFailure(e); + } + } + + @Override + public void onFailure(Throwable e) { + listener.onFailure(e); + } + }); } @Override diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestService.java b/core/src/main/java/org/elasticsearch/ingest/IngestService.java index 
78a1f66fb802..b38f7470e399 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -20,11 +20,17 @@ package org.elasticsearch.ingest; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.ingest.core.IngestInfo; +import org.elasticsearch.ingest.core.Processor; +import org.elasticsearch.ingest.core.ProcessorInfo; import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; import java.io.Closeable; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; /** * Holder class for several ingest related services. @@ -53,6 +59,15 @@ public class IngestService implements Closeable { pipelineStore.buildProcessorFactoryRegistry(processorsRegistryBuilder, scriptService); } + public IngestInfo info() { + Map processorFactories = pipelineStore.getProcessorRegistry().getProcessorFactories(); + List processorInfoList = new ArrayList<>(processorFactories.size()); + for (Map.Entry entry : processorFactories.entrySet()) { + processorInfoList.add(new ProcessorInfo(entry.getKey())); + } + return new IngestInfo(processorInfoList); + } + @Override public void close() throws IOException { pipelineStore.close(); diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java index 3999f357b864..ac2df419f55a 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java @@ -20,6 +20,7 @@ package org.elasticsearch.ingest; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ingest.DeletePipelineRequest; @@ -31,12 +32,15 @@ import 
org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.ingest.core.IngestInfo; import org.elasticsearch.ingest.core.Pipeline; import org.elasticsearch.ingest.core.Processor; +import org.elasticsearch.ingest.core.ProcessorInfo; import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.script.ScriptService; @@ -47,6 +51,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; public class PipelineStore extends AbstractComponent implements Closeable, ClusterStateListener { @@ -130,8 +135,8 @@ public class PipelineStore extends AbstractComponent implements Closeable, Clust pipelines.remove(request.getId()); ClusterState.Builder newState = ClusterState.builder(currentState); newState.metaData(MetaData.builder(currentState.getMetaData()) - .putCustom(IngestMetadata.TYPE, new IngestMetadata(pipelines)) - .build()); + .putCustom(IngestMetadata.TYPE, new IngestMetadata(pipelines)) + .build()); return newState.build(); } } @@ -139,15 +144,9 @@ public class PipelineStore extends AbstractComponent implements Closeable, Clust /** * Stores the specified pipeline definition in the request. 
*/ - public void put(ClusterService clusterService, PutPipelineRequest request, ActionListener listener) { + public void put(ClusterService clusterService, Map ingestInfos, PutPipelineRequest request, ActionListener listener) throws Exception { // validates the pipeline and processor configuration before submitting a cluster update task: - Map pipelineConfig = XContentHelper.convertToMap(request.getSource(), false).v2(); - try { - factory.create(request.getId(), pipelineConfig, processorRegistry); - } catch(Exception e) { - listener.onFailure(e); - return; - } + validatePipeline(ingestInfos, request); clusterService.submitStateUpdateTask("put-pipeline-" + request.getId(), new AckedClusterStateUpdateTask(request, listener) { @Override @@ -162,6 +161,25 @@ public class PipelineStore extends AbstractComponent implements Closeable, Clust }); } + void validatePipeline(Map ingestInfos, PutPipelineRequest request) throws Exception { + if (ingestInfos.isEmpty()) { + throw new IllegalStateException("Ingest info is empty"); + } + + Map pipelineConfig = XContentHelper.convertToMap(request.getSource(), false).v2(); + Pipeline pipeline = factory.create(request.getId(), pipelineConfig, processorRegistry); + List exceptions = new ArrayList<>(); + for (Processor processor : pipeline.flattenAllProcessors()) { + for (Map.Entry entry : ingestInfos.entrySet()) { + if (entry.getValue().containsProcessor(processor.getType()) == false) { + String message = "Processor type [" + processor.getType() + "] is not installed on node [" + entry.getKey() + "]"; + exceptions.add(new IllegalArgumentException(message)); + } + } + } + ExceptionsHelper.rethrowAndSuppress(exceptions); + } + ClusterState innerPut(PutPipelineRequest request, ClusterState currentState) { IngestMetadata currentIngestMetadata = currentState.metaData().custom(IngestMetadata.TYPE); Map pipelines; diff --git a/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java 
b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java index bd885c578b33..e831d70702e6 100644 --- a/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java +++ b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java @@ -21,6 +21,7 @@ package org.elasticsearch.ingest; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ingest.core.Processor; +import org.elasticsearch.ingest.core.ProcessorInfo; import org.elasticsearch.ingest.core.TemplateService; import java.io.Closeable; diff --git a/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java index c784ea1c57a9..ddf3781d1a6c 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java @@ -20,6 +20,9 @@ package org.elasticsearch.ingest.core; +import org.elasticsearch.common.util.iterable.Iterables; + +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -56,6 +59,24 @@ public class CompoundProcessor implements Processor { return processors; } + public List flattenProcessors() { + List allProcessors = new ArrayList<>(flattenProcessors(processors)); + allProcessors.addAll(flattenProcessors(onFailureProcessors)); + return allProcessors; + } + + private static List flattenProcessors(List processors) { + List flattened = new ArrayList<>(); + for (Processor processor : processors) { + if (processor instanceof CompoundProcessor) { + flattened.addAll(((CompoundProcessor) processor).flattenProcessors()); + } else { + flattened.add(processor); + } + } + return flattened; + } + @Override public String getType() { return "compound"; diff --git a/core/src/main/java/org/elasticsearch/ingest/core/IngestInfo.java b/core/src/main/java/org/elasticsearch/ingest/core/IngestInfo.java new file mode 100644 index 000000000000..8625e1d8884d --- 
/dev/null +++ b/core/src/main/java/org/elasticsearch/ingest/core/IngestInfo.java @@ -0,0 +1,100 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.core; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Objects; +import java.util.Set; + +public class IngestInfo implements Streamable, ToXContent { + + private Set processors; + + public IngestInfo() { + processors = Collections.emptySet(); + } + + public IngestInfo(List processors) { + this.processors = new LinkedHashSet<>(processors); + } + + public Iterable getProcessors() { + return processors; + } + + public boolean containsProcessor(String type) { + return processors.contains(new ProcessorInfo(type)); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + int size = in.readVInt(); + Set processors = new 
LinkedHashSet<>(size); + for (int i = 0; i < size; i++) { + ProcessorInfo info = new ProcessorInfo(); + info.readFrom(in); + processors.add(info); + } + this.processors = processors; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(processors.size()); + for (ProcessorInfo info : processors) { + info.writeTo(out); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject("ingest"); + builder.startArray("processors"); + for (ProcessorInfo info : processors) { + info.toXContent(builder, params); + } + builder.endArray(); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IngestInfo that = (IngestInfo) o; + return Objects.equals(processors, that.processors); + } + + @Override + public int hashCode() { + return Objects.hash(processors); + } +} diff --git a/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java b/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java index 9b887ec229c4..821a44c0a969 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java @@ -83,6 +83,14 @@ public final class Pipeline { return compoundProcessor.getOnFailureProcessors(); } + /** + * Flattens the normal and on failure processors into a single list. The original order is lost.
+ */ + public List flattenAllProcessors() { + return compoundProcessor.flattenProcessors(); + } + public final static class Factory { public Pipeline create(String id, Map config, ProcessorsRegistry processorRegistry) throws Exception { diff --git a/core/src/main/java/org/elasticsearch/ingest/core/ProcessorInfo.java b/core/src/main/java/org/elasticsearch/ingest/core/ProcessorInfo.java new file mode 100644 index 000000000000..64c3d19719b6 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/ingest/core/ProcessorInfo.java @@ -0,0 +1,81 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.core; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; + +public class ProcessorInfo implements Streamable, ToXContent { + + private String type; + + ProcessorInfo() { + } + + public ProcessorInfo(String type) { + this.type = type; + } + + /** + * @return The unique processor type + */ + public String getType() { + return type; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + this.type = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(this.type); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("type", type); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + ProcessorInfo that = (ProcessorInfo) o; + + return type.equals(that.type); + + } + + @Override + public int hashCode() { + return type.hashCode(); + } +} diff --git a/core/src/main/java/org/elasticsearch/node/service/NodeService.java b/core/src/main/java/org/elasticsearch/node/service/NodeService.java index b5b8e8f2cb66..7ae98a20dbb7 100644 --- a/core/src/main/java/org/elasticsearch/node/service/NodeService.java +++ b/core/src/main/java/org/elasticsearch/node/service/NodeService.java @@ -132,12 +132,13 @@ public class NodeService extends AbstractComponent implements Closeable { threadPool.info(), transportService.info(), httpServer == null ? null : httpServer.info(), - pluginService == null ?
null : pluginService.info(), + ingestService == null ? null : ingestService.info() ); } public NodeInfo info(boolean settings, boolean os, boolean process, boolean jvm, boolean threadPool, - boolean transport, boolean http, boolean plugin) { + boolean transport, boolean http, boolean plugin, boolean ingest) { return new NodeInfo(version, Build.CURRENT, discovery.localNode(), serviceAttributes, settings ? settingsFilter.filter(this.settings) : null, os ? monitorService.osService().info() : null, @@ -146,7 +147,8 @@ public class NodeService extends AbstractComponent implements Closeable { threadPool ? this.threadPool.info() : null, transport ? transportService.info() : null, http ? (httpServer == null ? null : httpServer.info()) : null, - plugin ? (pluginService == null ? null : pluginService.info()) : null + plugin ? (pluginService == null ? null : pluginService.info()) : null, + ingest ? (ingestService == null ? null : ingestService.info()) : null ); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java index f11efeca87d6..bd6637cb788a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java @@ -48,7 +48,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestNodesInfoAction extends BaseRestHandler { private final SettingsFilter settingsFilter; - private final static Set ALLOWED_METRICS = Sets.newHashSet("http", "jvm", "os", "plugins", "process", "settings", "thread_pool", "transport"); + private final static Set ALLOWED_METRICS = Sets.newHashSet("http", "jvm", "os", "plugins", "process", "settings", "thread_pool", "transport", "ingest"); @Inject public RestNodesInfoAction(Settings settings, RestController controller, Client 
client, SettingsFilter settingsFilter) { @@ -101,6 +101,7 @@ public class RestNodesInfoAction extends BaseRestHandler { nodesInfoRequest.transport(metrics.contains("transport")); nodesInfoRequest.http(metrics.contains("http")); nodesInfoRequest.plugins(metrics.contains("plugins")); + nodesInfoRequest.ingest(metrics.contains("ingest")); } settingsFilter.addFilterSettingParams(request); diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java new file mode 100644 index 000000000000..abfe18f8c584 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java @@ -0,0 +1,112 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ingest.WritePipelineResponse; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.ingest.core.Pipeline; +import org.elasticsearch.node.service.NodeService; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +@ESIntegTestCase.ClusterScope(numDataNodes = 0, numClientNodes = 0, scope = ESIntegTestCase.Scope.TEST) +public class IngestProcessorNotInstalledOnAllNodesIT extends ESIntegTestCase { + + private final BytesReference pipelineSource; + private volatile boolean installPlugin; + + public IngestProcessorNotInstalledOnAllNodesIT() throws IOException { + pipelineSource = jsonBuilder().startObject() + .startArray("processors") + .startObject() + .startObject("test") + .endObject() + .endObject() + .endArray() + .endObject().bytes(); + } + + @Override + protected Collection> nodePlugins() { + return installPlugin ? 
pluginList(IngestClientIT.IngestPlugin.class) : Collections.emptyList(); + } + + @Override + protected Collection> getMockPlugins() { + return Collections.singletonList(TestSeedPlugin.class); + } + + public void testFailPipelineCreation() throws Exception { + installPlugin = true; + internalCluster().startNode(); + installPlugin = false; + internalCluster().startNode(); + + try { + client().admin().cluster().preparePutPipeline("_id", pipelineSource).get(); + fail("exception expected"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("Processor type [test] is not installed on node")); + } + } + + public void testFailPipelineCreationProcessorNotInstalledOnMasterNode() throws Exception { + internalCluster().startNode(); + installPlugin = true; + internalCluster().startNode(); + + try { + client().admin().cluster().preparePutPipeline("_id", pipelineSource).get(); + fail("exception expected"); + } catch (ElasticsearchParseException e) { + assertThat(e.getMessage(), equalTo("No processor type exists with name [test]")); + } + } + + // If there is pipeline defined and a node joins that doesn't have the processor installed then + // that pipeline can't be used on this node. 
+ public void testFailStartNode() throws Exception { + installPlugin = true; + String node1 = internalCluster().startNode(); + + WritePipelineResponse response = client().admin().cluster().preparePutPipeline("_id", pipelineSource).get(); + assertThat(response.isAcknowledged(), is(true)); + Pipeline pipeline = internalCluster().getInstance(NodeService.class, node1).getIngestService().getPipelineStore().get("_id"); + assertThat(pipeline, notNullValue()); + + installPlugin = false; + String node2 = internalCluster().startNode(); + pipeline = internalCluster().getInstance(NodeService.class, node2).getIngestService().getPipelineStore().get("_id"); + assertThat(pipeline, nullValue()); + } + +} diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java index fb0605f90b53..4009e4877b94 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java @@ -21,24 +21,32 @@ package org.elasticsearch.ingest; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.Version; import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.LocalTransportAddress; +import org.elasticsearch.ingest.core.IngestInfo; import org.elasticsearch.ingest.core.Pipeline; +import org.elasticsearch.ingest.core.ProcessorInfo; +import org.elasticsearch.ingest.processor.RemoveProcessor; import org.elasticsearch.ingest.processor.SetProcessor; import 
org.elasticsearch.test.ESTestCase; import org.junit.Before; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -52,6 +60,7 @@ public class PipelineStoreTests extends ESTestCase { store = new PipelineStore(Settings.EMPTY); ProcessorsRegistry.Builder registryBuilder = new ProcessorsRegistry.Builder(); registryBuilder.registerProcessor("set", (templateService, registry) -> new SetProcessor.Factory(TestTemplateService.instance())); + registryBuilder.registerProcessor("remove", (templateService, registry) -> new RemoveProcessor.Factory(TestTemplateService.instance())); store.buildProcessorFactoryRegistry(registryBuilder, null); } @@ -197,4 +206,38 @@ public class PipelineStoreTests extends ESTestCase { assertThat(pipeline, nullValue()); } + public void testValidate() throws Exception { + PutPipelineRequest putRequest = new PutPipelineRequest("_id", new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}},{\"remove\" : {\"field\": \"_field\"}}]}")); + + DiscoveryNode node1 = new DiscoveryNode("_node_id1", new LocalTransportAddress("_id"), Version.CURRENT); + DiscoveryNode node2 = new DiscoveryNode("_node_id2", new LocalTransportAddress("_id"), Version.CURRENT); + Map ingestInfos = new HashMap<>(); + ingestInfos.put(node1, new IngestInfo(Arrays.asList(new ProcessorInfo("set"), new ProcessorInfo("remove")))); + ingestInfos.put(node2, new IngestInfo(Arrays.asList(new ProcessorInfo("set")))); + + try { + store.validatePipeline(ingestInfos, putRequest); + fail("exception expected"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("Processor type [remove] is not installed on node 
[{_node_id2}{local}{local[_id]}]")); + } + + ingestInfos.put(node2, new IngestInfo(Arrays.asList(new ProcessorInfo("set"), new ProcessorInfo("remove")))); + store.validatePipeline(ingestInfos, putRequest); + } + + public void testValidateNoIngestInfo() throws Exception { + PutPipelineRequest putRequest = new PutPipelineRequest("_id", new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}}]}")); + try { + store.validatePipeline(Collections.emptyMap(), putRequest); + fail("exception expected"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), equalTo("Ingest info is empty")); + } + + DiscoveryNode discoveryNode = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.CURRENT); + IngestInfo ingestInfo = new IngestInfo(Collections.singletonList(new ProcessorInfo("set"))); + store.validatePipeline(Collections.singletonMap(discoveryNode, ingestInfo), putRequest); + } + } diff --git a/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java index fdf48ff42818..537d8f020e6e 100644 --- a/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java @@ -23,11 +23,14 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ingest.ProcessorsRegistry; import org.elasticsearch.ingest.TestProcessor; import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.ingest.processor.FailProcessor; +import org.elasticsearch.ingest.processor.SetProcessor; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.prefs.PreferencesFactory; @@ -115,6 +118,15 @@ public class PipelineFactoryTests extends ESTestCase { assertThat(pipeline.getProcessors().get(0).getType(), 
equalTo("compound")); } + public void testFlattenProcessors() throws Exception { + TestProcessor testProcessor = new TestProcessor(ingestDocument -> {}); + CompoundProcessor processor1 = new CompoundProcessor(testProcessor, testProcessor); + CompoundProcessor processor2 = new CompoundProcessor(Collections.singletonList(testProcessor), Collections.singletonList(testProcessor)); + Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor1, processor2)); + List flattened = pipeline.flattenAllProcessors(); + assertThat(flattened.size(), equalTo(4)); + } + private ProcessorsRegistry createProcessorRegistry(Map processorRegistry) { ProcessorsRegistry.Builder builder = new ProcessorsRegistry.Builder(); for (Map.Entry entry : processorRegistry.entrySet()) { diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java index 693ba4a2eba0..2a845303675d 100644 --- a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java +++ b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.http.HttpInfo; +import org.elasticsearch.ingest.core.IngestInfo; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.monitor.os.DummyOsInfo; import org.elasticsearch.monitor.os.OsInfo; @@ -90,6 +91,7 @@ public class NodeInfoStreamingTests extends ESTestCase { compareJsonOutput(nodeInfo.getNode(), readNodeInfo.getNode()); compareJsonOutput(nodeInfo.getOs(), readNodeInfo.getOs()); comparePluginsAndModules(nodeInfo, readNodeInfo); + compareJsonOutput(nodeInfo.getIngest(), readNodeInfo.getIngest()); } private void comparePluginsAndModules(NodeInfo nodeInfo, NodeInfo readNodeInfo) throws IOException { @@ 
-135,6 +137,7 @@ public class NodeInfoStreamingTests extends ESTestCase { PluginsAndModules plugins = new PluginsAndModules(); plugins.addModule(DummyPluginInfo.INSTANCE); plugins.addPlugin(DummyPluginInfo.INSTANCE); - return new NodeInfo(VersionUtils.randomVersion(random()), build, node, serviceAttributes, settings, osInfo, process, jvm, threadPoolInfo, transport, htttpInfo, plugins); + IngestInfo ingestInfo = new IngestInfo(); + return new NodeInfo(VersionUtils.randomVersion(random()), build, node, serviceAttributes, settings, osInfo, process, jvm, threadPoolInfo, transport, htttpInfo, plugins, ingestInfo); } } diff --git a/docs/reference/cluster/nodes-info.asciidoc b/docs/reference/cluster/nodes-info.asciidoc index a3072768ca67..5ed979abd0d8 100644 --- a/docs/reference/cluster/nodes-info.asciidoc +++ b/docs/reference/cluster/nodes-info.asciidoc @@ -17,7 +17,7 @@ The second command selectively retrieves nodes information of only By default, it just returns all attributes and core settings for a node. 
It also allows to get only information on `settings`, `os`, `process`, `jvm`, -`thread_pool`, `transport`, `http` and `plugins`: +`thread_pool`, `transport`, `http`, `plugins` and `ingest`: [source,js] -------------------------------------------------- @@ -122,3 +122,71 @@ The result will look similar to: } } -------------------------------------------------- + +[float] +[[ingest-info]] +==== Ingest information + +`ingest` - if set, the result will contain details about the available +processors per node: + +* `type`: the processor type + +The result will look similar to: + +[source,js] +-------------------------------------------------- +{ + "cluster_name": "elasticsearch", + "nodes": { + "O70_wBv6S9aPPcAKdSUBtw": { + "ingest": { + "processors": [ + { + "type": "date" + }, + { + "type": "uppercase" + }, + { + "type": "set" + }, + { + "type": "lowercase" + }, + { + "type": "gsub" + }, + { + "type": "convert" + }, + { + "type": "remove" + }, + { + "type": "fail" + }, + { + "type": "foreach" + }, + { + "type": "split" + }, + { + "type": "trim" + }, + { + "type": "rename" + }, + { + "type": "join" + }, + { + "type": "append" + } + ] + } + } + } +} +-------------------------------------------------- \ No newline at end of file diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 95d7005ee34d..0827baa6ea14 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -620,6 +620,20 @@ but is very useful for bookkeeping and tracing errors to specific processors. See <> to learn more about the `on_failure` field and error handling in pipelines. +The <> can be used to figure out what processors are available in a cluster. +The <> will provide a per node list of what processors are available. + +Custom processors must be installed on all nodes. The put pipeline API will fail if a processor specified in a pipeline +doesn't exist on all nodes. 
If you rely on custom processor plugins make sure to mark these plugins as mandatory by adding +`plugin.mandatory` setting to the `config/elasticsearch.yml` file, for example: + +[source,yaml] +-------------------------------------------------- +plugin.mandatory: ingest-attachment,ingest-geoip +-------------------------------------------------- + +A node will not start if either of these plugins are not available. + [[append-procesesor]] === Append Processor Appends one or more values to an existing array if the field already exists and it is an array. diff --git a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml index ed752971fcb2..67bb7340ce39 100644 --- a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml +++ b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml @@ -1,5 +1,11 @@ "Ingest attachment plugin installed": - do: - cluster.stats: {} + cluster.state: {} - - match: { nodes.plugins.0.name: ingest-attachment } + - set: {master_node: master} + + - do: + nodes.info: {} + + - match: { nodes.$master.plugins.0.name: ingest-attachment } + - match: { nodes.$master.ingest.processors.11.type: attachment } diff --git a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml index b522cb777802..b924484aa7dd 100644 --- a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml +++ b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml @@ -1,5 +1,11 @@ "Ingest plugin installed": - do: - cluster.stats: {} + cluster.state: {} - - match: { nodes.plugins.0.name: ingest-geoip } + - set: {master_node: master} + + - do: + nodes.info: {} + + - match: { 
nodes.$master.plugins.0.name: ingest-geoip } + - match: { nodes.$master.ingest.processors.3.type: geoip } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json index 43be35a5a864..12f0d11c5fcb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json @@ -12,7 +12,7 @@ }, "metric": { "type": "list", - "options": ["settings", "os", "process", "jvm", "thread_pool", "transport", "http", "plugins"], + "options": ["settings", "os", "process", "jvm", "thread_pool", "transport", "http", "plugins", "ingest"], "description": "A comma-separated list of metrics you wish returned. Leave empty to return all." } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml index b494161aff11..ced2e9e4850a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml @@ -1,3 +1,28 @@ +--- +"Check availability of default processors": + - do: + cluster.state: {} + + - set: {master_node: master} + + - do: + nodes.info: {} + + - match: { nodes.$master.ingest.processors.0.type: date } + - match: { nodes.$master.ingest.processors.1.type: uppercase } + - match: { nodes.$master.ingest.processors.2.type: set } + - match: { nodes.$master.ingest.processors.3.type: lowercase } + - match: { nodes.$master.ingest.processors.4.type: gsub } + - match: { nodes.$master.ingest.processors.5.type: convert } + - match: { nodes.$master.ingest.processors.6.type: remove } + - match: { nodes.$master.ingest.processors.7.type: fail } + - match: { nodes.$master.ingest.processors.8.type: foreach } + - match: { nodes.$master.ingest.processors.9.type: split } + - match: { nodes.$master.ingest.processors.10.type: trim } + - match: 
{ nodes.$master.ingest.processors.11.type: rename } + - match: { nodes.$master.ingest.processors.12.type: join } + - match: { nodes.$master.ingest.processors.13.type: append } + --- "Test basic pipeline crud": - do: From 5ff413074a6b11f242de3835ad73fd7b24428645 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 3 Mar 2016 18:44:22 +0100 Subject: [PATCH 077/320] Adding tests for `time_zone` parameter for date range aggregation --- .../resources/checkstyle_suppressions.xml | 1 - .../aggregations/bucket/DateHistogramIT.java | 278 +--------------- .../aggregations/bucket/DateRangeIT.java | 68 ++-- .../aggregations/bucket/DateRangeTests.java | 4 + .../aggregations/bucket/ScriptMocks.java | 303 ++++++++++++++++++ .../rescore/QueryRescoreBuilderTests.java | 1 - 6 files changed, 351 insertions(+), 304 deletions(-) rename modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java => core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java (94%) create mode 100644 core/src/test/java/org/elasticsearch/search/aggregations/bucket/ScriptMocks.java diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 60a11d951f9a..cbe612e5358b 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -1486,7 +1486,6 @@ - diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index 2e4a974b7789..76128daa9069 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -18,8 +18,6 @@ */ package org.elasticsearch.search.aggregations.bucket; -import org.apache.lucene.index.LeafReaderContext; -import 
org.apache.lucene.search.Scorer; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.joda.DateMathParser; @@ -28,22 +26,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.core.DateFieldMapper; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.CompiledScript; -import org.elasticsearch.script.ExecutableScript; -import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptEngineRegistry; -import org.elasticsearch.script.ScriptEngineService; -import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.metrics.sum.Sum; -import org.elasticsearch.search.lookup.LeafSearchLookup; -import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; import org.joda.time.DateTime; @@ -55,10 +44,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; @@ -142,8 +128,8 @@ public class DateHistogramIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { return Arrays.asList( - 
ExtractFieldScriptPlugin.class, - FieldValueScriptPlugin.class); + ScriptMocks.ExtractFieldScriptPlugin.class, + ScriptMocks.FieldValueScriptPlugin.class); } @After @@ -469,7 +455,7 @@ public class DateHistogramIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") .field("date") - .script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null)) + .script(new Script("", ScriptType.INLINE, ScriptMocks.FieldValueScriptEngine.NAME, null)) .dateHistogramInterval(DateHistogramInterval.MONTH)).execute().actionGet(); assertSearchResponse(response); @@ -603,7 +589,7 @@ public class DateHistogramIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") .field("dates") - .script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null)) + .script(new Script("", ScriptType.INLINE, ScriptMocks.FieldValueScriptEngine.NAME, null)) .dateHistogramInterval(DateHistogramInterval.MONTH)).execute().actionGet(); assertSearchResponse(response); @@ -653,7 +639,7 @@ public class DateHistogramIT extends ESIntegTestCase { */ public void testScriptSingleValue() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").script(new Script("date", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)).dateHistogramInterval(DateHistogramInterval.MONTH)) + .addAggregation(dateHistogram("histo").script(new Script("date", ScriptType.INLINE, ScriptMocks.ExtractFieldScriptEngine.NAME, null)).dateHistogramInterval(DateHistogramInterval.MONTH)) .execute().actionGet(); assertSearchResponse(response); @@ -688,7 +674,7 @@ public class DateHistogramIT extends ESIntegTestCase { public void testScriptMultiValued() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").script(new Script("dates", ScriptType.INLINE, 
ExtractFieldScriptEngine.NAME, null)).dateHistogramInterval(DateHistogramInterval.MONTH)) + .addAggregation(dateHistogram("histo").script(new Script("dates", ScriptType.INLINE, ScriptMocks.ExtractFieldScriptEngine.NAME, null)).dateHistogramInterval(DateHistogramInterval.MONTH)) .execute().actionGet(); assertSearchResponse(response); @@ -1148,256 +1134,4 @@ public class DateHistogramIT extends ESIntegTestCase { Histogram histo = response.getAggregations().get("histo"); assertThat(histo.getBuckets().size(), greaterThan(0)); } - - /** - * Mock plugin for the {@link ExtractFieldScriptEngine} - */ - public static class ExtractFieldScriptPlugin extends Plugin { - - @Override - public String name() { - return ExtractFieldScriptEngine.NAME; - } - - @Override - public String description() { - return "Mock script engine for " + DateHistogramIT.class; - } - - public void onModule(ScriptModule module) { - module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExtractFieldScriptEngine.class, ExtractFieldScriptEngine.TYPES)); - } - - } - - /** - * This mock script returns the field that is specified by name in the script body - */ - public static class ExtractFieldScriptEngine implements ScriptEngineService { - - public static final String NAME = "extract_field"; - - public static final List TYPES = Collections.singletonList(NAME); - - @Override - public void close() throws IOException { - } - - @Override - public List getTypes() { - return TYPES; - } - - @Override - public List getExtensions() { - return TYPES; - } - - @Override - public boolean isSandboxed() { - return true; - } - - @Override - public Object compile(String script, Map params) { - return script; - } - - @Override - public ExecutableScript executable(CompiledScript compiledScript, Map params) { - throw new UnsupportedOperationException(); - } - @Override - public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map vars) { - return new SearchScript() { - - @Override - 
public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException { - - final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context); - - return new LeafSearchScript() { - @Override - public void setNextVar(String name, Object value) { - } - - @Override - public Object run() { - String fieldName = (String) compiledScript.compiled(); - return leafLookup.doc().get(fieldName); - } - - @Override - public void setScorer(Scorer scorer) { - } - - @Override - public void setSource(Map source) { - } - - @Override - public void setDocument(int doc) { - if (leafLookup != null) { - leafLookup.setDocument(doc); - } - } - - @Override - public long runAsLong() { - throw new UnsupportedOperationException(); - } - - @Override - public float runAsFloat() { - throw new UnsupportedOperationException(); - } - - @Override - public double runAsDouble() { - throw new UnsupportedOperationException(); - } - }; - } - - @Override - public boolean needsScores() { - return false; - } - }; - } - - @Override - public void scriptRemoved(CompiledScript script) { - } - } - - /** - * Mock plugin for the {@link FieldValueScriptEngine} - */ - public static class FieldValueScriptPlugin extends Plugin { - - @Override - public String name() { - return FieldValueScriptEngine.NAME; - } - - @Override - public String description() { - return "Mock script engine for " + DateHistogramIT.class; - } - - public void onModule(ScriptModule module) { - module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldValueScriptEngine.class, FieldValueScriptEngine.TYPES)); - } - - } - - /** - * This mock script returns the field value and adds one month to the returned date - */ - public static class FieldValueScriptEngine implements ScriptEngineService { - - public static final String NAME = "field_value"; - - public static final List TYPES = Collections.singletonList(NAME); - - @Override - public void close() throws IOException { - } - - @Override - public List 
getTypes() { - return TYPES; - } - - @Override - public List getExtensions() { - return TYPES; - } - - @Override - public boolean isSandboxed() { - return true; - } - - @Override - public Object compile(String script, Map params) { - return script; - } - - @Override - public ExecutableScript executable(CompiledScript compiledScript, Map params) { - throw new UnsupportedOperationException(); - } - @Override - public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map vars) { - return new SearchScript() { - - private Map vars = new HashMap<>(2); - - @Override - public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException { - - final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context); - - return new LeafSearchScript() { - - @Override - public Object unwrap(Object value) { - throw new UnsupportedOperationException(); - } - - @Override - public void setNextVar(String name, Object value) { - vars.put(name, value); - } - - @Override - public Object run() { - throw new UnsupportedOperationException(); - } - - @Override - public void setScorer(Scorer scorer) { - } - - @Override - public void setSource(Map source) { - } - - @Override - public void setDocument(int doc) { - if (leafLookup != null) { - leafLookup.setDocument(doc); - } - } - - @Override - public long runAsLong() { - return new DateTime((long) vars.get("_value"), DateTimeZone.UTC).plusMonths(1).getMillis(); - } - - @Override - public float runAsFloat() { - throw new UnsupportedOperationException(); - } - - @Override - public double runAsDouble() { - return new DateTime(new Double((double) vars.get("_value")).longValue(), DateTimeZone.UTC).plusMonths(1).getMillis(); - } - }; - } - - @Override - public boolean needsScores() { - return false; - } - }; - } - - @Override - public void scriptRemoved(CompiledScript script) { - } - } } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java 
b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java similarity index 94% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java index 44f7a93ade10..5a2adf053b7c 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java @@ -16,13 +16,13 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; -import org.elasticsearch.script.groovy.GroovyPlugin; +import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.range.Range; import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket; @@ -36,7 +36,6 @@ import org.joda.time.DateTimeZone; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -55,12 +54,7 @@ import static org.hamcrest.core.IsNull.nullValue; * */ @ESIntegTestCase.SuiteScopeTestCase -public class DateRangeTests extends ESIntegTestCase { - - @Override - protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); - } +public class DateRangeIT extends ESIntegTestCase { private static IndexRequestBuilder indexDoc(int month, int day, int value) throws Exception { return client().prepareIndex("idx", "type").setSource(jsonBuilder() @@ -107,18 +101,25 @@ 
public class DateRangeTests extends ESIntegTestCase { ensureSearchable(); } + @Override + protected Collection> nodePlugins() { + return Arrays.asList( + ScriptMocks.ExtractFieldScriptPlugin.class, + ScriptMocks.FieldValueScriptPlugin.class); + } + public void testDateMath() throws Exception { DateRangeAggregatorBuilder rangeBuilder = dateRange("range"); if (randomBoolean()) { rangeBuilder.field("date"); } else { - rangeBuilder.script(new Script("doc['date'].value")); + rangeBuilder.script(new Script("date", ScriptType.INLINE, ScriptMocks.ExtractFieldScriptEngine.NAME, null)); } SearchResponse response = client() .prepareSearch("idx") .addAggregation( rangeBuilder.addUnboundedTo("a long time ago", "now-50y").addRange("recently", "now-50y", "now-1y") - .addUnboundedFrom("last year", "now-1y")).execute().actionGet(); + .addUnboundedFrom("last year", "now-1y").timeZone(DateTimeZone.forID("EST"))).execute().actionGet(); assertSearchResponse(response); @@ -286,17 +287,22 @@ public class DateRangeTests extends ESIntegTestCase { } public void testSingleValueFieldWithDateMath() throws Exception { + int timeZoneOffset = randomIntBetween(-12, 12); + DateTimeZone timezone = DateTimeZone.forOffsetHours(timeZoneOffset); + String timeZoneSuffix = (timeZoneOffset == 0) ? "Z" : DateTime.now(timezone).toString("ZZ"); + long expectedFirstBucketCount = timeZoneOffset < 0 ? 
3L : 2L; + SearchResponse response = client().prepareSearch("idx") .addAggregation(dateRange("range") .field("date") .addUnboundedTo("2012-02-15") .addRange("2012-02-15", "2012-02-15||+1M") - .addUnboundedFrom("2012-02-15||+1M")) + .addUnboundedFrom("2012-02-15||+1M") + .timeZone(timezone)) .execute().actionGet(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -305,30 +311,31 @@ public class DateRangeTests extends ESIntegTestCase { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z")); + assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000" + timeZoneSuffix)); assertThat(((DateTime) bucket.getFrom()), nullValue()); - assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15))); + assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15).minusHours(timeZoneOffset))); assertThat(bucket.getFromAsString(), nullValue()); - assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z")); - assertThat(bucket.getDocCount(), equalTo(2L)); + assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000" + timeZoneSuffix)); + assertThat(bucket.getDocCount(), equalTo(expectedFirstBucketCount)); bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15))); - assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15))); - assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z")); - assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z")); + assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000" + timeZoneSuffix + + "-2012-03-15T00:00:00.000" + timeZoneSuffix)); + assertThat(((DateTime) bucket.getFrom()), 
equalTo(date(2, 15).minusHours(timeZoneOffset))); + assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15).minusHours(timeZoneOffset))); + assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000" + timeZoneSuffix)); + assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000" + timeZoneSuffix)); assertThat(bucket.getDocCount(), equalTo(2L)); bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15))); + assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000" + timeZoneSuffix + "-*")); + assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15).minusHours(timeZoneOffset))); assertThat(((DateTime) bucket.getTo()), nullValue()); - assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z")); + assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000" + timeZoneSuffix)); assertThat(bucket.getToAsString(), nullValue()); - assertThat(bucket.getDocCount(), equalTo(numDocs - 4L)); + assertThat(bucket.getDocCount(), equalTo(numDocs - 2L - expectedFirstBucketCount)); } public void testSingleValueFieldWithCustomKey() throws Exception { @@ -523,7 +530,7 @@ public class DateRangeTests extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateRange("range") .field("dates") - .script(new Script("new DateTime(_value.longValue(), DateTimeZone.UTC).plusMonths(1).getMillis()")) + .script(new Script("", ScriptType.INLINE, ScriptMocks.FieldValueScriptEngine.NAME, null)) .addUnboundedTo(date(2, 15)).addRange(date(2, 15), date(3, 15)).addUnboundedFrom(date(3, 15))).execute() .actionGet(); @@ -577,7 +584,7 @@ public class DateRangeTests extends ESIntegTestCase { public void testScriptSingleValue() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateRange("range") - .script(new 
Script("doc['date'].value")) + .script(new Script("date", ScriptType.INLINE, ScriptMocks.ExtractFieldScriptEngine.NAME, null)) .addUnboundedTo(date(2, 15)) .addRange(date(2, 15), date(3, 15)) .addUnboundedFrom(date(3, 15))) @@ -637,8 +644,9 @@ public class DateRangeTests extends ESIntegTestCase { SearchResponse response = client() .prepareSearch("idx") .addAggregation( - dateRange("range").script(new Script("doc['dates'].values")).addUnboundedTo(date(2, 15)) - .addRange(date(2, 15), date(3, 15)).addUnboundedFrom(date(3, 15))).execute().actionGet(); + dateRange("range").script(new Script("dates", ScriptType.INLINE, ScriptMocks.ExtractFieldScriptEngine.NAME, null)) + .addUnboundedTo(date(2, 15)).addRange(date(2, 15), date(3, 15)) + .addUnboundedFrom(date(3, 15))).execute().actionGet(); assertSearchResponse(response); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java index 94156fc3a5da..de982b162537 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range; import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorBuilder; +import org.joda.time.DateTimeZone; public class DateRangeTests extends BaseAggregationTestCase { @@ -56,6 +57,9 @@ public class DateRangeTests extends BaseAggregationTestCase TYPES = Collections.singletonList(NAME); + + @Override + public void close() throws IOException { + } + + @Override + public List getTypes() { + return TYPES; + } + + @Override + public List getExtensions() { + return TYPES; + } + + @Override + public boolean isSandboxed() { + 
return true; + } + + @Override + public Object compile(String script, Map params) { + return script; + } + + @Override + public ExecutableScript executable(CompiledScript compiledScript, Map params) { + throw new UnsupportedOperationException(); + } + @Override + public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map vars) { + return new SearchScript() { + + @Override + public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException { + + final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context); + + return new LeafSearchScript() { + @Override + public void setNextVar(String name, Object value) { + } + + @Override + public Object run() { + String fieldName = (String) compiledScript.compiled(); + return leafLookup.doc().get(fieldName); + } + + @Override + public void setScorer(Scorer scorer) { + } + + @Override + public void setSource(Map source) { + } + + @Override + public void setDocument(int doc) { + if (leafLookup != null) { + leafLookup.setDocument(doc); + } + } + + @Override + public long runAsLong() { + throw new UnsupportedOperationException(); + } + + @Override + public float runAsFloat() { + throw new UnsupportedOperationException(); + } + + @Override + public double runAsDouble() { + throw new UnsupportedOperationException(); + } + }; + } + + @Override + public boolean needsScores() { + return false; + } + }; + } + + @Override + public void scriptRemoved(CompiledScript script) { + } + } + + /** + * Mock plugin for the {@link ScriptMocks.FieldValueScriptEngine} + */ + public static class FieldValueScriptPlugin extends Plugin { + + @Override + public String name() { + return ScriptMocks.FieldValueScriptEngine.NAME; + } + + @Override + public String description() { + return "Mock script engine for " + DateHistogramIT.class; + } + + public void onModule(ScriptModule module) { + module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ScriptMocks.FieldValueScriptEngine.class, 
+ ScriptMocks.FieldValueScriptEngine.TYPES)); + } + + } + + /** + * This mock script returns the field value and adds one month to the returned date + */ + public static class FieldValueScriptEngine implements ScriptEngineService { + + public static final String NAME = "field_value"; + + public static final List TYPES = Collections.singletonList(NAME); + + @Override + public void close() throws IOException { + } + + @Override + public List getTypes() { + return TYPES; + } + + @Override + public List getExtensions() { + return TYPES; + } + + @Override + public boolean isSandboxed() { + return true; + } + + @Override + public Object compile(String script, Map params) { + return script; + } + + @Override + public ExecutableScript executable(CompiledScript compiledScript, Map params) { + throw new UnsupportedOperationException(); + } + @Override + public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map vars) { + return new SearchScript() { + + private Map vars = new HashMap<>(2); + + @Override + public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException { + + final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context); + + return new LeafSearchScript() { + + @Override + public Object unwrap(Object value) { + throw new UnsupportedOperationException(); + } + + @Override + public void setNextVar(String name, Object value) { + vars.put(name, value); + } + + @Override + public Object run() { + throw new UnsupportedOperationException(); + } + + @Override + public void setScorer(Scorer scorer) { + } + + @Override + public void setSource(Map source) { + } + + @Override + public void setDocument(int doc) { + if (leafLookup != null) { + leafLookup.setDocument(doc); + } + } + + @Override + public long runAsLong() { + return new DateTime((long) vars.get("_value"), DateTimeZone.UTC).plusMonths(1).getMillis(); + } + + @Override + public float runAsFloat() { + throw new UnsupportedOperationException(); + } + + 
@Override + public double runAsDouble() { + return new DateTime(new Double((double) vars.get("_value")).longValue(), + DateTimeZone.UTC).plusMonths(1).getMillis(); + } + }; + } + + @Override + public boolean needsScores() { + return false; + } + }; + } + + @Override + public void scriptRemoved(CompiledScript script) { + } + } + +} diff --git a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java index a39c618fe9d1..f0bb35cc9d1f 100644 --- a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java @@ -39,7 +39,6 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; From 6b0f63e1a60b353311ddc7de42c3f1869214126f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 4 Mar 2016 15:18:15 +0100 Subject: [PATCH 078/320] Adding `time_zone` parameter to daterange-aggregation docs --- .../bucket/daterange-aggregation.asciidoc | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc b/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc index 8b1f58f7ff0f..96a3d72f9acc 100644 --- a/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc @@ -111,3 +111,31 @@ Zone:: 'Z' outputs offset without a colon, 'ZZ' outputs the offset with a colon, Zone names:: Time zone names ('z') cannot be 
parsed. Any characters in the pattern that are not in the ranges of ['a'..'z'] and ['A'..'Z'] will be treated as quoted text. For instance, characters like ':', '.', ' ', '#' and '?' will appear in the resulting time text even they are not embraced within single quotes. + +[[time-zones]] +==== Time zone in date range aggregations + +Dates can be converted from another time zone to UTC by specifying the `time_zone` parameter. +The `time_zone` parameter is also applied to rounding in date math expressions. As an example, +to round to the beginning of the day in the CET time zone, you can do the following: + +[source,js] +-------------------------------------------------- +{ + "aggs": { + "range": { + "date_range": { + "field": "date", + "time_zone": "CET", + "ranges": [ + { "to": "2016-02-15/d" }, <1> + { "from": "2016-02-15/d", "to" : "now/d" <2>}, + { "from": "now/d" } + ] + } + } + } + } +-------------------------------------------------- +<1> This date will be converted to `2016-02-15T00:00:00.000+01:00`. +<2> `now/d` will be rounded to the beginning of the day in the CET time zone.
From ff46303f15d023c26c3aef1ae9b5759d3b5f559c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 4 Mar 2016 19:34:56 +0100 Subject: [PATCH 079/320] Simplify mock scripts --- .../aggregations/bucket/DateHistogramIT.java | 24 +- .../aggregations/bucket/DateRangeIT.java | 61 ++-- .../aggregations/bucket/DateRangeTests.java | 4 +- .../aggregations/bucket/DateScriptMocks.java | 133 ++++++++ .../aggregations/bucket/ScriptMocks.java | 303 ------------------ .../bucket/daterange-aggregation.asciidoc | 6 +- 6 files changed, 198 insertions(+), 333 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateScriptMocks.java delete mode 100644 core/src/test/java/org/elasticsearch/search/aggregations/bucket/ScriptMocks.java diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index 76128daa9069..c2ac2078c06a 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; +import org.elasticsearch.search.aggregations.bucket.DateScriptMocks.DateScriptsMockPlugin; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; @@ -44,7 +45,9 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.concurrent.Callable; import 
java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; @@ -128,8 +131,7 @@ public class DateHistogramIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { return Arrays.asList( - ScriptMocks.ExtractFieldScriptPlugin.class, - ScriptMocks.FieldValueScriptPlugin.class); + DateScriptsMockPlugin.class); } @After @@ -452,10 +454,12 @@ public class DateHistogramIT extends ESIntegTestCase { } public void testSingleValuedFieldWithValueScript() throws Exception { + Map params = new HashMap<>(); + params.put("fieldname", "date"); SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") .field("date") - .script(new Script("", ScriptType.INLINE, ScriptMocks.FieldValueScriptEngine.NAME, null)) + .script(new Script(DateScriptMocks.PlusOneMonthScript.NAME, ScriptType.INLINE, "native", params)) .dateHistogramInterval(DateHistogramInterval.MONTH)).execute().actionGet(); assertSearchResponse(response); @@ -586,10 +590,12 @@ public class DateHistogramIT extends ESIntegTestCase { * doc 6: [ Apr 23, May 24] */ public void testMultiValuedFieldWithValueScript() throws Exception { + Map params = new HashMap<>(); + params.put("fieldname", "dates"); SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") .field("dates") - .script(new Script("", ScriptType.INLINE, ScriptMocks.FieldValueScriptEngine.NAME, null)) + .script(new Script(DateScriptMocks.PlusOneMonthScript.NAME, ScriptType.INLINE, "native", params)) .dateHistogramInterval(DateHistogramInterval.MONTH)).execute().actionGet(); assertSearchResponse(response); @@ -638,8 +644,11 @@ public class DateHistogramIT extends ESIntegTestCase { * Mar 23 */ public void testScriptSingleValue() throws Exception { + Map params = new HashMap<>(); + params.put("fieldname", "date"); SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").script(new Script("date", ScriptType.INLINE, 
ScriptMocks.ExtractFieldScriptEngine.NAME, null)).dateHistogramInterval(DateHistogramInterval.MONTH)) + .addAggregation(dateHistogram("histo").script(new Script(DateScriptMocks.ExtractFieldScript.NAME, + ScriptType.INLINE, "native", params)).dateHistogramInterval(DateHistogramInterval.MONTH)) .execute().actionGet(); assertSearchResponse(response); @@ -673,8 +682,11 @@ public class DateHistogramIT extends ESIntegTestCase { } public void testScriptMultiValued() throws Exception { + Map params = new HashMap<>(); + params.put("fieldname", "dates"); SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").script(new Script("dates", ScriptType.INLINE, ScriptMocks.ExtractFieldScriptEngine.NAME, null)).dateHistogramInterval(DateHistogramInterval.MONTH)) + .addAggregation(dateHistogram("histo").script(new Script(DateScriptMocks.ExtractFieldScript.NAME, + ScriptType.INLINE, "native", params)).dateHistogramInterval(DateHistogramInterval.MONTH)) .execute().actionGet(); assertSearchResponse(response); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java index 5a2adf053b7c..b1dc61a9b9ec 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; +import org.elasticsearch.search.aggregations.bucket.DateScriptMocks.DateScriptsMockPlugin; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.range.Range; import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket; @@ -36,7 +37,9 @@ import 
org.joda.time.DateTimeZone; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.HashMap; import java.util.List; +import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; @@ -66,7 +69,11 @@ public class DateRangeIT extends ESIntegTestCase { } private static DateTime date(int month, int day) { - return new DateTime(2012, month, day, 0, 0, DateTimeZone.UTC); + return date(month, day, DateTimeZone.UTC); + } + + private static DateTime date(int month, int day, DateTimeZone timezone) { + return new DateTime(2012, month, day, 0, 0, timezone); } private static int numDocs; @@ -104,16 +111,17 @@ public class DateRangeIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { return Arrays.asList( - ScriptMocks.ExtractFieldScriptPlugin.class, - ScriptMocks.FieldValueScriptPlugin.class); + DateScriptsMockPlugin.class); } public void testDateMath() throws Exception { + Map params = new HashMap<>(); + params.put("fieldname", "date"); DateRangeAggregatorBuilder rangeBuilder = dateRange("range"); if (randomBoolean()) { rangeBuilder.field("date"); } else { - rangeBuilder.script(new Script("date", ScriptType.INLINE, ScriptMocks.ExtractFieldScriptEngine.NAME, null)); + rangeBuilder.script(new Script(DateScriptMocks.ExtractFieldScript.NAME, ScriptType.INLINE, "native", params)); } SearchResponse response = client() .prepareSearch("idx") @@ -287,9 +295,12 @@ public class DateRangeIT extends ESIntegTestCase { } public void testSingleValueFieldWithDateMath() throws Exception { - int timeZoneOffset = randomIntBetween(-12, 12); - DateTimeZone timezone = DateTimeZone.forOffsetHours(timeZoneOffset); - String timeZoneSuffix = (timeZoneOffset == 0) ? 
"Z" : DateTime.now(timezone).toString("ZZ"); + String[] ids = DateTimeZone.getAvailableIDs().toArray(new String[DateTimeZone.getAvailableIDs().size()]); + DateTimeZone timezone = DateTimeZone.forID(randomFrom(ids)); + int timeZoneOffset = timezone.getOffset(date(2, 15)); + // if time zone is UTC (or equivalent), time zone suffix is "Z", else something like "+03:00", which we get with the "ZZ" format + String feb15Suffix = timeZoneOffset == 0 ? "Z" : date(2,15, timezone).toString("ZZ"); + String mar15Suffix = timeZoneOffset == 0 ? "Z" : date(3,15, timezone).toString("ZZ"); long expectedFirstBucketCount = timeZoneOffset < 0 ? 3L : 2L; SearchResponse response = client().prepareSearch("idx") @@ -311,29 +322,29 @@ public class DateRangeIT extends ESIntegTestCase { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000" + timeZoneSuffix)); + assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000" + feb15Suffix)); assertThat(((DateTime) bucket.getFrom()), nullValue()); - assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15).minusHours(timeZoneOffset))); + assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15, timezone).toDateTime(DateTimeZone.UTC))); assertThat(bucket.getFromAsString(), nullValue()); - assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000" + timeZoneSuffix)); + assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix)); assertThat(bucket.getDocCount(), equalTo(expectedFirstBucketCount)); bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000" + timeZoneSuffix + - "-2012-03-15T00:00:00.000" + timeZoneSuffix)); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15).minusHours(timeZoneOffset))); - assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15).minusHours(timeZoneOffset))); - 
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000" + timeZoneSuffix)); - assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000" + timeZoneSuffix)); + assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix + + "-2012-03-15T00:00:00.000" + mar15Suffix)); + assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15, timezone).toDateTime(DateTimeZone.UTC))); + assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15, timezone).toDateTime(DateTimeZone.UTC))); + assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix)); + assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix)); assertThat(bucket.getDocCount(), equalTo(2L)); bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000" + timeZoneSuffix + "-*")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15).minusHours(timeZoneOffset))); + assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix + "-*")); + assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15, timezone).toDateTime(DateTimeZone.UTC))); assertThat(((DateTime) bucket.getTo()), nullValue()); - assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000" + timeZoneSuffix)); + assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix)); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(numDocs - 2L - expectedFirstBucketCount)); } @@ -527,10 +538,12 @@ public class DateRangeIT extends ESIntegTestCase { public void testMultiValuedFieldWithValueScript() throws Exception { + Map params = new HashMap<>(); + params.put("fieldname", "dates"); SearchResponse response = client().prepareSearch("idx") .addAggregation(dateRange("range") .field("dates") - .script(new Script("", ScriptType.INLINE, ScriptMocks.FieldValueScriptEngine.NAME, null)) + 
.script(new Script(DateScriptMocks.PlusOneMonthScript.NAME, ScriptType.INLINE, "native", params)) .addUnboundedTo(date(2, 15)).addRange(date(2, 15), date(3, 15)).addUnboundedFrom(date(3, 15))).execute() .actionGet(); @@ -582,9 +595,11 @@ public class DateRangeIT extends ESIntegTestCase { */ public void testScriptSingleValue() throws Exception { + Map params = new HashMap<>(); + params.put("fieldname", "date"); SearchResponse response = client().prepareSearch("idx") .addAggregation(dateRange("range") - .script(new Script("date", ScriptType.INLINE, ScriptMocks.ExtractFieldScriptEngine.NAME, null)) + .script(new Script(DateScriptMocks.ExtractFieldScript.NAME, ScriptType.INLINE, "native", params)) .addUnboundedTo(date(2, 15)) .addRange(date(2, 15), date(3, 15)) .addUnboundedFrom(date(3, 15))) @@ -641,10 +656,12 @@ public class DateRangeIT extends ESIntegTestCase { */ public void testScriptMultiValued() throws Exception { + Map params = new HashMap<>(); + params.put("fieldname", "dates"); SearchResponse response = client() .prepareSearch("idx") .addAggregation( - dateRange("range").script(new Script("dates", ScriptType.INLINE, ScriptMocks.ExtractFieldScriptEngine.NAME, null)) + dateRange("range").script(new Script(DateScriptMocks.ExtractFieldScript.NAME, ScriptType.INLINE, "native", params)) .addUnboundedTo(date(2, 15)).addRange(date(2, 15), date(3, 15)) .addUnboundedFrom(date(3, 15))).execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java index de982b162537..71b61c0e6e62 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java @@ -26,6 +26,8 @@ import org.joda.time.DateTimeZone; public class DateRangeTests extends BaseAggregationTestCase { + private final static String[] timeZoneIds = 
DateTimeZone.getAvailableIDs().toArray(new String[DateTimeZone.getAvailableIDs().size()]); + @Override protected DateRangeAggregatorBuilder createTestAggregatorBuilder() { int numRanges = randomIntBetween(1, 10); @@ -58,7 +60,7 @@ public class DateRangeTests extends BaseAggregationTestCase params) { + return new ExtractFieldScript((String) params.get("fieldname")); + } + @Override + public boolean needsScores() { + return false; + } + } + + public static class ExtractFieldScript extends AbstractSearchScript { + + public static final String NAME = "extract_field"; + private String fieldname; + + public ExtractFieldScript(String fieldname) { + this.fieldname = fieldname; + } + + @Override + public Object run() { + return doc().get(fieldname); + } + } + + public static class PlusOneMonthScriptFactory implements NativeScriptFactory { + + @Override + public ExecutableScript newScript(Map params) { + return new PlusOneMonthScript((String) params.get("fieldname")); + } + + @Override + public boolean needsScores() { + return false; + } + } + + /** + * This mock script takes date field value and adds one month to the returned date + */ + public static class PlusOneMonthScript extends AbstractSearchScript { + + public static final String NAME = "date_plus_1_month"; + private String fieldname; + + private Map vars = new HashMap<>(); + + public PlusOneMonthScript(String fieldname) { + this.fieldname = fieldname; + } + + @Override + public void setNextVar(String name, Object value) { + vars.put(name, value); + } + + @Override + public long runAsLong() { + return new DateTime((long) vars.get("_value"), DateTimeZone.UTC).plusMonths(1).getMillis(); + } + + @Override + public double runAsDouble() { + return new DateTime(new Double((double) vars.get("_value")).longValue(), DateTimeZone.UTC).plusMonths(1).getMillis(); + } + + @Override + public Object run() { + return new UnsupportedOperationException(); + } + } +} diff --git 
a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ScriptMocks.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ScriptMocks.java deleted file mode 100644 index dcc5ba0b6ed8..000000000000 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ScriptMocks.java +++ /dev/null @@ -1,303 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.search.aggregations.bucket; - -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.Scorer; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.CompiledScript; -import org.elasticsearch.script.ExecutableScript; -import org.elasticsearch.script.LeafSearchScript; -import org.elasticsearch.script.ScriptEngineRegistry; -import org.elasticsearch.script.ScriptEngineService; -import org.elasticsearch.script.ScriptModule; -import org.elasticsearch.script.SearchScript; -import org.elasticsearch.search.lookup.LeafSearchLookup; -import org.elasticsearch.search.lookup.SearchLookup; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * Helper mocks for script plugins and engines - */ -public class ScriptMocks { - - /** - * Mock plugin for the {@link ScriptMocks.ExtractFieldScriptEngine} - */ - public static class ExtractFieldScriptPlugin extends Plugin { - - @Override - public String name() { - return ScriptMocks.ExtractFieldScriptEngine.NAME; - } - - @Override - public String description() { - return "Mock script engine for " + DateHistogramIT.class; - } - - public void onModule(ScriptModule module) { - module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ScriptMocks.ExtractFieldScriptEngine.class, - ScriptMocks.ExtractFieldScriptEngine.TYPES)); - } - - } - - /** - * This mock script returns the field that is specified by name in the script body - */ - public static class ExtractFieldScriptEngine implements ScriptEngineService { - - public static final String NAME = "extract_field"; - - public static final List TYPES = Collections.singletonList(NAME); - - @Override - public void close() throws IOException { - } - - @Override - public List getTypes() { - return TYPES; - } - - @Override - public List getExtensions() 
{ - return TYPES; - } - - @Override - public boolean isSandboxed() { - return true; - } - - @Override - public Object compile(String script, Map params) { - return script; - } - - @Override - public ExecutableScript executable(CompiledScript compiledScript, Map params) { - throw new UnsupportedOperationException(); - } - @Override - public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map vars) { - return new SearchScript() { - - @Override - public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException { - - final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context); - - return new LeafSearchScript() { - @Override - public void setNextVar(String name, Object value) { - } - - @Override - public Object run() { - String fieldName = (String) compiledScript.compiled(); - return leafLookup.doc().get(fieldName); - } - - @Override - public void setScorer(Scorer scorer) { - } - - @Override - public void setSource(Map source) { - } - - @Override - public void setDocument(int doc) { - if (leafLookup != null) { - leafLookup.setDocument(doc); - } - } - - @Override - public long runAsLong() { - throw new UnsupportedOperationException(); - } - - @Override - public float runAsFloat() { - throw new UnsupportedOperationException(); - } - - @Override - public double runAsDouble() { - throw new UnsupportedOperationException(); - } - }; - } - - @Override - public boolean needsScores() { - return false; - } - }; - } - - @Override - public void scriptRemoved(CompiledScript script) { - } - } - - /** - * Mock plugin for the {@link ScriptMocks.FieldValueScriptEngine} - */ - public static class FieldValueScriptPlugin extends Plugin { - - @Override - public String name() { - return ScriptMocks.FieldValueScriptEngine.NAME; - } - - @Override - public String description() { - return "Mock script engine for " + DateHistogramIT.class; - } - - public void onModule(ScriptModule module) { - module.addScriptEngine(new 
ScriptEngineRegistry.ScriptEngineRegistration(ScriptMocks.FieldValueScriptEngine.class, - ScriptMocks.FieldValueScriptEngine.TYPES)); - } - - } - - /** - * This mock script returns the field value and adds one month to the returned date - */ - public static class FieldValueScriptEngine implements ScriptEngineService { - - public static final String NAME = "field_value"; - - public static final List TYPES = Collections.singletonList(NAME); - - @Override - public void close() throws IOException { - } - - @Override - public List getTypes() { - return TYPES; - } - - @Override - public List getExtensions() { - return TYPES; - } - - @Override - public boolean isSandboxed() { - return true; - } - - @Override - public Object compile(String script, Map params) { - return script; - } - - @Override - public ExecutableScript executable(CompiledScript compiledScript, Map params) { - throw new UnsupportedOperationException(); - } - @Override - public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map vars) { - return new SearchScript() { - - private Map vars = new HashMap<>(2); - - @Override - public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException { - - final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context); - - return new LeafSearchScript() { - - @Override - public Object unwrap(Object value) { - throw new UnsupportedOperationException(); - } - - @Override - public void setNextVar(String name, Object value) { - vars.put(name, value); - } - - @Override - public Object run() { - throw new UnsupportedOperationException(); - } - - @Override - public void setScorer(Scorer scorer) { - } - - @Override - public void setSource(Map source) { - } - - @Override - public void setDocument(int doc) { - if (leafLookup != null) { - leafLookup.setDocument(doc); - } - } - - @Override - public long runAsLong() { - return new DateTime((long) vars.get("_value"), DateTimeZone.UTC).plusMonths(1).getMillis(); - } - - @Override 
- public float runAsFloat() { - throw new UnsupportedOperationException(); - } - - @Override - public double runAsDouble() { - return new DateTime(new Double((double) vars.get("_value")).longValue(), - DateTimeZone.UTC).plusMonths(1).getMillis(); - } - }; - } - - @Override - public boolean needsScores() { - return false; - } - }; - } - - @Override - public void scriptRemoved(CompiledScript script) { - } - } - -} diff --git a/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc b/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc index 96a3d72f9acc..e649928810b9 100644 --- a/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc @@ -116,6 +116,10 @@ Any characters in the pattern that are not in the ranges of ['a'..'z'] and ['A'. ==== Time zone in date range aggregations Dates can be converted from another time zone to UTC by specifying the `time_zone` parameter. + +Time zones may either be specified as an ISO 8601 UTC offset (e.g. +01:00 or -08:00) or as one of +the the http://joda-time.sourceforge.net/timezones.html[time zone ids] from the TZ database. + The `time_zone` parameter is also applied to rounding in date math expressions. As an example, to round to the beginning of the day in the CET time zone, you can do the following: @@ -138,4 +142,4 @@ to round to the beginning of the day in the CET time zone, you can do the follow } -------------------------------------------------- <1> This date will be converted to `2016-02-15T00:00:00.000+01:00`. -<2> `now\d` will be rounded to the beginning of the day in the CET time zone. +<2> `now/d` will be rounded to the beginning of the day in the CET time zone. 
From a0191dff01d137c9cdf039c233654859f9f4b575 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 7 Mar 2016 09:45:55 -0500 Subject: [PATCH 080/320] Enable unmap hack for java 9 --- .../resources/org/elasticsearch/bootstrap/security.policy | 3 +++ .../elasticsearch/cluster/routing/PrimaryAllocationIT.java | 7 ++++++- .../java/org/elasticsearch/common/lucene/LuceneTests.java | 2 +- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index f953123c7a41..4909959015b0 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -33,7 +33,10 @@ grant codeBase "${codebase.securesm-1.0.jar}" { grant codeBase "${codebase.lucene-core-6.0.0-snapshot-bea235f.jar}" { // needed to allow MMapDirectory's "unmap hack" (die unmap hack, die) + // java 8 package permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; + // java 9 "package" + permission java.lang.RuntimePermission "accessClassInPackage.jdk.internal.ref"; permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; // NOTE: also needed for RAMUsageEstimator size calculations permission java.lang.RuntimePermission "accessDeclaredMembers"; diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java index d911a1175c7a..94336d23623b 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -109,7 +109,12 @@ public class PrimaryAllocationIT extends ESIntegTestCase { logger.info("--> check that old primary shard does not get promoted to primary again"); // kick reroute and wait for all shard states to be fetched 
client(master).admin().cluster().prepareReroute().get(); - assertBusy(() -> assertThat(internalCluster().getInstance(GatewayAllocator.class, master).getNumberOfInFlightFetch(), equalTo(0))); + assertBusy(new Runnable() { + @Override + public void run() { + assertThat(internalCluster().getInstance(GatewayAllocator.class, master).getNumberOfInFlightFetch(), equalTo(0)); + } + }); // kick reroute a second time and check that all shards are unassigned assertThat(client(master).admin().cluster().prepareReroute().get().getState().getRoutingNodes().unassigned().size(), equalTo(2)); } diff --git a/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index 8f8aea578de6..8df6f5c78cc5 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -364,6 +364,6 @@ public class LuceneTests extends ESTestCase { */ public void testMMapHackSupported() throws Exception { // add assume's here if needed for certain platforms, but we should know if it does not work. - assertTrue(MMapDirectory.UNMAP_SUPPORTED); + assertTrue("MMapDirectory does not support unmapping: " + MMapDirectory.UNMAP_NOT_SUPPORTED_REASON, MMapDirectory.UNMAP_SUPPORTED); } } From 887b69b58b800bfab44918895e339d8333e6da02 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 7 Mar 2016 16:39:15 +0100 Subject: [PATCH 081/320] Add support for alpha versions Elasticsearch 5.0 will come with alpha versions which are not supported in the current version scheme. This commit adds support for alpha starting with es 5.0.0 in a backwards compatible way. 
--- .../main/java/org/elasticsearch/Version.java | 41 ++++++++++++++----- .../java/org/elasticsearch/VersionTests.java | 38 +++++++++++++++++ 2 files changed, 68 insertions(+), 11 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index b725a6464a0e..e96360f5115b 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -35,8 +35,8 @@ import java.io.IOException; @SuppressWarnings("deprecation") public class Version { - // The logic for ID is: XXYYZZAA, where XX is major version, YY is minor version, ZZ is revision, and AA is Beta/RC indicator - // AA values below 50 are beta builds, and below 99 are RC builds, with 99 indicating a release + // The logic for ID is: XXYYZZAA, where XX is major version, YY is minor version, ZZ is revision, and AA is alpha/beta/rc indicator + // AA values below 25 are fro alpha builder (since 5.0), and above 25 and below 50 are beta builds, and below 99 are RC builds, with 99 indicating a release // the (internal) format of the id is there so we can easily do after/before checks on the id @@ -143,9 +143,10 @@ public class Version { } try { - + final int rawMajor = Integer.parseInt(parts[0]); + final int betaOffset = rawMajor < 5 ? 
0 : 25; //we reverse the version id calculation based on some assumption as we can't reliably reverse the modulo - final int major = Integer.parseInt(parts[0]) * 1000000; + final int major = rawMajor * 1000000; final int minor = Integer.parseInt(parts[1]) * 10000; final int revision = Integer.parseInt(parts[2]) * 100; @@ -153,11 +154,17 @@ public class Version { int build = 99; if (parts.length == 4) { String buildStr = parts[3]; - if (buildStr.startsWith("Beta") || buildStr.startsWith("beta")) { - build = Integer.parseInt(buildStr.substring(4)); - } - if (buildStr.startsWith("RC") || buildStr.startsWith("rc")) { + if (buildStr.startsWith("alpha")) { + assert rawMajor >= 5 : "major must be >= 5 but was " + major; + build = Integer.parseInt(buildStr.substring(5)); + assert build < 25 : "expected a beta build but " + build + " >= 25"; + } else if (buildStr.startsWith("Beta") || buildStr.startsWith("beta")) { + build = betaOffset + Integer.parseInt(buildStr.substring(4)); + assert build < 50 : "expected a beta build but " + build + " >= 50"; + } else if (buildStr.startsWith("RC") || buildStr.startsWith("rc")) { build = Integer.parseInt(buildStr.substring(2)) + 50; + } else { + throw new IllegalArgumentException("unable to parse version " + version); } } @@ -220,13 +227,16 @@ public class Version { public String toString() { StringBuilder sb = new StringBuilder(); sb.append(major).append('.').append(minor).append('.').append(revision); - if (isBeta()) { + if (isAlpha()) { + sb.append("-alpha"); + sb.append(build); + } else if (isBeta()) { if (major >= 2) { sb.append("-beta"); } else { sb.append(".Beta"); } - sb.append(build); + sb.append(major < 5 ? build : build-25); } else if (build < 99) { if (major >= 2) { sb.append("-rc"); @@ -262,7 +272,16 @@ public class Version { } public boolean isBeta() { - return build < 50; + return major < 5 ? 
build < 50 : build >= 25 && build < 50; + } + + /** + * Returns true iff this version is an alpha version + * Note: This has been introduced in elasticsearch version 5. Previous versions will never + * have an alpha version. + */ + public boolean isAlpha() { + return major < 5 ? false : build < 25; } public boolean isRC() { diff --git a/core/src/test/java/org/elasticsearch/VersionTests.java b/core/src/test/java/org/elasticsearch/VersionTests.java index 05dabb2d8ffc..f15ead1ff9e2 100644 --- a/core/src/test/java/org/elasticsearch/VersionTests.java +++ b/core/src/test/java/org/elasticsearch/VersionTests.java @@ -57,6 +57,12 @@ public class VersionTests extends ESTestCase { assertThat(V_2_2_0.onOrAfter(V_5_0_0), is(false)); assertThat(V_2_2_0.onOrAfter(V_2_2_0), is(true)); assertThat(V_5_0_0.onOrAfter(V_2_2_0), is(true)); + + assertTrue(Version.fromString("5.0.0-alpha2").onOrAfter(Version.fromString("5.0.0-alpha1"))); + assertTrue(Version.fromString("5.0.0").onOrAfter(Version.fromString("5.0.0-beta2"))); + assertTrue(Version.fromString("5.0.0-rc1").onOrAfter(Version.fromString("5.0.0-beta24"))); + assertTrue(Version.fromString("5.0.0-alpha24").before(Version.fromString("5.0.0-beta0"))); + } public void testVersionConstantPresent() { @@ -144,12 +150,40 @@ public class VersionTests extends ESTestCase { assertEquals("2.0.0-beta1", Version.V_2_0_0_beta1.toString()); assertEquals("5.0.0", Version.V_5_0_0.toString()); assertEquals("2.3.0", Version.V_2_3_0.toString()); + assertEquals("0.90.0.Beta1", Version.fromString("0.90.0.Beta1").toString()); + assertEquals("1.0.0.Beta1", Version.fromString("1.0.0.Beta1").toString()); + assertEquals("2.0.0-beta1", Version.fromString("2.0.0-beta1").toString()); + assertEquals("5.0.0-beta1", Version.fromString("5.0.0-beta1").toString()); + assertEquals("5.0.0-alpha1", Version.fromString("5.0.0-alpha1").toString()); } public void testIsBeta() { assertTrue(Version.V_2_0_0_beta1.isBeta()); + 
assertTrue(Version.fromString("1.0.0.Beta1").isBeta()); + assertTrue(Version.fromString("0.90.0.Beta1").isBeta()); } + + public void testIsAlpha() { + assertTrue(new Version(5000001, org.apache.lucene.util.Version.LUCENE_6_0_0).isAlpha()); + assertFalse(new Version(4000002, org.apache.lucene.util.Version.LUCENE_6_0_0).isAlpha()); + assertTrue(new Version(4000002, org.apache.lucene.util.Version.LUCENE_6_0_0).isBeta()); + assertTrue(Version.fromString("5.0.0-alpha14").isAlpha()); + assertEquals(5000014, Version.fromString("5.0.0-alpha14").id); + assertTrue(Version.fromId(5000015).isAlpha()); + + for (int i = 0 ; i < 25; i++) { + assertEquals(Version.fromString("5.0.0-alpha" + i).id, Version.fromId(5000000 + i).id); + assertEquals("5.0.0-alpha" + i, Version.fromId(5000000 + i).toString()); + } + + for (int i = 0 ; i < 25; i++) { + assertEquals(Version.fromString("5.0.0-beta" + i).id, Version.fromId(5000000 + i + 25).id); + assertEquals("5.0.0-beta" + i, Version.fromId(5000000 + i + 25).toString()); + } + } + + public void testParseVersion() { final int iters = scaledRandomIntBetween(100, 1000); for (int i = 0; i < iters; i++) { @@ -160,6 +194,10 @@ public class VersionTests extends ESTestCase { Version parsedVersion = Version.fromString(version.toString()); assertEquals(version, parsedVersion); } + + expectThrows(IllegalArgumentException.class, () -> { + Version.fromString("5.0.0-alph2"); + }); } public void testParseLenient() { From 9cbc602487c54fa8a71ebccfbaefdc17cfc399b3 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 7 Mar 2016 17:41:38 +0100 Subject: [PATCH 082/320] Remove SNAPSHOT from versions in plugin descriptors We removed leniency from version parsing which caught problems with -SNAPSHOT suffixes on plugin properties. This commit removes the -SNAPSHOT from both es and the extension version and adds tests to ensure we can parse older versions that allowed -SNAPSHOT in a BWC way. 
--- .../gradle/plugin/PluginPropertiesTask.groovy | 10 ++++++++-- core/src/main/java/org/elasticsearch/Version.java | 7 +++++++ core/src/test/java/org/elasticsearch/VersionTests.java | 7 +++++++ 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy index 7b525d39f531..b5128817fb03 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy @@ -68,11 +68,17 @@ class PluginPropertiesTask extends Copy { } Map generateSubstitutions() { + def stringSnap = { version -> + if (version.endsWith("-SNAPSHOT")) { + return version.substring(0, version.length() - 9) + } + return version + } return [ 'name': extension.name, 'description': extension.description, - 'version': extension.version, - 'elasticsearchVersion': VersionProperties.elasticsearch, + 'version': stringSnap(extension.version), + 'elasticsearchVersion': stringSnap(VersionProperties.elasticsearch), 'javaVersion': project.targetCompatibility as String, 'isolated': extension.isolated as String, 'classname': extension.classname diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index e96360f5115b..c80f32bf7bcd 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -137,6 +137,10 @@ public class Version { if (!Strings.hasLength(version)) { return Version.CURRENT; } + final boolean snapshot; // this is some BWC for 2.x and before indices + if (snapshot = version.endsWith("-SNAPSHOT")) { + version = version.substring(0, version.length() - 9); + } String[] parts = version.split("\\.|\\-"); if (parts.length < 3 || parts.length > 4) { throw new IllegalArgumentException("the version needs to 
contain major, minor, and revision, and optionally the build: " + version); @@ -144,6 +148,9 @@ public class Version { try { final int rawMajor = Integer.parseInt(parts[0]); + if (rawMajor >= 5 && snapshot) { // we don't support snapshot as part of the version here anymore + throw new IllegalArgumentException("illegal version format - snapshots are only supported until version 2.x"); + } final int betaOffset = rawMajor < 5 ? 0 : 25; //we reverse the version id calculation based on some assumption as we can't reliably reverse the modulo final int major = rawMajor * 1000000; diff --git a/core/src/test/java/org/elasticsearch/VersionTests.java b/core/src/test/java/org/elasticsearch/VersionTests.java index f15ead1ff9e2..7824ecd39b12 100644 --- a/core/src/test/java/org/elasticsearch/VersionTests.java +++ b/core/src/test/java/org/elasticsearch/VersionTests.java @@ -198,6 +198,13 @@ public class VersionTests extends ESTestCase { expectThrows(IllegalArgumentException.class, () -> { Version.fromString("5.0.0-alph2"); }); + assertSame(Version.CURRENT, Version.fromString(Version.CURRENT.toString())); + + assertSame(Version.fromString("2.0.0-SNAPSHOT"), Version.fromString("2.0.0")); + + expectThrows(IllegalArgumentException.class, () -> { + Version.fromString("5.0.0-SNAPSHOT"); + }); } public void testParseLenient() { From 53cc8bdc455f92a68792404ea32b7cba70d09d5a Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 7 Mar 2016 17:52:23 +0100 Subject: [PATCH 083/320] fix typo --- core/src/main/java/org/elasticsearch/Version.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index c80f32bf7bcd..eeb4825cb90f 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -36,7 +36,7 @@ import java.io.IOException; public class Version { // The logic for ID is: XXYYZZAA, where XX is major version, YY 
is minor version, ZZ is revision, and AA is alpha/beta/rc indicator - // AA values below 25 are fro alpha builder (since 5.0), and above 25 and below 50 are beta builds, and below 99 are RC builds, with 99 indicating a release + // AA values below 25 are for alpha builder (since 5.0), and above 25 and below 50 are beta builds, and below 99 are RC builds, with 99 indicating a release // the (internal) format of the id is there so we can easily do after/before checks on the id From 45b5ab24fec2fae36e0eb205ccd00d8040ed9c7e Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 7 Mar 2016 12:42:15 -0800 Subject: [PATCH 084/320] Moved MockTerminal and created a base test case for cli commands. --- .../java/org/elasticsearch/cli/Command.java | 6 +- .../common/cli/CheckFileCommand.java | 138 ------------------ .../common/cli/TerminalTests.java | 7 +- .../logging/LoggingConfigurationTests.java | 2 +- .../InternalSettingsPreparerTests.java | 2 +- .../elasticsearch/plugins/PluginCliTests.java | 4 +- .../bootstrap/BootstrapCliParserTests.java | 10 +- .../plugins/InstallPluginCommandTests.java | 7 +- .../plugins/ListPluginsCommandTests.java | 7 +- .../plugins/RemovePluginCommandTests.java | 4 +- .../elasticsearch/cli/CommandTestCase.java | 48 ++++++ .../{common => }/cli/MockTerminal.java | 10 +- .../common/cli/CliToolTestCase.java | 1 + 13 files changed, 75 insertions(+), 171 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/common/cli/CheckFileCommand.java create mode 100644 test/framework/src/main/java/org/elasticsearch/cli/CommandTestCase.java rename test/framework/src/main/java/org/elasticsearch/{common => }/cli/MockTerminal.java (92%) diff --git a/core/src/main/java/org/elasticsearch/cli/Command.java b/core/src/main/java/org/elasticsearch/cli/Command.java index d688347099d8..6e57905b5b27 100644 --- a/core/src/main/java/org/elasticsearch/cli/Command.java +++ b/core/src/main/java/org/elasticsearch/cli/Command.java @@ -49,14 +49,14 @@ public abstract class 
Command { } /** Parses options for this command from args and executes it. */ - public final int main(String[] args, Terminal terminal) throws Exception { + protected final int main(String[] args, Terminal terminal) throws Exception { final OptionSet options; try { options = parser.parse(args); } catch (OptionException e) { printHelp(terminal); - terminal.println("ERROR: " + e.getMessage()); + terminal.println(Terminal.Verbosity.SILENT, "ERROR: " + e.getMessage()); return ExitCodes.USAGE; } @@ -69,7 +69,7 @@ public abstract class Command { if (options.has(verboseOption)) { // mutually exclusive, we can remove this with jopt-simple 5.0, which natively supports it printHelp(terminal); - terminal.println("ERROR: Cannot specify -s and -v together"); + terminal.println(Terminal.Verbosity.SILENT, "ERROR: Cannot specify -s and -v together"); return ExitCodes.USAGE; } terminal.setVerbosity(Terminal.Verbosity.SILENT); diff --git a/core/src/main/java/org/elasticsearch/common/cli/CheckFileCommand.java b/core/src/main/java/org/elasticsearch/common/cli/CheckFileCommand.java deleted file mode 100644 index e2fcbe89df8c..000000000000 --- a/core/src/main/java/org/elasticsearch/common/cli/CheckFileCommand.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.cli; - -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; - -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.attribute.PosixFileAttributeView; -import java.nio.file.attribute.PosixFileAttributes; -import java.nio.file.attribute.PosixFilePermission; -import java.nio.file.attribute.PosixFilePermissions; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; - -/** - * A helper command that checks if configured paths have been changed when running a CLI command. - * It is only executed in case of specified paths by the command and if the paths underlying filesystem - * supports posix permissions. - * - * If this is the case, a warn message is issued whenever an owner, a group or the file permissions is changed by - * the command being executed and not configured back to its prior state, which should be the task of the command - * being executed. 
- * - */ -public abstract class CheckFileCommand extends CliTool.Command { - - public CheckFileCommand(Terminal terminal) { - super(terminal); - } - - /** - * abstract method, which should implement the same logic as CliTool.Command.execute(), but is wrapped - */ - public abstract CliTool.ExitStatus doExecute(Settings settings, Environment env) throws Exception; - - /** - * Returns the array of paths, that should be checked if the permissions, user or groups have changed - * before and after execution of the command - * - */ - protected abstract Path[] pathsForPermissionsCheck(Settings settings, Environment env) throws Exception; - - @Override - public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception { - Path[] paths = pathsForPermissionsCheck(settings, env); - - if (paths == null || paths.length == 0) { - return doExecute(settings, env); - } - - Map> permissions = new HashMap<>(paths.length); - Map owners = new HashMap<>(paths.length); - Map groups = new HashMap<>(paths.length); - - if (paths != null && paths.length > 0) { - for (Path path : paths) { - try { - boolean supportsPosixPermissions = Environment.getFileStore(path).supportsFileAttributeView(PosixFileAttributeView.class); - if (supportsPosixPermissions) { - PosixFileAttributes attributes = Files.readAttributes(path, PosixFileAttributes.class); - permissions.put(path, attributes.permissions()); - owners.put(path, attributes.owner().getName()); - groups.put(path, attributes.group().getName()); - } - } catch (IOException e) { - // silently swallow if not supported, no need to log things - } - } - } - - CliTool.ExitStatus status = doExecute(settings, env); - - // check if permissions differ - for (Map.Entry> entry : permissions.entrySet()) { - if (!Files.exists(entry.getKey())) { - continue; - } - - Set permissionsBeforeWrite = entry.getValue(); - Set permissionsAfterWrite = Files.getPosixFilePermissions(entry.getKey()); - if 
(!permissionsBeforeWrite.equals(permissionsAfterWrite)) { - terminal.println(Terminal.Verbosity.SILENT, "WARNING: The file permissions of [" + entry.getKey() + "] have changed " - + "from [" + PosixFilePermissions.toString(permissionsBeforeWrite) + "] " - + "to [" + PosixFilePermissions.toString(permissionsAfterWrite) + "]"); - terminal.println(Terminal.Verbosity.SILENT, "Please ensure that the user account running Elasticsearch has read access to this file!"); - } - } - - // check if owner differs - for (Map.Entry entry : owners.entrySet()) { - if (!Files.exists(entry.getKey())) { - continue; - } - - String ownerBeforeWrite = entry.getValue(); - String ownerAfterWrite = Files.getOwner(entry.getKey()).getName(); - if (!ownerAfterWrite.equals(ownerBeforeWrite)) { - terminal.println(Terminal.Verbosity.SILENT, "WARNING: Owner of file [" + entry.getKey() + "] used to be [" + ownerBeforeWrite + "], but now is [" + ownerAfterWrite + "]"); - } - } - - // check if group differs - for (Map.Entry entry : groups.entrySet()) { - if (!Files.exists(entry.getKey())) { - continue; - } - - String groupBeforeWrite = entry.getValue(); - String groupAfterWrite = Files.readAttributes(entry.getKey(), PosixFileAttributes.class).group().getName(); - if (!groupAfterWrite.equals(groupBeforeWrite)) { - terminal.println(Terminal.Verbosity.SILENT, "WARNING: Group of file [" + entry.getKey() + "] used to be [" + groupBeforeWrite + "], but now is [" + groupAfterWrite + "]"); - } - } - - return status; - } -} diff --git a/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java b/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java index deb64e906b47..12fc4cb77e4c 100644 --- a/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java +++ b/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java @@ -19,7 +19,10 @@ package org.elasticsearch.common.cli; -public class TerminalTests extends CliToolTestCase { +import org.elasticsearch.cli.MockTerminal; 
+import org.elasticsearch.test.ESTestCase; + +public class TerminalTests extends ESTestCase { public void testVerbosity() throws Exception { MockTerminal terminal = new MockTerminal(); terminal.setVerbosity(Terminal.Verbosity.SILENT); @@ -48,7 +51,7 @@ public class TerminalTests extends CliToolTestCase { logTerminal.println(verbosity, text); String output = logTerminal.getOutput(); assertTrue(output, output.contains(text)); - logTerminal.resetOutput(); + logTerminal.reset(); } private void assertNotPrinted(MockTerminal logTerminal, Terminal.Verbosity verbosity, String text) throws Exception { diff --git a/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java b/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java index 0cca19d33bf1..5c812cca0a70 100644 --- a/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java +++ b/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java @@ -27,7 +27,7 @@ import java.util.Arrays; import org.apache.log4j.Appender; import org.apache.log4j.Logger; -import org.elasticsearch.common.cli.MockTerminal; +import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.node.internal.InternalSettingsPreparer; diff --git a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java index 33876ef61ad9..c979b2f4013a 100644 --- a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java +++ b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java @@ -24,7 +24,7 @@ import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; -import org.elasticsearch.common.cli.MockTerminal; +import org.elasticsearch.cli.MockTerminal; import 
org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java index 7a4590f8e254..708cefd91b28 100644 --- a/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java @@ -20,10 +20,8 @@ package org.elasticsearch.plugins; import org.elasticsearch.common.cli.CliToolTestCase; -import org.elasticsearch.common.cli.MockTerminal; +import org.elasticsearch.cli.MockTerminal; -import static org.elasticsearch.common.cli.CliTool.ExitStatus.OK_AND_EXIT; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java index 12b5ab9eb39d..18d6e0ac3c9a 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java @@ -25,7 +25,7 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.cli.CliTool.ExitStatus; import org.elasticsearch.common.cli.CliToolTestCase; import org.elasticsearch.cli.UserError; -import org.elasticsearch.common.cli.MockTerminal; +import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.monitor.jvm.JvmInfo; import org.junit.After; @@ -89,7 +89,7 @@ public class BootstrapCliParserTests extends CliToolTestCase { assertTrue(output, output.contains(Build.CURRENT.date())); assertTrue(output, output.contains(JvmInfo.jvmInfo().version())); - terminal.resetOutput(); + terminal.reset(); parser = new 
BootstrapCLIParser(terminal); status = parser.execute(args("start --version")); assertStatus(status, OK_AND_EXIT); @@ -177,7 +177,7 @@ public class BootstrapCliParserTests extends CliToolTestCase { String output = terminal.getOutput(); assertTrue(output, output.contains("Parameter [network.host] needs value")); - terminal.resetOutput(); + terminal.reset(); status = parser.execute(args("start --network.host --foo")); assertStatus(status, USAGE); output = terminal.getOutput(); @@ -194,7 +194,7 @@ public class BootstrapCliParserTests extends CliToolTestCase { assertTrue(output, output.contains("Unrecognized option: --unknown-param")); // single dash in extra params - terminal.resetOutput(); + terminal.reset(); parser = new BootstrapCLIParser(terminal); status = parser.execute(args("start -network.host 127.0.0.1")); assertStatus(status, USAGE); @@ -228,7 +228,7 @@ public class BootstrapCliParserTests extends CliToolTestCase { tuples.add(new Tuple<>("-h", "elasticsearch.help")); for (Tuple tuple : tuples) { - terminal.resetOutput(); + terminal.reset(); BootstrapCLIParser parser = new BootstrapCLIParser(terminal); ExitStatus status = parser.execute(args(tuple.v1())); assertStatus(status, OK_AND_EXIT); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index 342b579a1759..514090d9869d 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -36,7 +36,6 @@ import java.nio.file.attribute.BasicFileAttributes; import java.nio.file.attribute.PosixFileAttributeView; import java.nio.file.attribute.PosixFileAttributes; import java.nio.file.attribute.PosixFilePermission; -import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -45,11 +44,7 @@ import 
java.util.zip.ZipOutputStream; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.Version; -import org.elasticsearch.cli.ExitCodes; -import org.elasticsearch.common.cli.CliTool; -import org.elasticsearch.common.cli.CliToolTestCase; -import org.elasticsearch.common.cli.MockTerminal; -import org.elasticsearch.common.cli.Terminal; +import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.cli.UserError; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java index aed1696898a6..cbdd031dea18 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java @@ -22,15 +22,10 @@ package org.elasticsearch.plugins; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; -import java.util.Collections; -import java.util.List; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.cli.ExitCodes; -import org.elasticsearch.common.cli.CliTool; -import org.elasticsearch.common.cli.CliToolTestCase; -import org.elasticsearch.common.cli.MockTerminal; -import org.elasticsearch.common.cli.Terminal; +import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java index a678d4f25f6b..d9d5661b834b 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java @@ -26,9 +26,7 
@@ import java.nio.file.Path; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.cli.UserError; -import org.elasticsearch.common.cli.CliToolTestCase; -import org.elasticsearch.common.cli.MockTerminal; -import org.elasticsearch.common.cli.Terminal; +import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; diff --git a/test/framework/src/main/java/org/elasticsearch/cli/CommandTestCase.java b/test/framework/src/main/java/org/elasticsearch/cli/CommandTestCase.java new file mode 100644 index 000000000000..3af25509adbe --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/cli/CommandTestCase.java @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cli; + +import joptsimple.OptionSet; +import org.elasticsearch.common.cli.Terminal; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +/** + * A base test case for cli tools. 
+ */ +public abstract class CommandTestCase extends ESTestCase { + + protected final MockTerminal terminal = new MockTerminal(); + + @Before + public void resetTerminal() { + terminal.reset(); + terminal.setVerbosity(Terminal.Verbosity.NORMAL); + } + + protected abstract Command newCommand(); + + public String execute(String... args) throws Exception { + Command command = newCommand(); + OptionSet options = command.parser.parse(args); + command.execute(terminal, options); + return terminal.getOutput(); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/common/cli/MockTerminal.java b/test/framework/src/main/java/org/elasticsearch/cli/MockTerminal.java similarity index 92% rename from test/framework/src/main/java/org/elasticsearch/common/cli/MockTerminal.java rename to test/framework/src/main/java/org/elasticsearch/cli/MockTerminal.java index 3b2903b3fabe..bb01369ac509 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/cli/MockTerminal.java +++ b/test/framework/src/main/java/org/elasticsearch/cli/MockTerminal.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.common.cli; +package org.elasticsearch.cli; import java.io.ByteArrayOutputStream; import java.io.OutputStreamWriter; @@ -27,6 +27,8 @@ import java.nio.charset.StandardCharsets; import java.util.ArrayDeque; import java.util.Deque; +import org.elasticsearch.common.cli.Terminal; + /** * A terminal for tests which captures all output, and * can be plugged with fake input. @@ -78,8 +80,10 @@ public class MockTerminal extends Terminal { return buffer.toString("UTF-8"); } - /** Wipes the output. */ - public void resetOutput() { + /** Wipes the input and output. 
*/ + public void reset() { buffer.reset(); + textInput.clear(); + secretInput.clear(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java index 9debf4b8f33f..330758223a53 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java @@ -21,6 +21,7 @@ package org.elasticsearch.common.cli; import java.io.IOException; +import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.test.ESTestCase; From 7e16afbbf234c2e7f1b9b5928c6dc039c4f1b91a Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 7 Mar 2016 12:58:55 -0800 Subject: [PATCH 085/320] Added transport network settings to whitelist for tribe node Also fail on any path settings within tribe sections --- .../org/elasticsearch/tribe/TribeService.java | 17 +++++++++++--- .../tribe/TribeServiceTests.java | 23 +++++++++++++++---- 2 files changed, 32 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index ff12ddba00c6..b2eb1cd59f0d 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; @@ -53,6 +54,8 @@ import org.elasticsearch.env.Environment; import 
org.elasticsearch.gateway.GatewayService; import org.elasticsearch.node.Node; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.TransportSettings; import java.util.Arrays; import java.util.Collections; @@ -160,7 +163,10 @@ public class TribeService extends AbstractLifecycleComponent { private static final List PASS_THROUGH_SETTINGS = Arrays.asList( NetworkService.GLOBAL_NETWORK_HOST_SETTING.getKey(), NetworkService.GLOBAL_NETWORK_BINDHOST_SETTING.getKey(), - NetworkService.GLOBAL_NETWORK_PUBLISHHOST_SETTING.getKey() + NetworkService.GLOBAL_NETWORK_PUBLISHHOST_SETTING.getKey(), + TransportSettings.HOST.getKey(), + TransportSettings.BIND_HOST.getKey(), + TransportSettings.PUBLISH_HOST.getKey() ); private final String onConflict; private final Set droppedIndices = ConcurrentCollections.newConcurrentSet(); @@ -201,6 +207,11 @@ public class TribeService extends AbstractLifecycleComponent { * combined with tribe specific settings. 
*/ static Settings buildClientSettings(String tribeName, Settings globalSettings, Settings tribeSettings) { + for (String tribeKey : tribeSettings.getAsMap().keySet()) { + if (tribeKey.startsWith("path.")) { + throw new IllegalArgumentException("Setting [" + tribeKey + "] not allowed in tribe client [" + tribeName + "]"); + } + } Settings.Builder sb = Settings.builder().put(tribeSettings); sb.put("node.name", globalSettings.get("node.name") + "/" + tribeName); sb.put(Environment.PATH_HOME_SETTING.getKey(), Environment.PATH_HOME_SETTING.get(globalSettings)); // pass through ES home dir @@ -219,8 +230,8 @@ public class TribeService extends AbstractLifecycleComponent { } } sb.put(TRIBE_NAME_SETTING.getKey(), tribeName); - if (sb.get("http.enabled") == null) { - sb.put("http.enabled", false); + if (sb.get(NetworkModule.HTTP_ENABLED.getKey()) == null) { + sb.put(NetworkModule.HTTP_ENABLED.getKey(), false); } sb.put(Node.NODE_CLIENT_SETTING.getKey(), true); return sb.build(); diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java b/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java index 9916b6112131..877fd24a7ba6 100644 --- a/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java +++ b/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java @@ -49,11 +49,12 @@ public class TribeServiceTests extends ESTestCase { assertEquals("plugins/path", clientSettings.get("path.plugins")); assertEquals("logs/path", clientSettings.get("path.logs")); - // TODO: this should be an error, not just ignored! 
Settings tribeSettings = Settings.builder() .put("path.home", "alternate/path").build(); - clientSettings = TribeService.buildClientSettings("tribe1", globalSettings, tribeSettings); - assertEquals("some/path", clientSettings.get("path.home")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { + TribeService.buildClientSettings("tribe1", globalSettings, tribeSettings); + }); + assertTrue(e.getMessage(), e.getMessage().contains("Setting [path.home] not allowed in tribe client")); } public void testPassthroughSettings() { @@ -62,20 +63,32 @@ public class TribeServiceTests extends ESTestCase { .put("path.home", "some/path") .put("network.host", "0.0.0.0") .put("network.bind_host", "1.1.1.1") - .put("network.publish_host", "2.2.2.2").build(); + .put("network.publish_host", "2.2.2.2") + .put("transport.host", "3.3.3.3") + .put("transport.bind_host", "4.4.4.4") + .put("transport.publish_host", "5.5.5.5").build(); Settings clientSettings = TribeService.buildClientSettings("tribe1", globalSettings, Settings.EMPTY); assertEquals("0.0.0.0", clientSettings.get("network.host")); assertEquals("1.1.1.1", clientSettings.get("network.bind_host")); assertEquals("2.2.2.2", clientSettings.get("network.publish_host")); + assertEquals("3.3.3.3", clientSettings.get("transport.host")); + assertEquals("4.4.4.4", clientSettings.get("transport.bind_host")); + assertEquals("5.5.5.5", clientSettings.get("transport.publish_host")); // per tribe client overrides still work Settings tribeSettings = Settings.builder() .put("network.host", "3.3.3.3") .put("network.bind_host", "4.4.4.4") - .put("network.publish_host", "5.5.5.5").build(); + .put("network.publish_host", "5.5.5.5") + .put("transport.host", "6.6.6.6") + .put("transport.bind_host", "7.7.7.7") + .put("transport.publish_host", "8.8.8.8").build(); clientSettings = TribeService.buildClientSettings("tribe1", globalSettings, tribeSettings); assertEquals("3.3.3.3", clientSettings.get("network.host")); 
assertEquals("4.4.4.4", clientSettings.get("network.bind_host")); assertEquals("5.5.5.5", clientSettings.get("network.publish_host")); + assertEquals("6.6.6.6", clientSettings.get("transport.host")); + assertEquals("7.7.7.7", clientSettings.get("transport.bind_host")); + assertEquals("8.8.8.8", clientSettings.get("transport.publish_host")); } } From 071d578953961ba79e87dffd59a8c73bd1028f0c Mon Sep 17 00:00:00 2001 From: Jun Ohtani Date: Mon, 7 Mar 2016 10:01:38 +0900 Subject: [PATCH 086/320] Analysis : Allow string explain param in JSON Move some test methods from AnalylzeActionIT to RestAnalyzeActionTest Allow string explain param if it can parse Fix wrong param name in rest-api-spec Closes #16925 --- .../indices/analyze/RestAnalyzeAction.java | 8 +- .../indices/analyze/AnalyzeActionIT.java | 47 --------- .../analyze/RestAnalyzeActionTests.java | 96 +++++++++++++++++++ .../rest-api-spec/api/indices.analyze.json | 4 +- .../test/indices.analyze/10_analyze.yaml | 2 +- 5 files changed, 105 insertions(+), 52 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java index 4e90a6a3a851..a3d0cc84559a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java @@ -144,8 +144,12 @@ public class RestAnalyzeAction extends BaseRestHandler { charFilters.add(parser.text()); } analyzeRequest.charFilters(charFilters.toArray(new String[charFilters.size()])); - } else if (parseFieldMatcher.match(currentFieldName, Fields.EXPLAIN) && token == XContentParser.Token.VALUE_BOOLEAN) { - analyzeRequest.explain(parser.booleanValue()); + } else if 
(parseFieldMatcher.match(currentFieldName, Fields.EXPLAIN)) { + if (parser.isBooleanValue()) { + analyzeRequest.explain(parser.booleanValue()); + } else { + throw new IllegalArgumentException(currentFieldName + " must be either 'true' or 'false'"); + } } else if (parseFieldMatcher.match(currentFieldName, Fields.ATTRIBUTES) && token == XContentParser.Token.START_ARRAY){ List attributes = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java index 92b96d8e47d8..13ef13c5b5f6 100644 --- a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java @@ -196,53 +196,6 @@ public class AnalyzeActionIT extends ESIntegTestCase { return randomBoolean() ? "test" : "alias"; } - public void testParseXContentForAnalyzeReuqest() throws Exception { - BytesReference content = XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("tokenizer", "keyword") - .array("filters", "lowercase") - .endObject().bytes(); - - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - - RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); - - assertThat(analyzeRequest.text().length, equalTo(1)); - assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); - assertThat(analyzeRequest.tokenizer(), equalTo("keyword")); - assertThat(analyzeRequest.tokenFilters(), equalTo(new String[]{"lowercase"})); - } - - public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() throws Exception { - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - - try { - RestAnalyzeAction.buildFromContent(new BytesArray("{invalid_json}"), analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); - 
fail("shouldn't get here"); - } catch (Exception e) { - assertThat(e, instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), equalTo("Failed to parse request body")); - } - } - - public void testParseXContentForAnalyzeRequestWithUnknownParamThrowsException() throws Exception { - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - BytesReference invalidContent =XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("unknown", "keyword") - .endObject().bytes(); - - try { - RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); - fail("shouldn't get here"); - } catch (Exception e) { - assertThat(e, instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]")); - } - } - public void testAnalyzerWithMultiValues() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias"))); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java new file mode 100644 index 000000000000..10bbeb30aeb7 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java @@ -0,0 +1,96 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.rest.action.admin.indices.analyze; + +import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.*; + +public class RestAnalyzeActionTests extends ESTestCase { + + public void testParseXContentForAnalyzeReuqest() throws Exception { + BytesReference content = XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .array("filters", "lowercase") + .endObject().bytes(); + + AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + + RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); + + assertThat(analyzeRequest.text().length, equalTo(1)); + assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); + assertThat(analyzeRequest.tokenizer(), equalTo("keyword")); + assertThat(analyzeRequest.tokenFilters(), equalTo(new String[]{"lowercase"})); + } + + public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() throws Exception { + AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + + try { + RestAnalyzeAction.buildFromContent(new BytesArray("{invalid_json}"), analyzeRequest, new 
ParseFieldMatcher(Settings.EMPTY)); + fail("shouldn't get here"); + } catch (Exception e) { + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), equalTo("Failed to parse request body")); + } + } + + public void testParseXContentForAnalyzeRequestWithUnknownParamThrowsException() throws Exception { + AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + BytesReference invalidContent = XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("unknown", "keyword") + .endObject().bytes(); + + try { + RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); + fail("shouldn't get here"); + } catch (Exception e) { + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]")); + } + } + + public void testParseXContentForAnalyzeRequestWithInvalidStringExplainParamThrowsException() throws Exception { + AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + BytesReference invalidExplain = XContentFactory.jsonBuilder() + .startObject() + .field("explain", "fals") + .endObject().bytes(); + try { + RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); + fail("shouldn't get here"); + } catch (Exception e) { + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'")); + } + } + + +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json index 9fe9bfe3cadc..c3dc0a18b453 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json @@ -44,13 +44,13 @@ "type" : "string", "description" : "The name of the tokenizer to use for the 
analysis" }, - "detail": { + "explain": { "type" : "boolean", "description" : "With `true`, outputs more advanced details. (default: false)" }, "attributes": { "type" : "list", - "description" : "A comma-separated list of token attributes to output, this parameter works only with `detail=true`" + "description" : "A comma-separated list of token attributes to output, this parameter works only with `explain=true`" }, "format": { "type": "enum", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml index 88160ef4f1e1..93ffe0d5db17 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml @@ -75,7 +75,7 @@ setup: "Detail response with Analyzer": - do: indices.analyze: - body: {"text": "This is troubled", "analyzer": standard, "explain": true} + body: {"text": "This is troubled", "analyzer": standard, "explain": "true"} - length: { detail.analyzer.tokens: 3 } - match: { detail.analyzer.name: standard } - match: { detail.analyzer.tokens.0.token: this } From 48cb81e30bb7cc1cdc39a5484456c606e6ff63be Mon Sep 17 00:00:00 2001 From: Jun Ohtani Date: Mon, 7 Mar 2016 15:13:59 +0900 Subject: [PATCH 087/320] Analysis : Allow string explain param in JSON Fix typo Remove unused import Closes #16925 --- .../org/elasticsearch/indices/analyze/AnalyzeActionIT.java | 7 ------- .../admin/indices/analyze/RestAnalyzeActionTests.java | 2 +- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java index 13ef13c5b5f6..23a197dbab62 100644 --- a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java +++ 
b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java @@ -19,15 +19,8 @@ package org.elasticsearch.indices.analyze; import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder; import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; -import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.rest.action.admin.indices.analyze.RestAnalyzeAction; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.core.IsNull; diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java index 10bbeb30aeb7..dfda667df07c 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java @@ -30,7 +30,7 @@ import static org.hamcrest.Matchers.*; public class RestAnalyzeActionTests extends ESTestCase { - public void testParseXContentForAnalyzeReuqest() throws Exception { + public void testParseXContentForAnalyzeRequest() throws Exception { BytesReference content = XContentFactory.jsonBuilder() .startObject() .field("text", "THIS IS A TEST") From ad7fbe72517633cdc8993544a011c2e682826c94 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Mon, 7 Mar 2016 17:01:41 +0100 Subject: [PATCH 088/320] Add test for the index_options on a keyword field. #16990 This found a bug in the validation, which was checking the wrong IndexOptions. 
--- .../index/mapper/core/KeywordFieldMapper.java | 4 +-- .../mapper/core/KeywordFieldMapperTests.java | 32 +++++++++++++++++++ 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/KeywordFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/KeywordFieldMapper.java index 171bc8de7947..3f01493590ce 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/KeywordFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/KeywordFieldMapper.java @@ -85,9 +85,9 @@ public final class KeywordFieldMapper extends FieldMapper implements AllFieldMap @Override public Builder indexOptions(IndexOptions indexOptions) { - if (fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) > 0) { + if (indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) > 0) { throw new IllegalArgumentException("The [keyword] field does not support positions, got [index_options]=" - + indexOptionToString(fieldType.indexOptions())); + + indexOptionToString(indexOptions)); } return super.indexOptions(indexOptions); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java index bdb3f9762ef8..8af92f266a57 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; import java.io.IOException; +import java.util.Arrays; import static org.hamcrest.Matchers.equalTo; @@ -200,4 +201,35 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase { assertEquals(1, fields.length); assertEquals(DocValuesType.NONE, fields[0].fieldType().docValuesType()); } + + public void testIndexOptions() throws IOException { + String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "keyword") + .field("index_options", "freqs").endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + assertEquals(IndexOptions.DOCS_AND_FREQS, fields[0].fieldType().indexOptions()); + + for (String indexOptions : Arrays.asList("positions", "offsets")) { + final String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "keyword") + .field("index_options", indexOptions).endObject().endObject() + .endObject().endObject().string(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new CompressedXContent(mapping2))); + assertEquals("The [keyword] field does not support positions, got [index_options]=" + indexOptions, e.getMessage()); + } + } } From 3d13c27fa0df12018f35f1b20987c90277e9b6f9 Mon Sep 17 00:00:00 2001 From: Jun Ohtani Date: Tue, 8 Mar 2016 17:31:23 +0900 Subject: [PATCH 089/320] fix checkstyle error --- .../action/admin/indices/analyze/RestAnalyzeActionTests.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java index dfda667df07c..34e8315372bf 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java +++ 
b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java @@ -26,7 +26,9 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.test.ESTestCase; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.startsWith; public class RestAnalyzeActionTests extends ESTestCase { From 026519e81b9bc92940b58dbba73f460771bc2b8c Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Mon, 7 Mar 2016 16:41:19 +0100 Subject: [PATCH 090/320] ParseFieldMatcher should log when using deprecated settings. #16988 I always thought ParseFieldMatcher would log when using a deprecated setting, but it does not. --- .../org/elasticsearch/common/ParseField.java | 27 +++---- .../common/ParseFieldMatcher.java | 25 +++--- .../elasticsearch/common/ParseFieldTests.java | 79 ++++++++----------- 3 files changed, 59 insertions(+), 72 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/ParseField.java b/core/src/main/java/org/elasticsearch/common/ParseField.java index 0aad723e6fb0..a0978723d0e6 100644 --- a/core/src/main/java/org/elasticsearch/common/ParseField.java +++ b/core/src/main/java/org/elasticsearch/common/ParseField.java @@ -18,26 +18,23 @@ */ package org.elasticsearch.common; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; -import java.util.EnumSet; import java.util.HashSet; /** * Holds a field that can be found in a request while parsing and its different variants, which may be deprecated. 
*/ public class ParseField { + + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(ParseField.class)); + private final String camelCaseName; private final String underscoreName; private final String[] deprecatedNames; private String allReplacedWith = null; - static final EnumSet EMPTY_FLAGS = EnumSet.noneOf(Flag.class); - static final EnumSet STRICT_FLAGS = EnumSet.of(Flag.STRICT); - - enum Flag { - STRICT - } - public ParseField(String value, String... deprecatedNames) { camelCaseName = Strings.toCamelCase(value); underscoreName = Strings.toUnderscoreCase(value); @@ -80,19 +77,21 @@ public class ParseField { return parseField; } - boolean match(String currentFieldName, EnumSet flags) { + boolean match(String currentFieldName, boolean strict) { if (allReplacedWith == null && (currentFieldName.equals(camelCaseName) || currentFieldName.equals(underscoreName))) { return true; } String msg; for (String depName : deprecatedNames) { if (currentFieldName.equals(depName)) { - if (flags.contains(Flag.STRICT)) { - msg = "Deprecated field [" + currentFieldName + "] used, expected [" + underscoreName + "] instead"; - if (allReplacedWith != null) { - msg = "Deprecated field [" + currentFieldName + "] used, replaced by [" + allReplacedWith + "]"; - } + msg = "Deprecated field [" + currentFieldName + "] used, expected [" + underscoreName + "] instead"; + if (allReplacedWith != null) { + msg = "Deprecated field [" + currentFieldName + "] used, replaced by [" + allReplacedWith + "]"; + } + if (strict) { throw new IllegalArgumentException(msg); + } else { + DEPRECATION_LOGGER.deprecated(msg); } return true; } diff --git a/core/src/main/java/org/elasticsearch/common/ParseFieldMatcher.java b/core/src/main/java/org/elasticsearch/common/ParseFieldMatcher.java index 137e5b4a966c..9866694a230e 100644 --- a/core/src/main/java/org/elasticsearch/common/ParseFieldMatcher.java +++ 
b/core/src/main/java/org/elasticsearch/common/ParseFieldMatcher.java @@ -21,29 +21,28 @@ package org.elasticsearch.common; import org.elasticsearch.common.settings.Settings; -import java.util.EnumSet; - /** * Matcher to use in combination with {@link ParseField} while parsing requests. Matches a {@link ParseField} * against a field name and throw deprecation exception depending on the current value of the {@link #PARSE_STRICT} setting. */ public class ParseFieldMatcher { public static final String PARSE_STRICT = "index.query.parse.strict"; - public static final ParseFieldMatcher EMPTY = new ParseFieldMatcher(ParseField.EMPTY_FLAGS); - public static final ParseFieldMatcher STRICT = new ParseFieldMatcher(ParseField.STRICT_FLAGS); + public static final ParseFieldMatcher EMPTY = new ParseFieldMatcher(false); + public static final ParseFieldMatcher STRICT = new ParseFieldMatcher(true); - private final EnumSet parseFlags; + private final boolean strict; public ParseFieldMatcher(Settings settings) { - if (settings.getAsBoolean(PARSE_STRICT, false)) { - this.parseFlags = EnumSet.of(ParseField.Flag.STRICT); - } else { - this.parseFlags = ParseField.EMPTY_FLAGS; - } + this(settings.getAsBoolean(PARSE_STRICT, false)); } - public ParseFieldMatcher(EnumSet parseFlags) { - this.parseFlags = parseFlags; + public ParseFieldMatcher(boolean strict) { + this.strict = strict; + } + + /** Should deprecated settings be rejected? 
*/ + public boolean isStrict() { + return strict; } /** @@ -55,6 +54,6 @@ public class ParseFieldMatcher { * @return true whenever the parse field that we are looking for was found, false otherwise */ public boolean match(String fieldName, ParseField parseField) { - return parseField.match(fieldName, parseFlags); + return parseField.match(fieldName, strict); } } diff --git a/core/src/test/java/org/elasticsearch/common/ParseFieldTests.java b/core/src/test/java/org/elasticsearch/common/ParseFieldTests.java index f4b8747ccdc3..3770cd25c109 100644 --- a/core/src/test/java/org/elasticsearch/common/ParseFieldTests.java +++ b/core/src/test/java/org/elasticsearch/common/ParseFieldTests.java @@ -20,8 +20,7 @@ package org.elasticsearch.common; import org.elasticsearch.test.ESTestCase; -import java.util.EnumSet; - +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.sameInstance; @@ -33,38 +32,29 @@ public class ParseFieldTests extends ESTestCase { String[] deprecated = new String[]{"barFoo", "bar_foo"}; ParseField withDeprecations = field.withDeprecation("Foobar", randomFrom(deprecated)); assertThat(field, not(sameInstance(withDeprecations))); - assertThat(field.match(randomFrom(values), ParseField.EMPTY_FLAGS), is(true)); - assertThat(field.match("foo bar", ParseField.EMPTY_FLAGS), is(false)); - assertThat(field.match(randomFrom(deprecated), ParseField.EMPTY_FLAGS), is(false)); - assertThat(field.match("barFoo", ParseField.EMPTY_FLAGS), is(false)); + assertThat(field.match(randomFrom(values), false), is(true)); + assertThat(field.match("foo bar", false), is(false)); + assertThat(field.match(randomFrom(deprecated), false), is(false)); + assertThat(field.match("barFoo", false), is(false)); - assertThat(withDeprecations.match(randomFrom(values), ParseField.EMPTY_FLAGS), is(true)); - assertThat(withDeprecations.match("foo bar", 
ParseField.EMPTY_FLAGS), is(false)); - assertThat(withDeprecations.match(randomFrom(deprecated), ParseField.EMPTY_FLAGS), is(true)); - assertThat(withDeprecations.match("barFoo", ParseField.EMPTY_FLAGS), is(true)); + assertThat(withDeprecations.match(randomFrom(values), false), is(true)); + assertThat(withDeprecations.match("foo bar", false), is(false)); + assertThat(withDeprecations.match(randomFrom(deprecated), false), is(true)); + assertThat(withDeprecations.match("barFoo", false), is(true)); // now with strict mode - EnumSet flags = EnumSet.of(ParseField.Flag.STRICT); - assertThat(field.match(randomFrom(values), flags), is(true)); - assertThat(field.match("foo bar", flags), is(false)); - assertThat(field.match(randomFrom(deprecated), flags), is(false)); - assertThat(field.match("barFoo", flags), is(false)); + assertThat(field.match(randomFrom(values), true), is(true)); + assertThat(field.match("foo bar", true), is(false)); + assertThat(field.match(randomFrom(deprecated), true), is(false)); + assertThat(field.match("barFoo", true), is(false)); - assertThat(withDeprecations.match(randomFrom(values), flags), is(true)); - assertThat(withDeprecations.match("foo bar", flags), is(false)); - try { - withDeprecations.match(randomFrom(deprecated), flags); - fail(); - } catch (IllegalArgumentException ex) { - - } - - try { - withDeprecations.match("barFoo", flags); - fail(); - } catch (IllegalArgumentException ex) { - - } + assertThat(withDeprecations.match(randomFrom(values), true), is(true)); + assertThat(withDeprecations.match("foo bar", true), is(false)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> withDeprecations.match(randomFrom(deprecated), true)); + assertThat(e.getMessage(), containsString("used, expected [foo_bar] instead")); + e = expectThrows(IllegalArgumentException.class, () -> withDeprecations.match("barFoo", true)); + assertThat(e.getMessage(), containsString("Deprecated field [barFoo] used, expected [foo_bar] 
instead")); } public void testAllDeprecated() { @@ -72,30 +62,29 @@ public class ParseFieldTests extends ESTestCase { boolean withDeprecatedNames = randomBoolean(); String[] deprecated = new String[]{"text", "same_as_text"}; - String[] allValues = values; + String[] allValues; if (withDeprecatedNames) { - String[] newArray = new String[allValues.length + deprecated.length]; - System.arraycopy(allValues, 0, newArray, 0, allValues.length); - System.arraycopy(deprecated, 0, newArray, allValues.length, deprecated.length); + String[] newArray = new String[values.length + deprecated.length]; + System.arraycopy(values, 0, newArray, 0, values.length); + System.arraycopy(deprecated, 0, newArray, values.length, deprecated.length); allValues = newArray; + } else { + allValues = values; } - ParseField field = new ParseField(randomFrom(values)); + ParseField field; if (withDeprecatedNames) { - field = field.withDeprecation(deprecated); + field = new ParseField(randomFrom(values)).withDeprecation(deprecated).withAllDeprecated("like"); + } else { + field = new ParseField(randomFrom(values)).withAllDeprecated("like"); } - field = field.withAllDeprecated("like"); // strict mode off - assertThat(field.match(randomFrom(allValues), ParseField.EMPTY_FLAGS), is(true)); - assertThat(field.match("not a field name", ParseField.EMPTY_FLAGS), is(false)); + assertThat(field.match(randomFrom(allValues), false), is(true)); + assertThat(field.match("not a field name", false), is(false)); // now with strict mode - EnumSet flags = EnumSet.of(ParseField.Flag.STRICT); - try { - field.match(randomFrom(allValues), flags); - fail(); - } catch (IllegalArgumentException ex) { - } + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> field.match(randomFrom(allValues), true)); + assertThat(e.getMessage(), containsString(" used, replaced by [like]")); } } From c9e1ccf6106065b5fdeb2171a4fdd0f03c056af6 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Tue, 8 Mar 2016 10:26:33 
+0100 Subject: [PATCH 091/320] [TEST] Fix newline issue in PluginCliTests on Windows --- .../org/elasticsearch/common/cli/CliToolTestCase.java | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java index 9debf4b8f33f..06c197a05da7 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java @@ -28,6 +28,10 @@ import org.elasticsearch.test.StreamsUtils; import org.junit.After; import org.junit.Before; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.isEmptyString; +import static org.hamcrest.Matchers.not; + public abstract class CliToolTestCase extends ESTestCase { @Before @@ -51,8 +55,10 @@ public abstract class CliToolTestCase extends ESTestCase { public static void assertTerminalOutputContainsHelpFile(MockTerminal terminal, String classPath) throws IOException { String output = terminal.getOutput(); - assertFalse(output, output.isEmpty()); + assertThat(output, not(isEmptyString())); String expectedDocs = StreamsUtils.copyToStringFromClasspath(classPath); - assertTrue(output, output.contains(expectedDocs)); + // convert to *nix newlines as MockTerminal used for tests also uses *nix newlines + expectedDocs = expectedDocs.replace("\r\n", "\n"); + assertThat(output, containsString(expectedDocs)); } } From a4b5fbedb886f85837a48c4a895f9c7d9a8c4094 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Tue, 8 Mar 2016 10:28:26 +0100 Subject: [PATCH 092/320] Moves SortParser:parse(...) to only require QueryShardContext This removes the need for accessing the SearchContext when parsing Sort elements to queries. After applying the patch only a QueryShardContext is needed. 
Relates to #15178 --- .../index/fielddata/IndexFieldData.java | 15 +++++++++---- .../support/NestedInnerQueryParseSupport.java | 5 ++--- .../search/sort/GeoDistanceSortParser.java | 21 ++++++++++--------- .../search/sort/ScriptSortParser.java | 21 +++++++++++-------- .../search/sort/SortParseElement.java | 15 +++++++------ .../elasticsearch/search/sort/SortParser.java | 4 ++-- .../fielddata/AbstractFieldDataTestCase.java | 2 +- .../search/sort/SortParserTests.java | 4 ++-- 8 files changed, 48 insertions(+), 39 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java index ffa23bf56e42..172e16d8f353 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java @@ -20,10 +20,14 @@ package org.elasticsearch.index.fielddata; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparatorSource; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.SortField; import org.apache.lucene.search.Weight; @@ -122,11 +126,11 @@ public interface IndexFieldData extends IndexCompone public static class Nested { private final BitSetProducer rootFilter; - private final Weight innerFilter; + private final Query innerQuery; - public Nested(BitSetProducer rootFilter, Weight innerFilter) { + public Nested(BitSetProducer rootFilter, Query innerQuery) { this.rootFilter = rootFilter; - this.innerFilter = innerFilter; + this.innerQuery = innerQuery; } /** @@ -140,7 +144,10 @@ public interface 
IndexFieldData extends IndexCompone * Get a {@link DocIdSet} that matches the inner documents. */ public DocIdSetIterator innerDocs(LeafReaderContext ctx) throws IOException { - Scorer s = innerFilter.scorer(ctx); + final IndexReaderContext topLevelCtx = ReaderUtil.getTopLevelContext(ctx); + IndexSearcher indexSearcher = new IndexSearcher(topLevelCtx); + Weight weight = indexSearcher.createNormalizedWeight(innerQuery, false); + Scorer s = weight.scorer(ctx); return s == null ? null : s.iterator(); } } diff --git a/core/src/main/java/org/elasticsearch/index/query/support/NestedInnerQueryParseSupport.java b/core/src/main/java/org/elasticsearch/index/query/support/NestedInnerQueryParseSupport.java index 9923728e3bd8..86983026b192 100644 --- a/core/src/main/java/org/elasticsearch/index/query/support/NestedInnerQueryParseSupport.java +++ b/core/src/main/java/org/elasticsearch/index/query/support/NestedInnerQueryParseSupport.java @@ -31,7 +31,6 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; @@ -61,8 +60,8 @@ public class NestedInnerQueryParseSupport { protected ObjectMapper nestedObjectMapper; private ObjectMapper parentObjectMapper; - public NestedInnerQueryParseSupport(XContentParser parser, SearchContext searchContext) { - shardContext = searchContext.getQueryShardContext(); + public NestedInnerQueryParseSupport(XContentParser parser, QueryShardContext context) { + shardContext = context; parseContext = shardContext.parseContext(); shardContext.reset(parser); diff --git a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java index 27c8b8e0ed58..b9407b31bf6c 100644 --- 
a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java +++ b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java @@ -43,9 +43,9 @@ import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport; import org.elasticsearch.search.MultiValueMode; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.ArrayList; @@ -62,7 +62,7 @@ public class GeoDistanceSortParser implements SortParser { } @Override - public SortField parse(XContentParser parser, SearchContext context) throws Exception { + public SortField parse(XContentParser parser, QueryShardContext context) throws Exception { String fieldName = null; List geoPoints = new ArrayList<>(); DistanceUnit unit = DistanceUnit.DEFAULT; @@ -71,7 +71,7 @@ public class GeoDistanceSortParser implements SortParser { MultiValueMode sortMode = null; NestedInnerQueryParseSupport nestedHelper = null; - final boolean indexCreatedBeforeV2_0 = context.indexShard().indexSettings().getIndexVersionCreated().before(Version.V_2_0_0); + final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0); boolean coerce = GeoDistanceSortBuilder.DEFAULT_COERCE; boolean ignoreMalformed = GeoDistanceSortBuilder.DEFAULT_IGNORE_MALFORMED; @@ -155,12 +155,12 @@ public class GeoDistanceSortParser implements SortParser { throw new IllegalArgumentException("sort_mode [sum] isn't supported for sorting by geo distance"); } - MappedFieldType fieldType = context.smartNameFieldType(fieldName); + MappedFieldType fieldType = context.fieldMapper(fieldName); if (fieldType == null) { throw new IllegalArgumentException("failed to find 
mapper for [" + fieldName + "] for geo distance based sort"); } final MultiValueMode finalSortMode = sortMode; // final reference for use in the anonymous class - final IndexGeoPointFieldData geoIndexFieldData = context.fieldData().getForField(fieldType); + final IndexGeoPointFieldData geoIndexFieldData = context.getForField(fieldType); final FixedSourceDistance[] distances = new FixedSourceDistance[geoPoints.size()]; for (int i = 0; i< geoPoints.size(); i++) { distances[i] = geoDistance.fixedSourceDistance(geoPoints.get(i).lat(), geoPoints.get(i).lon(), unit); @@ -168,15 +168,16 @@ public class GeoDistanceSortParser implements SortParser { final Nested nested; if (nestedHelper != null && nestedHelper.getPath() != null) { - BitSetProducer rootDocumentsFilter = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter()); - Query innerDocumentsFilter; + BitSetProducer rootDocumentsFilter = context.bitsetFilter(Queries.newNonNestedFilter()); + Query innerDocumentsQuery; if (nestedHelper.filterFound()) { // TODO: use queries instead - innerDocumentsFilter = nestedHelper.getInnerFilter(); + innerDocumentsQuery = nestedHelper.getInnerFilter(); } else { - innerDocumentsFilter = nestedHelper.getNestedObjectMapper().nestedTypeFilter(); + innerDocumentsQuery = nestedHelper.getNestedObjectMapper().nestedTypeFilter(); } - nested = new Nested(rootDocumentsFilter, context.searcher().createNormalizedWeight(innerDocumentsFilter, false)); + + nested = new Nested(rootDocumentsFilter, innerDocumentsQuery); } else { nested = null; } diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java index e4fe2c08f758..c30ea503d80e 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java @@ -27,6 +27,7 @@ import org.apache.lucene.search.SortField; import 
org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.FieldData; @@ -37,6 +38,7 @@ import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport; import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.Script; @@ -68,7 +70,7 @@ public class ScriptSortParser implements SortParser { } @Override - public SortField parse(XContentParser parser, SearchContext context) throws Exception { + public SortField parse(XContentParser parser, QueryShardContext context) throws Exception { ScriptParameterParser scriptParameterParser = new ScriptParameterParser(); Script script = null; String type = null; @@ -122,19 +124,20 @@ public class ScriptSortParser implements SortParser { script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), params); } } else if (params != null) { - throw new SearchParseException(context, "script params must be specified inside script object", parser.getTokenLocation()); + throw new ParsingException(parser.getTokenLocation(), "script params must be specified inside script object"); } if (script == null) { - throw new SearchParseException(context, "_script sorting requires setting the script to sort by", parser.getTokenLocation()); + throw new ParsingException(parser.getTokenLocation(), "_script sorting requires setting the script to sort by"); 
} if (type == null) { - throw new SearchParseException(context, "_script sorting requires setting the type of the script", parser.getTokenLocation()); + throw new ParsingException(parser.getTokenLocation(), "_script sorting requires setting the type of the script"); } - final SearchScript searchScript = context.scriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH, Collections.emptyMap()); + final SearchScript searchScript = context.getScriptService().search( + context.lookup(), script, ScriptContext.Standard.SEARCH, Collections.emptyMap()); if (STRING_SORT_TYPE.equals(type) && (sortMode == MultiValueMode.SUM || sortMode == MultiValueMode.AVG)) { - throw new SearchParseException(context, "type [string] doesn't support mode [" + sortMode + "]", parser.getTokenLocation()); + throw new ParsingException(parser.getTokenLocation(), "type [string] doesn't support mode [" + sortMode + "]"); } if (sortMode == null) { @@ -144,7 +147,7 @@ public class ScriptSortParser implements SortParser { // If nested_path is specified, then wrap the `fieldComparatorSource` in a `NestedFieldComparatorSource` final Nested nested; if (nestedHelper != null && nestedHelper.getPath() != null) { - BitSetProducer rootDocumentsFilter = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter()); + BitSetProducer rootDocumentsFilter = context.bitsetFilter(Queries.newNonNestedFilter()); Query innerDocumentsFilter; if (nestedHelper.filterFound()) { // TODO: use queries instead @@ -152,7 +155,7 @@ public class ScriptSortParser implements SortParser { } else { innerDocumentsFilter = nestedHelper.getNestedObjectMapper().nestedTypeFilter(); } - nested = new Nested(rootDocumentsFilter, context.searcher().createNormalizedWeight(innerDocumentsFilter, false)); + nested = new Nested(rootDocumentsFilter, innerDocumentsFilter); } else { nested = null; } @@ -205,7 +208,7 @@ public class ScriptSortParser implements SortParser { }; break; default: - throw new 
SearchParseException(context, "custom script sort type [" + type + "] not supported", parser.getTokenLocation()); + throw new ParsingException(parser.getTokenLocation(), "custom script sort type [" + type + "] not supported"); } return new SortField("_script", fieldComparatorSource, reverse); diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java b/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java index a99158787d3a..83538bd96722 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java @@ -140,7 +140,7 @@ public class SortParseElement implements SearchParseElement { addSortField(context, sortFields, fieldName, reverse, unmappedType, missing, sortMode, nestedFilterParseHelper); } else { if (PARSERS.containsKey(fieldName)) { - sortFields.add(PARSERS.get(fieldName).parse(parser, context)); + sortFields.add(PARSERS.get(fieldName).parse(parser, context.getQueryShardContext())); } else { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -168,7 +168,7 @@ public class SortParseElement implements SearchParseElement { sortMode = MultiValueMode.fromString(parser.text()); } else if ("nested_path".equals(innerJsonName) || "nestedPath".equals(innerJsonName)) { if (nestedFilterParseHelper == null) { - nestedFilterParseHelper = new NestedInnerQueryParseSupport(parser, context); + nestedFilterParseHelper = new NestedInnerQueryParseSupport(parser, context.getQueryShardContext()); } nestedFilterParseHelper.setPath(parser.text()); } else { @@ -177,7 +177,7 @@ public class SortParseElement implements SearchParseElement { } else if (token == XContentParser.Token.START_OBJECT) { if ("nested_filter".equals(innerJsonName) || "nestedFilter".equals(innerJsonName)) { if (nestedFilterParseHelper == null) { - nestedFilterParseHelper = new 
NestedInnerQueryParseSupport(parser, context); + nestedFilterParseHelper = new NestedInnerQueryParseSupport(parser, context.getQueryShardContext()); } nestedFilterParseHelper.filter(); } else { @@ -239,14 +239,13 @@ public class SortParseElement implements SearchParseElement { final Nested nested; if (nestedHelper != null && nestedHelper.getPath() != null) { BitSetProducer rootDocumentsFilter = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter()); - Query innerDocumentsFilter; + Query innerDocumentsQuery; if (nestedHelper.filterFound()) { - // TODO: use queries instead - innerDocumentsFilter = nestedHelper.getInnerFilter(); + innerDocumentsQuery = nestedHelper.getInnerFilter(); } else { - innerDocumentsFilter = nestedHelper.getNestedObjectMapper().nestedTypeFilter(); + innerDocumentsQuery = nestedHelper.getNestedObjectMapper().nestedTypeFilter(); } - nested = new Nested(rootDocumentsFilter, context.searcher().createNormalizedWeight(innerDocumentsFilter, false)); + nested = new Nested(rootDocumentsFilter, innerDocumentsQuery); } else { nested = null; } diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortParser.java b/core/src/main/java/org/elasticsearch/search/sort/SortParser.java index 6383afd8845d..727e576a85e4 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortParser.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortParser.java @@ -21,7 +21,7 @@ package org.elasticsearch.search.sort; import org.apache.lucene.search.SortField; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.index.query.QueryShardContext; /** * @@ -30,5 +30,5 @@ public interface SortParser { String[] names(); - SortField parse(XContentParser parser, SearchContext context) throws Exception; + SortField parse(XContentParser parser, QueryShardContext context) throws Exception; } diff --git 
a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index 6f8b5a45df0f..66487c54bf29 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -168,7 +168,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase { protected Nested createNested(IndexSearcher searcher, Query parentFilter, Query childFilter) throws IOException { BitsetFilterCache s = indexService.cache().bitsetFilterCache(); - return new Nested(s.getBitSetProducer(parentFilter), searcher.createNormalizedWeight(childFilter, false)); + return new Nested(s.getBitSetProducer(parentFilter), childFilter); } public void testEmpty() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/sort/SortParserTests.java b/core/src/test/java/org/elasticsearch/search/sort/SortParserTests.java index cbd7b5468b22..0c64b7e7b158 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/SortParserTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/SortParserTests.java @@ -50,7 +50,7 @@ public class SortParserTests extends ESSingleNodeTestCase { XContentParser parser = XContentHelper.createParser(sortBuilder.bytes()); parser.nextToken(); GeoDistanceSortParser geoParser = new GeoDistanceSortParser(); - geoParser.parse(parser, context); + geoParser.parse(parser, context.getQueryShardContext()); sortBuilder = jsonBuilder(); sortBuilder.startObject(); @@ -139,6 +139,6 @@ public class SortParserTests extends ESSingleNodeTestCase { XContentParser parser = XContentHelper.createParser(sortBuilder.bytes()); parser.nextToken(); GeoDistanceSortParser geoParser = new GeoDistanceSortParser(); - geoParser.parse(parser, context); + geoParser.parse(parser, context.getQueryShardContext()); } } From 
71ac12f4a97b77f020ecd957bb580b23920215c4 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Thu, 3 Mar 2016 16:12:41 +0100 Subject: [PATCH 093/320] Prevent closing index during snapshot restore Closes #16933 --- .../metadata/MetaDataIndexStateService.java | 29 ++++++++- .../AbstractSnapshotIntegTestCase.java | 26 ++++++++ .../SharedClusterSnapshotRestoreIT.java | 60 +++++++++++++++++++ 3 files changed, 114 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java index df26df29800e..6639f9bdbd60 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java @@ -19,12 +19,14 @@ package org.elasticsearch.cluster.metadata; +import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.close.CloseIndexClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexClusterStateUpdateRequest; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.RestoreInProgress; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -37,11 +39,14 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashSet; import 
java.util.List; +import java.util.Set; /** * Service responsible for submitting open/close index requests @@ -78,7 +83,7 @@ public class MetaDataIndexStateService extends AbstractComponent { @Override public ClusterState execute(ClusterState currentState) { - List indicesToClose = new ArrayList<>(); + Set indicesToClose = new HashSet<>(); for (String index : request.indices()) { IndexMetaData indexMetaData = currentState.metaData().index(index); if (indexMetaData == null) { @@ -94,6 +99,28 @@ public class MetaDataIndexStateService extends AbstractComponent { return currentState; } + // Check if any of the indices to be closed are currently being restored from a snapshot and fail closing if such an index + // is found as closing an index that is being restored makes the index unusable (it cannot be recovered). + RestoreInProgress restore = currentState.custom(RestoreInProgress.TYPE); + if (restore != null) { + Set indicesToFail = null; + for (RestoreInProgress.Entry entry : restore.entries()) { + for (ObjectObjectCursor shard : entry.shards()) { + if (!shard.value.state().completed()) { + if (indicesToClose.contains(shard.key.getIndexName())) { + if (indicesToFail == null) { + indicesToFail = new HashSet<>(); + } + indicesToFail.add(shard.key.getIndexName()); + } + } + } + } + if (indicesToFail != null) { + throw new IllegalArgumentException("Cannot close indices that are being restored: " + indicesToFail); + } + } + logger.info("closing indices [{}]", indicesAsString); MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData()); diff --git a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 7e9bd14f9f3f..dc803a464124 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -137,6 +137,32 @@ public abstract 
class AbstractSnapshotIntegTestCase extends ESIntegTestCase { return null; } + public static void blockAllDataNodes(String repository) { + for(RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) { + ((MockRepository)repositoriesService.repository(repository)).blockOnDataFiles(true); + } + } + + public static void unblockAllDataNodes(String repository) { + for(RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) { + ((MockRepository)repositoriesService.repository(repository)).unblock(); + } + } + + public void waitForBlockOnAnyDataNode(String repository, TimeValue timeout) throws InterruptedException { + if (false == awaitBusy(() -> { + for(RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) { + MockRepository mockRepository = (MockRepository) repositoriesService.repository(repository); + if (mockRepository.blocked()) { + return true; + } + } + return false; + }, timeout.millis(), TimeUnit.MILLISECONDS)) { + fail("Timeout waiting for repository block on any data node!!!"); + } + } + public static void unblockNode(String node) { ((MockRepository)internalCluster().getInstance(RepositoriesService.class, node).repository("test-repo")).unblock(); } diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 65337d4b6329..9fb2b0f99897 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -1865,6 +1865,66 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas } } + public void testCloseIndexDuringRestore() throws Exception { + Client client = client(); + + logger.info("--> creating repository"); + 
assertAcked(client.admin().cluster().preparePutRepository("test-repo") + .setType("mock").setSettings(Settings.settingsBuilder() + .put("location", randomRepoPath()) + .put("compress", randomBoolean()) + .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES) + )); + + createIndex("test-idx-1", "test-idx-2"); + ensureGreen(); + + logger.info("--> indexing some data"); + for (int i = 0; i < 100; i++) { + index("test-idx-1", "doc", Integer.toString(i), "foo", "bar" + i); + index("test-idx-2", "doc", Integer.toString(i), "foo", "baz" + i); + } + refresh(); + assertThat(client.prepareSearch("test-idx-1").setSize(0).get().getHits().totalHits(), equalTo(100L)); + assertThat(client.prepareSearch("test-idx-2").setSize(0).get().getHits().totalHits(), equalTo(100L)); + + logger.info("--> snapshot"); + assertThat(client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap") + .setIndices("test-idx-*").setWaitForCompletion(true).get().getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS)); + + logger.info("--> deleting indices before restoring"); + assertAcked(client.admin().indices().prepareDelete("test-idx-*").get()); + + blockAllDataNodes("test-repo"); + logger.info("--> execution will be blocked on all data nodes"); + + logger.info("--> start restore"); + ListenableActionFuture restoreFut = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap") + .setWaitForCompletion(true) + .execute(); + + logger.info("--> waiting for block to kick in"); + waitForBlockOnAnyDataNode("test-repo", TimeValue.timeValueSeconds(60)); + + logger.info("--> close index while restore is running"); + try { + client.admin().indices().prepareClose("test-idx-1").get(); + fail("Expected closing index to fail during restore"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("Cannot close indices that are being restored: [test-idx-1]")); + } + + logger.info("--> unblocking all data nodes"); + 
unblockAllDataNodes("test-repo"); + + logger.info("--> wait for restore to finish"); + RestoreSnapshotResponse restoreSnapshotResponse = restoreFut.get(); + logger.info("--> check that all shards were recovered"); + assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0)); + assertThat(restoreSnapshotResponse.getRestoreInfo().successfulShards(), greaterThan(0)); + assertThat(restoreSnapshotResponse.getRestoreInfo().failedShards(), equalTo(0)); + } + public void testDeleteOrphanSnapshot() throws Exception { Client client = client(); From fa6a3398a8e671b37f6a47ab573dbff23bcbf461 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Fri, 4 Mar 2016 12:52:35 +0100 Subject: [PATCH 094/320] Remove NodeService injection to Discovery This was only used by the multicast plugin which is now removed. Closes #17000 --- .../org/elasticsearch/discovery/Discovery.java | 6 ------ .../discovery/local/LocalDiscovery.java | 7 ------- .../discovery/zen/DiscoveryNodesProvider.java | 4 ---- .../elasticsearch/discovery/zen/ZenDiscovery.java | 15 --------------- .../elasticsearch/node/service/NodeService.java | 1 - .../zen/ping/unicast/UnicastZenPingIT.java | 11 ----------- .../publish/PublishClusterStateActionTests.java | 6 ------ .../org/elasticsearch/test/NoopDiscovery.java | 7 ------- 8 files changed, 57 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/discovery/Discovery.java b/core/src/main/java/org/elasticsearch/discovery/Discovery.java index b96417381ffa..778e2d150536 100644 --- a/core/src/main/java/org/elasticsearch/discovery/Discovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/Discovery.java @@ -26,7 +26,6 @@ import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.node.service.NodeService; import java.io.IOException; @@ -41,11 +40,6 @@ public 
interface Discovery extends LifecycleComponent { String nodeDescription(); - /** - * Here as a hack to solve dep injection problem... - */ - void setNodeService(@Nullable NodeService nodeService); - /** * Another hack to solve dep injection problem..., note, this will be called before * any start is called. diff --git a/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java index 661de5260c1a..0462d6a8d8dd 100644 --- a/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java @@ -33,7 +33,6 @@ import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.internal.Nullable; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.ClusterSettings; @@ -45,7 +44,6 @@ import org.elasticsearch.discovery.BlockingClusterStatePublishResponseHandler; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.DiscoveryStats; -import org.elasticsearch.node.service.NodeService; import java.util.HashSet; import java.util.Queue; @@ -84,11 +82,6 @@ public class LocalDiscovery extends AbstractLifecycleComponent implem this.discoverySettings = new DiscoverySettings(settings, clusterSettings); } - @Override - public void setNodeService(@Nullable NodeService nodeService) { - // nothing to do here - } - @Override public void setRoutingService(RoutingService routingService) { this.routingService = routingService; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/DiscoveryNodesProvider.java 
b/core/src/main/java/org/elasticsearch/discovery/zen/DiscoveryNodesProvider.java index f845cbe1fed7..b9ce79013696 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/DiscoveryNodesProvider.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/DiscoveryNodesProvider.java @@ -20,8 +20,6 @@ package org.elasticsearch.discovery.zen; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.node.service.NodeService; /** * @@ -30,6 +28,4 @@ public interface DiscoveryNodesProvider { DiscoveryNodes nodes(); - @Nullable - NodeService nodeService(); } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index fb0f7a619668..c0dd78b4e5f3 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -60,7 +60,6 @@ import org.elasticsearch.discovery.zen.ping.ZenPing; import org.elasticsearch.discovery.zen.ping.ZenPingService; import org.elasticsearch.discovery.zen.publish.PendingClusterStateStats; import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction; -import org.elasticsearch.node.service.NodeService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; @@ -137,10 +136,6 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen /** counts the time this node has joined the cluster or have elected it self as master */ private final AtomicLong clusterJoinsCounter = new AtomicLong(); - @Nullable - private NodeService nodeService; - - // must initialized in doStart(), when we have the routingService set private volatile NodeJoinController nodeJoinController; @@ -192,11 +187,6 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen 
transportService.registerRequestHandler(DISCOVERY_REJOIN_ACTION_NAME, RejoinClusterRequest::new, ThreadPool.Names.SAME, new RejoinClusterRequestHandler()); } - @Override - public void setNodeService(@Nullable NodeService nodeService) { - this.nodeService = nodeService; - } - @Override public void setRoutingService(RoutingService routingService) { this.routingService = routingService; @@ -292,11 +282,6 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen return clusterService.state().nodes(); } - @Override - public NodeService nodeService() { - return this.nodeService; - } - @Override public boolean nodeHasJoinedClusterOnce() { return clusterJoinsCounter.get() > 0; diff --git a/core/src/main/java/org/elasticsearch/node/service/NodeService.java b/core/src/main/java/org/elasticsearch/node/service/NodeService.java index 7ae98a20dbb7..88b2fe488681 100644 --- a/core/src/main/java/org/elasticsearch/node/service/NodeService.java +++ b/core/src/main/java/org/elasticsearch/node/service/NodeService.java @@ -84,7 +84,6 @@ public class NodeService extends AbstractComponent implements Closeable { this.transportService = transportService; this.indicesService = indicesService; this.discovery = discovery; - discovery.setNodeService(this); this.version = version; this.pluginService = pluginService; this.circuitBreakerService = circuitBreakerService; diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java index b247dad069e9..88d375699a15 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java @@ -33,7 +33,6 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.ping.PingContextProvider; import 
org.elasticsearch.discovery.zen.ping.ZenPing; -import org.elasticsearch.node.service.NodeService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -82,11 +81,6 @@ public class UnicastZenPingIT extends ESTestCase { return DiscoveryNodes.builder().put(nodeA).localNodeId("UZP_A").build(); } - @Override - public NodeService nodeService() { - return null; - } - @Override public boolean nodeHasJoinedClusterOnce() { return false; @@ -101,11 +95,6 @@ public class UnicastZenPingIT extends ESTestCase { return DiscoveryNodes.builder().put(nodeB).localNodeId("UZP_B").build(); } - @Override - public NodeService nodeService() { - return null; - } - @Override public boolean nodeHasJoinedClusterOnce() { return true; diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java index 224ecbdf6194..7e31f6055de1 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java @@ -43,7 +43,6 @@ import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.DiscoveryNodesProvider; import org.elasticsearch.node.Node; -import org.elasticsearch.node.service.NodeService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; @@ -134,11 +133,6 @@ public class PublishClusterStateActionTests extends ESTestCase { return clusterState.nodes(); } - @Override - public NodeService nodeService() { - assert false; - throw new UnsupportedOperationException("Shouldn't be here"); - } } public MockNode createMockNode(final String name) throws 
Exception { diff --git a/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java b/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java index 427dce714e88..3193aaf458e7 100644 --- a/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java +++ b/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java @@ -21,13 +21,11 @@ package org.elasticsearch.test; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingService; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.DiscoveryStats; -import org.elasticsearch.node.service.NodeService; public class NoopDiscovery implements Discovery { @@ -42,11 +40,6 @@ public class NoopDiscovery implements Discovery { return null; } - @Override - public void setNodeService(@Nullable NodeService nodeService) { - - } - @Override public void setRoutingService(RoutingService routingService) { From be176a1fede0d28c0596d29f49dd4118ad073e82 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 8 Mar 2016 15:09:53 +0100 Subject: [PATCH 095/320] Add Json representation to raw group settings for better logging representation. 
--- .../org/elasticsearch/common/settings/Setting.java | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 07519028106d..c31b905abbf1 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.settings; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; @@ -30,6 +31,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.MemorySizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; @@ -519,7 +521,16 @@ public class Setting extends ToXContentToBytes { @Override public String getRaw(Settings settings) { - throw new UnsupportedOperationException("group settings don't support raw values"); + Settings subSettings = get(settings); + try { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + subSettings.toXContent(builder, EMPTY_PARAMS); + builder.endObject(); + return builder.string(); + } catch (IOException e) { + throw new RuntimeException(e); + } } @Override From 95b0a6a2cf46de2f900cc08466d9697fbb2d4e5e Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 8 Mar 2016 08:34:46 -0500 Subject: [PATCH 096/320] Limit generic thread pool The generic thread pool was previously configured to be able to create an unlimited number of threads. 
The thinking is that tasks that are submitted to its work queue must execute and should not block waiting for a worker. However, in cases of heavy load, this can lead to an explosion in the number of threads; this can even lead to a feedback loop that exacerbates the problem. What is more, this can even bump into OS limits on the number of threads that can be created. This commit limits the number of threads in the generic thread pool to four times the bounded number of processors. Relates #17003 --- core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index c7f4392e56a3..8b6f11987050 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -222,7 +222,7 @@ public class ThreadPool extends AbstractComponent implements Closeable { int halfProcMaxAt5 = Math.min(((availableProcessors + 1) / 2), 5); int halfProcMaxAt10 = Math.min(((availableProcessors + 1) / 2), 10); Map defaultExecutorTypeSettings = new HashMap<>(); - add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.GENERIC).keepAlive("30s")); + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.GENERIC).size(4 * availableProcessors).keepAlive("30s")); add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.INDEX).size(availableProcessors).queueSize(200)); add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.BULK).size(availableProcessors).queueSize(50)); add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.GET).size(availableProcessors).queueSize(1000)); From 930984eb4fce446c355ba35d17fdf08d054221a6 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 8 Mar 2016 08:46:11 -0500 Subject: [PATCH 097/320] Reduce maximum number of threads in bootstrap check 
This commit reduces the maximum number of threads required in the bootstrap check. This limit can be reduced since the generic thread pool is no longer unbounded. Relates #17003 --- .../main/java/org/elasticsearch/bootstrap/BootstrapCheck.java | 2 +- .../java/org/elasticsearch/bootstrap/BootstrapCheckTests.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java index 6ac3c477fd74..433dd4498a4d 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java @@ -225,7 +225,7 @@ final class BootstrapCheck { static class MaxNumberOfThreadsCheck implements Check { - private final long maxNumberOfThreadsThreshold = 1 << 15; + private final long maxNumberOfThreadsThreshold = 1 << 11; @Override public boolean check() { diff --git a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java index 45986eab00ee..3c269c390045 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java @@ -131,7 +131,7 @@ public class BootstrapCheckTests extends ESTestCase { } public void testMaxNumberOfThreadsCheck() { - final int limit = 1 << 15; + final int limit = 1 << 11; final AtomicLong maxNumberOfThreads = new AtomicLong(randomIntBetween(1, limit - 1)); final BootstrapCheck.MaxNumberOfThreadsCheck check = new BootstrapCheck.MaxNumberOfThreadsCheck() { @Override From c0572c631db574cdcaa17413cb9376dacd1f8210 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 8 Mar 2016 08:51:21 -0500 Subject: [PATCH 098/320] Note to configuration docs on number of threads This commit adds a note to the configuration docs regarding the number of threads necessary for the 
Elasticsearch user. Relates #17003 --- docs/reference/setup/configuration.asciidoc | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/reference/setup/configuration.asciidoc b/docs/reference/setup/configuration.asciidoc index 03037207fb09..bef563cd9655 100644 --- a/docs/reference/setup/configuration.asciidoc +++ b/docs/reference/setup/configuration.asciidoc @@ -43,6 +43,13 @@ using the <> API, with: curl localhost:9200/_nodes/stats/process?pretty -------------------------------------------------- +[float] +[[max-number-of-threads]] +==== Number of threads + +Make sure that the number of threads that the Elasticsearch user can +create is at least 2048. + [float] [[vm-max-map-count]] ==== Virtual memory From 9a9eadd743b433a1cc203fd56c8872cc29c75a4d Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 8 Mar 2016 15:30:57 +0100 Subject: [PATCH 099/320] [TEST] Use actual target directory to fsync copied files in test Apparently lucene6 is way more picky with respect to corrupting files that are not fsynced that's why this test sometimes failed after the lucene6 upgrade. 
--- .../indices/recovery/RecoverySourceHandlerTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index b5f744ddc23e..b69d12185460 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -94,7 +94,7 @@ public class RecoverySourceHandlerTests extends ESTestCase { @Override public void close() throws IOException { super.close(); - store.directory().sync(Collections.singleton(md.name())); // sync otherwise MDW will mess with it + targetStore.directory().sync(Collections.singleton(md.name())); // sync otherwise MDW will mess with it } }; } catch (IOException e) { From 678bc927ce94574596c81d3c7f7eef78332bfce3 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Tue, 8 Mar 2016 09:12:04 -0700 Subject: [PATCH 100/320] Log when cancelling allocation of a replica because a new syncid was found Currently the message stays in the `UnassignedInfo` for the shard, however, it would be very useful to know the exact point (time-wise) that the cancellation happened when diagnosing an issue. 
Relates to debugging #16357 --- .../java/org/elasticsearch/gateway/ReplicaShardAllocator.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java index e2b6f0d27ed9..c94e1370c019 100644 --- a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java @@ -104,6 +104,8 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { && matchingNodes.isNodeMatchBySyncID(nodeWithHighestMatch) == true) { // we found a better match that has a full sync id match, the existing allocation is not fully synced // so we found a better one, cancel this one + logger.debug("cancelling allocation of replica on [{}], sync id match found on node [{}]", + currentNode, nodeWithHighestMatch); it.moveToUnassigned(new UnassignedInfo(UnassignedInfo.Reason.REALLOCATED_REPLICA, "existing allocation of replica to [" + currentNode + "] cancelled, sync id match found on node [" + nodeWithHighestMatch + "]", null, allocation.getCurrentNanoTime(), System.currentTimeMillis())); From 6d0efae7139401f497e6cd4ab329fba82dd922bf Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 2 Mar 2016 11:18:44 -0500 Subject: [PATCH 101/320] Teach list tasks api to wait for tasks to finish _wait_for_completion defaults to false. If set to true then the API will wait for all the tasks that it finds to stop running before returning. You can use the timeout parameter to prevent it from waiting forever. If you don't set a timeout parameter it'll default to 30 seconds. Also adds a log message to rest tests if any tasks overrun the test. This is just a log (instead of failing the test) because lots of tasks are run by the cluster on its own and they shouldn't cause the test to fail. Things like fetching disk usage from the other nodes, for example. 
Switches the request to getter/setter style methods as we're going that way in the Elasticsearch code base. Reindex is all getter/setter style. Closes #16906 --- .../node/tasks/cancel/CancelTasksRequest.java | 10 ++- .../cancel/TransportCancelTasksAction.java | 16 ++-- .../node/tasks/list/ListTasksRequest.java | 22 ++++- .../tasks/list/ListTasksRequestBuilder.java | 10 ++- .../tasks/list/TransportListTasksAction.java | 39 ++++++++- .../support/tasks/BaseTasksRequest.java | 28 +++--- .../support/tasks/TasksRequestBuilder.java | 6 +- .../support/tasks/TransportTasksAction.java | 18 ++-- .../node/tasks/RestCancelTasksAction.java | 8 +- .../node/tasks/RestListTasksAction.java | 12 +-- .../node/tasks/CancellableTasksTests.java | 14 +-- .../admin/cluster/node/tasks/TasksIT.java | 75 ++++++++++++++++ .../cluster/node/tasks/TestTaskPlugin.java | 5 +- .../node/tasks/TransportTasksActionTests.java | 32 +++---- .../rest-api-spec/test/reindex/10_basic.yaml | 9 +- .../test/update-by-query/10_basic.yaml | 6 ++ .../rest-api-spec/api/tasks.list.json | 4 + .../test/rest/ESRestTestCase.java | 85 ++++++++++++++----- .../rest/client/http/HttpRequestBuilder.java | 5 +- 19 files changed, 305 insertions(+), 99 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequest.java index 1d1249e15510..e92695d61e24 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequest.java @@ -53,12 +53,18 @@ public class CancelTasksRequest extends BaseTasksRequest { return super.match(task) && task instanceof CancellableTask; } - public CancelTasksRequest reason(String reason) { + /** + * Set the reason for canceling the task. 
+ */ + public CancelTasksRequest setReason(String reason) { this.reason = reason; return this; } - public String reason() { + /** + * The reason for canceling the task. + */ + public String getReason() { return reason; } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java index b07e540d7926..874f230587d8 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java @@ -84,21 +84,21 @@ public class TransportCancelTasksAction extends TransportTasksAction operation) { - if (request.taskId().isSet() == false) { + if (request.getTaskId().isSet() == false) { // we are only checking one task, we can optimize it - CancellableTask task = taskManager.getCancellableTask(request.taskId().getId()); + CancellableTask task = taskManager.getCancellableTask(request.getTaskId().getId()); if (task != null) { if (request.match(task)) { operation.accept(task); } else { - throw new IllegalArgumentException("task [" + request.taskId() + "] doesn't support this operation"); + throw new IllegalArgumentException("task [" + request.getTaskId() + "] doesn't support this operation"); } } else { - if (taskManager.getTask(request.taskId().getId()) != null) { + if (taskManager.getTask(request.getTaskId().getId()) != null) { // The task exists, but doesn't support cancellation - throw new IllegalArgumentException("task [" + request.taskId() + "] doesn't support cancellation"); + throw new IllegalArgumentException("task [" + request.getTaskId() + "] doesn't support cancellation"); } else { - throw new ResourceNotFoundException("task [{}] doesn't support cancellation", request.taskId()); + throw new ResourceNotFoundException("task [{}] doesn't support 
cancellation", request.getTaskId()); } } } else { @@ -113,14 +113,14 @@ public class TransportCancelTasksAction extends TransportTasksAction removeBanOnNodes(cancellableTask, nodes)); - Set childNodes = taskManager.cancel(cancellableTask, request.reason(), banLock::onTaskFinished); + Set childNodes = taskManager.cancel(cancellableTask, request.getReason(), banLock::onTaskFinished); if (childNodes != null) { if (childNodes.isEmpty()) { logger.trace("cancelling task {} with no children", cancellableTask.getId()); return cancellableTask.taskInfo(clusterService.localNode(), false); } else { logger.trace("cancelling task {} with children on nodes [{}]", cancellableTask.getId(), childNodes); - setBanOnNodes(request.reason(), cancellableTask, childNodes, banLock); + setBanOnNodes(request.getReason(), cancellableTask, childNodes, banLock); return cancellableTask.taskInfo(clusterService.localNode(), false); } } else { diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequest.java index 6bf8ac3e1efb..3fe743fc36aa 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequest.java @@ -31,31 +31,49 @@ import java.io.IOException; public class ListTasksRequest extends BaseTasksRequest { private boolean detailed = false; + private boolean waitForCompletion = false; /** * Should the detailed task information be returned. */ - public boolean detailed() { + public boolean getDetailed() { return this.detailed; } /** * Should the detailed task information be returned. */ - public ListTasksRequest detailed(boolean detailed) { + public ListTasksRequest setDetailed(boolean detailed) { this.detailed = detailed; return this; } + /** + * Should this request wait for all found tasks to complete? 
+ */ + public boolean getWaitForCompletion() { + return waitForCompletion; + } + + /** + * Should this request wait for all found tasks to complete? + */ + public ListTasksRequest setWaitForCompletion(boolean waitForCompletion) { + this.waitForCompletion = waitForCompletion; + return this; + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); detailed = in.readBoolean(); + waitForCompletion = in.readBoolean(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeBoolean(detailed); + out.writeBoolean(waitForCompletion); } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java index 2b462014f438..1385781125a3 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java @@ -35,7 +35,15 @@ public class ListTasksRequestBuilder extends TasksRequestBuilder { + private static final TimeValue WAIT_FOR_COMPLETION_POLL = timeValueMillis(100); + private static final TimeValue DEFAULT_WAIT_FOR_COMPLETION_TIMEOUT = timeValueSeconds(30); @Inject public TransportListTasksAction(Settings settings, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { @@ -59,7 +67,34 @@ public class TransportListTasksAction extends TransportTasksAction operation) { + if (false == request.getWaitForCompletion()) { + super.processTasks(request, operation); + return; + } + // If we should wait for completion then we have to intercept every found task and wait for it to leave the manager. 
+ TimeValue timeout = request.getTimeout(); + if (timeout == null) { + timeout = DEFAULT_WAIT_FOR_COMPLETION_TIMEOUT; + } + long timeoutTime = System.nanoTime() + timeout.nanos(); + super.processTasks(request, operation.andThen((Task t) -> { + while (System.nanoTime() - timeoutTime < 0) { + if (taskManager.getTask(t.getId()) == null) { + return; + } + try { + Thread.sleep(WAIT_FOR_COMPLETION_POLL.millis()); + } catch (InterruptedException e) { + throw new ElasticsearchException("Interrupted waiting for completion of [{}]", e, t); + } + } + throw new ElasticsearchTimeoutException("Timed out waiting for completion of [{}]", t); + })); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java b/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java index f7da48a667bd..f10453872592 100644 --- a/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java @@ -71,7 +71,7 @@ public class BaseTasksRequest> extends * Sets the list of action masks for the actions that should be returned */ @SuppressWarnings("unchecked") - public final Request actions(String... actions) { + public final Request setActions(String... actions) { this.actions = actions; return (Request) this; } @@ -79,16 +79,16 @@ public class BaseTasksRequest> extends /** * Return the list of action masks for the actions that should be returned */ - public String[] actions() { + public String[] getActions() { return actions; } - public final String[] nodesIds() { + public final String[] getNodesIds() { return nodesIds; } @SuppressWarnings("unchecked") - public final Request nodesIds(String... nodesIds) { + public final Request setNodesIds(String... nodesIds) { this.nodesIds = nodesIds; return (Request) this; } @@ -98,12 +98,12 @@ public class BaseTasksRequest> extends * * By default tasks with any ids are returned. 
*/ - public TaskId taskId() { + public TaskId getTaskId() { return taskId; } @SuppressWarnings("unchecked") - public final Request taskId(TaskId taskId) { + public final Request setTaskId(TaskId taskId) { this.taskId = taskId; return (Request) this; } @@ -112,29 +112,29 @@ public class BaseTasksRequest> extends /** * Returns the parent task id that tasks should be filtered by */ - public TaskId parentTaskId() { + public TaskId getParentTaskId() { return parentTaskId; } @SuppressWarnings("unchecked") - public Request parentTaskId(TaskId parentTaskId) { + public Request setParentTaskId(TaskId parentTaskId) { this.parentTaskId = parentTaskId; return (Request) this; } - public TimeValue timeout() { + public TimeValue getTimeout() { return this.timeout; } @SuppressWarnings("unchecked") - public final Request timeout(TimeValue timeout) { + public final Request setTimeout(TimeValue timeout) { this.timeout = timeout; return (Request) this; } @SuppressWarnings("unchecked") - public final Request timeout(String timeout) { + public final Request setTimeout(String timeout) { this.timeout = TimeValue.parseTimeValue(timeout, null, getClass().getSimpleName() + ".timeout"); return (Request) this; } @@ -162,11 +162,11 @@ public class BaseTasksRequest> extends } public boolean match(Task task) { - if (actions() != null && actions().length > 0 && Regex.simpleMatch(actions(), task.getAction()) == false) { + if (getActions() != null && getActions().length > 0 && Regex.simpleMatch(getActions(), task.getAction()) == false) { return false; } - if (taskId().isSet() == false) { - if(taskId().getId() != task.getId()) { + if (getTaskId().isSet() == false) { + if(getTaskId().getId() != task.getId()) { return false; } } diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java index a7265ce9998e..a510a847c629 100644 --- 
a/core/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java @@ -35,19 +35,19 @@ public class TasksRequestBuilder , Res @SuppressWarnings("unchecked") public final RequestBuilder setNodesIds(String... nodesIds) { - request.nodesIds(nodesIds); + request.setNodesIds(nodesIds); return (RequestBuilder) this; } @SuppressWarnings("unchecked") public final RequestBuilder setActions(String... actions) { - request.actions(actions); + request.setActions(actions); return (RequestBuilder) this; } @SuppressWarnings("unchecked") public final RequestBuilder setTimeout(TimeValue timeout) { - request.timeout(timeout); + request.setTimeout(timeout); return (RequestBuilder) this; } } diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java index f10b9f233271..53c0d8519977 100644 --- a/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java @@ -124,25 +124,25 @@ public abstract class TransportTasksAction< } protected String[] resolveNodes(TasksRequest request, ClusterState clusterState) { - if (request.taskId().isSet()) { - return clusterState.nodes().resolveNodesIds(request.nodesIds()); + if (request.getTaskId().isSet()) { + return clusterState.nodes().resolveNodesIds(request.getNodesIds()); } else { - return new String[]{request.taskId().getNodeId()}; + return new String[]{request.getTaskId().getNodeId()}; } } protected void processTasks(TasksRequest request, Consumer operation) { - if (request.taskId().isSet() == false) { + if (request.getTaskId().isSet() == false) { // we are only checking one task, we can optimize it - Task task = taskManager.getTask(request.taskId().getId()); + Task task = 
taskManager.getTask(request.getTaskId().getId()); if (task != null) { if (request.match(task)) { operation.accept((OperationTask) task); } else { - throw new ResourceNotFoundException("task [{}] doesn't support this operation", request.taskId()); + throw new ResourceNotFoundException("task [{}] doesn't support this operation", request.getTaskId()); } } else { - throw new ResourceNotFoundException("task [{}] is missing", request.taskId()); + throw new ResourceNotFoundException("task [{}] is missing", request.getTaskId()); } } else { for (Task task : taskManager.getTasks().values()) { @@ -224,8 +224,8 @@ public abstract class TransportTasksAction< } } else { TransportRequestOptions.Builder builder = TransportRequestOptions.builder(); - if (request.timeout() != null) { - builder.withTimeout(request.timeout()); + if (request.getTimeout() != null) { + builder.withTimeout(request.getTimeout()); } builder.withCompress(transportCompress()); for (int i = 0; i < nodesIds.length; i++) { diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestCancelTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestCancelTasksAction.java index 99cdc16253a7..658090bb6dbd 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestCancelTasksAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestCancelTasksAction.java @@ -52,10 +52,10 @@ public class RestCancelTasksAction extends BaseRestHandler { TaskId parentTaskId = new TaskId(request.param("parent_task_id")); CancelTasksRequest cancelTasksRequest = new CancelTasksRequest(); - cancelTasksRequest.taskId(taskId); - cancelTasksRequest.nodesIds(nodesIds); - cancelTasksRequest.actions(actions); - cancelTasksRequest.parentTaskId(parentTaskId); + cancelTasksRequest.setTaskId(taskId); + cancelTasksRequest.setNodesIds(nodesIds); + cancelTasksRequest.setActions(actions); + 
cancelTasksRequest.setParentTaskId(parentTaskId); client.admin().cluster().cancelTasks(cancelTasksRequest, new RestToXContentListener<>(channel)); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java index 992267fa8a51..9a9d1991298d 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java @@ -50,13 +50,15 @@ public class RestListTasksAction extends BaseRestHandler { TaskId taskId = new TaskId(request.param("taskId")); String[] actions = Strings.splitStringByCommaToArray(request.param("actions")); TaskId parentTaskId = new TaskId(request.param("parent_task_id")); + boolean waitForCompletion = request.paramAsBoolean("wait_for_completion", false); ListTasksRequest listTasksRequest = new ListTasksRequest(); - listTasksRequest.taskId(taskId); - listTasksRequest.nodesIds(nodesIds); - listTasksRequest.detailed(detailed); - listTasksRequest.actions(actions); - listTasksRequest.parentTaskId(parentTaskId); + listTasksRequest.setTaskId(taskId); + listTasksRequest.setNodesIds(nodesIds); + listTasksRequest.setDetailed(detailed); + listTasksRequest.setActions(actions); + listTasksRequest.setParentTaskId(parentTaskId); + listTasksRequest.setWaitForCompletion(waitForCompletion); client.admin().cluster().listTasks(listTasksRequest, new RestToXContentListener<>(channel)); } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java index 5109ab979cf0..586f178d12d6 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java +++ 
b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java @@ -237,8 +237,8 @@ public class CancellableTasksTests extends TaskManagerTestCase { // Cancel main task CancelTasksRequest request = new CancelTasksRequest(); - request.reason("Testing Cancellation"); - request.taskId(new TaskId(testNodes[0].discoveryNode.getId(), mainTask.getId())); + request.setReason("Testing Cancellation"); + request.setTaskId(new TaskId(testNodes[0].discoveryNode.getId(), mainTask.getId())); // And send the cancellation request to a random node CancelTasksResponse response = testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction.execute(request) .get(); @@ -270,7 +270,7 @@ public class CancellableTasksTests extends TaskManagerTestCase { // Make sure that tasks are no longer running ListTasksResponse listTasksResponse = testNodes[randomIntBetween(0, testNodes.length - 1)] - .transportListTasksAction.execute(new ListTasksRequest().taskId( + .transportListTasksAction.execute(new ListTasksRequest().setTaskId( new TaskId(testNodes[0].discoveryNode.getId(), mainTask.getId()))).get(); assertEquals(0, listTasksResponse.getTasks().size()); @@ -313,7 +313,7 @@ public class CancellableTasksTests extends TaskManagerTestCase { // Make sure that tasks are running ListTasksResponse listTasksResponse = testNodes[randomIntBetween(0, testNodes.length - 1)] - .transportListTasksAction.execute(new ListTasksRequest().parentTaskId(new TaskId(mainNode, mainTask.getId()))).get(); + .transportListTasksAction.execute(new ListTasksRequest().setParentTaskId(new TaskId(mainNode, mainTask.getId()))).get(); assertThat(listTasksResponse.getTasks().size(), greaterThanOrEqualTo(blockOnNodes.size())); // Simulate the coordinating node leaving the cluster @@ -331,8 +331,8 @@ public class CancellableTasksTests extends TaskManagerTestCase { logger.info("--> Simulate issuing cancel request on the node that is about to leave the cluster"); // Simulate issuing 
cancel request on the node that is about to leave the cluster CancelTasksRequest request = new CancelTasksRequest(); - request.reason("Testing Cancellation"); - request.taskId(new TaskId(testNodes[0].discoveryNode.getId(), mainTask.getId())); + request.setReason("Testing Cancellation"); + request.setTaskId(new TaskId(testNodes[0].discoveryNode.getId(), mainTask.getId())); // And send the cancellation request to a random node CancelTasksResponse response = testNodes[0].transportCancelTasksAction.execute(request).get(); logger.info("--> Done simulating issuing cancel request on the node that is about to leave the cluster"); @@ -356,7 +356,7 @@ public class CancellableTasksTests extends TaskManagerTestCase { // Make sure that tasks are no longer running try { ListTasksResponse listTasksResponse1 = testNodes[randomIntBetween(1, testNodes.length - 1)] - .transportListTasksAction.execute(new ListTasksRequest().taskId(new TaskId(mainNode, mainTask.getId()))).get(); + .transportListTasksAction.execute(new ListTasksRequest().setTaskId(new TaskId(mainNode, mainTask.getId()))).get(); assertEquals(0, listTasksResponse1.getTasks().size()); } catch (InterruptedException ex) { Thread.currentThread().interrupt(); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java index eaa3caf90849..36f4ce9b30e8 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -18,6 +18,8 @@ */ package org.elasticsearch.action.admin.cluster.node.tasks; +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.ListenableActionFuture; import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; import 
org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; @@ -54,8 +56,10 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.ReentrantLock; import java.util.function.Function; +import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; import static org.hamcrest.Matchers.emptyCollectionOf; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; @@ -327,6 +331,77 @@ public class TasksIT extends ESIntegTestCase { assertEquals(0, client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").get().getTasks().size()); } + public void testTasksListWaitForCompletion() throws Exception { + // Start blocking test task + ListenableActionFuture future = TestTaskPlugin.TestTaskAction.INSTANCE.newRequestBuilder(client()) + .execute(); + + ListenableActionFuture waitResponseFuture; + try { + // Wait for the task to start on all nodes + assertBusy(() -> assertEquals(internalCluster().numDataAndMasterNodes(), + client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").get().getTasks().size())); + + // Spin up a request to wait for that task to finish + waitResponseFuture = client().admin().cluster().prepareListTasks() + .setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").setWaitForCompletion(true).execute(); + } finally { + // Unblock the request so the wait for completion request can finish + TestTaskPlugin.UnblockTestTasksAction.INSTANCE.newRequestBuilder(client()).get(); + } + + // Now that the task is unblocked the list response will come back + ListTasksResponse waitResponse = waitResponseFuture.get(); + // If any tasks come back then they are the tasks we asked for - it'd be super weird if this wasn't true + for (TaskInfo task: waitResponse.getTasks()) { + assertEquals(task.getAction(), 
TestTaskPlugin.TestTaskAction.NAME + "[n]"); + } + // See the next test to cover the timeout case + + future.get(); + } + + public void testTasksListWaitForTimeout() throws Exception { + // Start blocking test task + ListenableActionFuture future = TestTaskPlugin.TestTaskAction.INSTANCE.newRequestBuilder(client()) + .execute(); + try { + // Wait for the task to start on all nodes + assertBusy(() -> assertEquals(internalCluster().numDataAndMasterNodes(), + client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").get().getTasks().size())); + + // Spin up a request that should wait for those tasks to finish + // It will timeout because we haven't unblocked the tasks + ListTasksResponse waitResponse = client().admin().cluster().prepareListTasks() + .setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").setWaitForCompletion(true).setTimeout(timeValueMillis(100)) + .get(); + + assertFalse(waitResponse.getNodeFailures().isEmpty()); + for (FailedNodeException failure : waitResponse.getNodeFailures()) { + Throwable timeoutException = failure.getCause(); + // The exception sometimes comes back wrapped depending on the client + if (timeoutException.getCause() != null) { + timeoutException = timeoutException.getCause(); + } + assertThat(failure.getCause().getCause(), instanceOf(ElasticsearchTimeoutException.class)); + } + } finally { + // Now we can unblock those requests + TestTaskPlugin.UnblockTestTasksAction.INSTANCE.newRequestBuilder(client()).get(); + } + future.get(); + } + + public void testTasksListWaitForNoTask() throws Exception { + // Spin up a request to wait for no matching tasks + ListenableActionFuture waitResponseFuture = client().admin().cluster().prepareListTasks() + .setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").setWaitForCompletion(true).setTimeout(timeValueMillis(10)) + .execute(); + + // It should finish quickly and without complaint + assertThat(waitResponseFuture.get().getTasks(), 
emptyCollectionOf(TaskInfo.class)); + } + @Override public void tearDown() throws Exception { for (Map.Entry, RecordingTaskManagerListener> entry : listeners.entrySet()) { diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java index 0d4372a51eb3..e8dcd228e50d 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java @@ -345,7 +345,10 @@ public class TestTaskPlugin extends Plugin { public static class UnblockTestTasksRequest extends BaseTasksRequest { - + @Override + public boolean match(Task task) { + return task instanceof TestTask && super.match(task); + } } public static class UnblockTestTasksResponse extends BaseTasksResponse { diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index e1501f9b14c9..556eee238fda 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -355,7 +355,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase { int testNodeNum = randomIntBetween(0, testNodes.length - 1); TestNode testNode = testNodes[testNodeNum]; ListTasksRequest listTasksRequest = new ListTasksRequest(); - listTasksRequest.actions("testAction*"); // pick all test actions + listTasksRequest.setActions("testAction*"); // pick all test actions logger.info("Listing currently running tasks using node [{}]", testNodeNum); ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get(); logger.info("Checking currently running tasks"); 
@@ -371,7 +371,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase { // Check task counts using transport with filtering testNode = testNodes[randomIntBetween(0, testNodes.length - 1)]; listTasksRequest = new ListTasksRequest(); - listTasksRequest.actions("testAction[n]"); // only pick node actions + listTasksRequest.setActions("testAction[n]"); // only pick node actions response = testNode.transportListTasksAction.execute(listTasksRequest).get(); assertEquals(testNodes.length, response.getPerNodeTasks().size()); for (Map.Entry> entry : response.getPerNodeTasks().entrySet()) { @@ -380,7 +380,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase { } // Check task counts using transport with detailed description - listTasksRequest.detailed(true); // same request only with detailed description + listTasksRequest.setDetailed(true); // same request only with detailed description response = testNode.transportListTasksAction.execute(listTasksRequest).get(); assertEquals(testNodes.length, response.getPerNodeTasks().size()); for (Map.Entry> entry : response.getPerNodeTasks().entrySet()) { @@ -389,7 +389,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase { } // Make sure that the main task on coordinating node is the task that was returned to us by execute() - listTasksRequest.actions("testAction"); // only pick the main task + listTasksRequest.setActions("testAction"); // only pick the main task response = testNode.transportListTasksAction.execute(listTasksRequest).get(); assertEquals(1, response.getTasks().size()); assertEquals(mainTask.getId(), response.getTasks().get(0).getId()); @@ -417,7 +417,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase { // Get the parent task ListTasksRequest listTasksRequest = new ListTasksRequest(); - listTasksRequest.actions("testAction"); + listTasksRequest.setActions("testAction"); ListTasksResponse response = 
testNode.transportListTasksAction.execute(listTasksRequest).get(); assertEquals(1, response.getTasks().size()); String parentNode = response.getTasks().get(0).getNode().getId(); @@ -425,7 +425,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase { // Find tasks with common parent listTasksRequest = new ListTasksRequest(); - listTasksRequest.parentTaskId(new TaskId(parentNode, parentTaskId)); + listTasksRequest.setParentTaskId(new TaskId(parentNode, parentTaskId)); response = testNode.transportListTasksAction.execute(listTasksRequest).get(); assertEquals(testNodes.length, response.getTasks().size()); for (TaskInfo task : response.getTasks()) { @@ -451,7 +451,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase { // Get the parent task ListTasksRequest listTasksRequest = new ListTasksRequest(); - listTasksRequest.actions("testAction*"); + listTasksRequest.setActions("testAction*"); ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get(); assertEquals(0, response.getTasks().size()); @@ -472,7 +472,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase { // Check task counts using transport with filtering TestNode testNode = testNodes[randomIntBetween(0, testNodes.length - 1)]; ListTasksRequest listTasksRequest = new ListTasksRequest(); - listTasksRequest.actions("testAction[n]"); // only pick node actions + listTasksRequest.setActions("testAction[n]"); // only pick node actions ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get(); assertEquals(testNodes.length, response.getPerNodeTasks().size()); for (Map.Entry> entry : response.getPerNodeTasks().entrySet()) { @@ -482,7 +482,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase { // Check task counts using transport with detailed description long minimalDurationNanos = System.nanoTime() - maximumStartTimeNanos; - listTasksRequest.detailed(true); // same request only 
with detailed description + listTasksRequest.setDetailed(true); // same request only with detailed description response = testNode.transportListTasksAction.execute(listTasksRequest).get(); assertEquals(testNodes.length, response.getPerNodeTasks().size()); for (Map.Entry> entry : response.getPerNodeTasks().entrySet()) { @@ -518,9 +518,9 @@ public class TransportTasksActionTests extends TaskManagerTestCase { // Try to cancel main task using action name CancelTasksRequest request = new CancelTasksRequest(); - request.nodesIds(testNodes[0].discoveryNode.getId()); - request.reason("Testing Cancellation"); - request.actions(actionName); + request.setNodesIds(testNodes[0].discoveryNode.getId()); + request.setReason("Testing Cancellation"); + request.setActions(actionName); CancelTasksResponse response = testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction.execute(request) .get(); @@ -532,8 +532,8 @@ public class TransportTasksActionTests extends TaskManagerTestCase { // Try to cancel main task using id request = new CancelTasksRequest(); - request.reason("Testing Cancellation"); - request.taskId(new TaskId(testNodes[0].discoveryNode.getId(), task.getId())); + request.setReason("Testing Cancellation"); + request.setTaskId(new TaskId(testNodes[0].discoveryNode.getId(), task.getId())); response = testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction.execute(request).get(); // Shouldn't match any tasks since testAction doesn't support cancellation @@ -544,7 +544,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase { // Make sure that task is still running ListTasksRequest listTasksRequest = new ListTasksRequest(); - listTasksRequest.actions(actionName); + listTasksRequest.setActions(actionName); ListTasksResponse listResponse = testNodes[randomIntBetween(0, testNodes.length - 1)].transportListTasksAction.execute (listTasksRequest).get(); assertEquals(1, listResponse.getPerNodeTasks().size()); @@ -617,7 
+617,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase { // Run task action on node tasks that are currently running // should be successful on all nodes except one TestTasksRequest testTasksRequest = new TestTasksRequest(); - testTasksRequest.actions("testAction[n]"); // pick all test actions + testTasksRequest.setActions("testAction[n]"); // pick all test actions TestTasksResponse response = tasksActions[0].execute(testTasksRequest).get(); // Get successful responses from all nodes except one assertEquals(testNodes.length - 1, response.tasks.size()); diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml index c23e5da95a1d..7f84c1aac8b8 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml @@ -58,9 +58,6 @@ --- "wait_for_completion=false": - - skip: - version: "0.0.0 - " - reason: breaks other tests by leaving a running reindex behind - do: index: index: source @@ -79,6 +76,7 @@ dest: index: dest - match: {task: '/.+:\d+/'} + - set: {task: task} - is_false: updated - is_false: version_conflicts - is_false: batches @@ -87,6 +85,11 @@ - is_false: took - is_false: created + - do: + tasks.list: + wait_for_completion: true + task_id: $task + --- "Response format for version conflict": - do: diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml index 383e945bbf21..94ffa2349a9e 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml @@ -37,6 +37,7 @@ wait_for_completion: false index: test - match: {task: '/.+:\d+/'} + - set: {task: task} - is_false: updated - is_false: 
version_conflicts - is_false: batches @@ -45,6 +46,11 @@ - is_false: took - is_false: created + - do: + tasks.list: + wait_for_completion: true + task_id: $task + --- "Response for version conflict": - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json index 7e8683b3475f..5cdeed1b1424 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json @@ -31,6 +31,10 @@ "parent_task": { "type" : "number", "description" : "Return tasks with specified parent task id. Set to -1 to return all." + }, + "wait_for_completion": { + "type": "boolean", + "description": "Wait for the matching tasks to complete (default: false)" } } }, diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 5684717342d1..fbc518b136d0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -19,14 +19,34 @@ package org.elasticsearch.test.rest; -import com.carrotsearch.randomizedtesting.RandomizedTest; +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.file.FileSystem; +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + import org.apache.lucene.util.IOUtils; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.client.RestException; +import org.elasticsearch.test.rest.client.RestResponse; import org.elasticsearch.test.rest.parser.RestTestParseException; import org.elasticsearch.test.rest.parser.RestTestSuiteParser; import org.elasticsearch.test.rest.section.DoSection; @@ -42,24 +62,11 @@ import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; -import java.io.IOException; -import java.io.InputStream; -import java.net.InetSocketAddress; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.nio.file.FileSystem; -import java.nio.file.FileSystems; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardCopyOption; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; +import com.carrotsearch.randomizedtesting.RandomizedTest; + +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; +import static java.util.Collections.sort; /** * Runs the clients test suite against an elasticsearch cluster. @@ -261,7 +268,6 @@ public abstract class ESRestTestCase extends ESTestCase { @After public void wipeCluster() throws Exception { - // wipe indices Map deleteIndicesArgs = new HashMap<>(); deleteIndicesArgs.put("index", "*"); @@ -285,6 +291,30 @@ public abstract class ESRestTestCase extends ESTestCase { adminExecutionContext.callApi("snapshot.delete_repository", deleteSnapshotsArgs, Collections.emptyList(), Collections.emptyMap()); } + /** + * Logs a message if there are still running tasks. The reasoning is that any tasks still running are state the is trying to bleed into + * other tests. 
+ */ + @After + public void logIfThereAreRunningTasks() throws InterruptedException, IOException, RestException { + RestResponse tasks = adminExecutionContext.callApi("tasks.list", emptyMap(), emptyList(), emptyMap()); + Set runningTasks = runningTasks(tasks); + // Ignore the task list API - it doens't count against us + runningTasks.remove(ListTasksAction.NAME); + runningTasks.remove(ListTasksAction.NAME + "[n]"); + if (runningTasks.isEmpty()) { + return; + } + List stillRunning = new ArrayList<>(runningTasks); + sort(stillRunning); + logger.info("There are still tasks running after this test that might break subsequent tests {}.", stillRunning); + /* + * This isn't a higher level log or outright failure because some of these tasks are run by the cluster in the background. If we + * could determine that some tasks are run by the user we'd fail the tests if those tasks were running and ignore any background + * tasks. + */ + } + @AfterClass public static void close() { if (restTestExecutionContext != null) { @@ -365,4 +395,19 @@ public abstract class ESRestTestCase extends ESTestCase { executableSection.execute(restTestExecutionContext); } } + + @SuppressWarnings("unchecked") + public Set runningTasks(RestResponse response) throws IOException { + Set runningTasks = new HashSet<>(); + Map nodes = (Map) response.evaluate("nodes"); + for (Map.Entry node : nodes.entrySet()) { + Map nodeInfo = (Map) node.getValue(); + Map nodeTasks = (Map) nodeInfo.get("tasks"); + for (Map.Entry taskAndName : nodeTasks.entrySet()) { + Map task = (Map) taskAndName.getValue(); + runningTasks.add(task.get("action").toString()); + } + } + return runningTasks; + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java index 6a484e9ae696..79f7502fb27a 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java @@ -114,9 +114,10 @@ public class HttpRequestBuilder { for (String pathPart : path) { try { finalPath.append('/'); - URI uri = new URI(null, null, null, -1, pathPart, null, null); + // We append "/" to the path part to handle parts that start with - or other invalid characters + URI uri = new URI(null, null, null, -1, "/" + pathPart, null, null); //manually escape any slash that each part may contain - finalPath.append(uri.getRawPath().replaceAll("/", "%2F")); + finalPath.append(uri.getRawPath().substring(1).replaceAll("/", "%2F")); } catch(URISyntaxException e) { throw new RuntimeException("unable to build uri", e); } From 0745e19c29b3eb014376552401a44f43f92f5435 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 8 Mar 2016 11:10:40 -0500 Subject: [PATCH 102/320] Add uuid to Index's toString This is useful because uuid is starting to matter more and more in index operations. 
--- .../java/org/elasticsearch/index/Index.java | 10 ++++- .../org/elasticsearch/index/IndexTests.java | 44 +++++++++++++++++++ 2 files changed, 53 insertions(+), 1 deletion(-) create mode 100644 core/src/test/java/org/elasticsearch/index/IndexTests.java diff --git a/core/src/main/java/org/elasticsearch/index/Index.java b/core/src/main/java/org/elasticsearch/index/Index.java index 80bf3c31b442..983b977d6119 100644 --- a/core/src/main/java/org/elasticsearch/index/Index.java +++ b/core/src/main/java/org/elasticsearch/index/Index.java @@ -19,6 +19,7 @@ package org.elasticsearch.index; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -50,7 +51,14 @@ public class Index implements Writeable { @Override public String toString() { - return "[" + name + "]"; + /* + * If we have a uuid we put it in the toString so it'll show up in logs which is useful as more and more things use the uuid rather + * than the name as the lookup key for the index. + */ + if (ClusterState.UNKNOWN_UUID.equals(uuid)) { + return "[" + name + "]"; + } + return "[" + name + "/" + uuid + "]"; } @Override diff --git a/core/src/test/java/org/elasticsearch/index/IndexTests.java b/core/src/test/java/org/elasticsearch/index/IndexTests.java new file mode 100644 index 000000000000..6ce38c6acba8 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/IndexTests.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; + +import static org.apache.lucene.util.TestUtil.randomSimpleString; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.not; + +public class IndexTests extends ESTestCase { + public void testToString() { + assertEquals("[name/uuid]", new Index("name", "uuid").toString()); + assertEquals("[name]", new Index("name", ClusterState.UNKNOWN_UUID).toString()); + + Index random = new Index(randomSimpleString(random(), 1, 100), + usually() ? 
Strings.randomBase64UUID(random()) : ClusterState.UNKNOWN_UUID); + assertThat(random.toString(), containsString(random.getName())); + if (ClusterState.UNKNOWN_UUID.equals(random.getUUID())) { + assertThat(random.toString(), not(containsString(random.getUUID()))); + } else { + assertThat(random.toString(), containsString(random.getUUID())); + } + } +} From e32716b26f1ee7a9f359bde2eb7542650379552a Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 8 Mar 2016 12:09:05 -0500 Subject: [PATCH 103/320] [test] Fix uncommon tests failure in TasksIT --- .../action/admin/cluster/node/tasks/TasksIT.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java index 36f4ce9b30e8..8c791a990182 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -42,6 +42,7 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.tasks.MockTaskManager; import org.elasticsearch.test.tasks.MockTaskManagerListener; import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.transport.ReceiveTimeoutTransportException; import java.io.IOException; import java.util.ArrayList; @@ -57,6 +58,7 @@ import java.util.concurrent.locks.ReentrantLock; import java.util.function.Function; import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; +import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.emptyCollectionOf; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.instanceOf; @@ -383,7 +385,8 @@ public class TasksIT extends ESIntegTestCase { if (timeoutException.getCause() != null) { timeoutException = timeoutException.getCause(); } - 
assertThat(failure.getCause().getCause(), instanceOf(ElasticsearchTimeoutException.class)); + assertThat(timeoutException, + either(instanceOf(ElasticsearchTimeoutException.class)).or(instanceOf(ReceiveTimeoutTransportException.class))); } } finally { // Now we can unblock those requests From 496f50bfc305bb83b7e8d30212225a8026a9d0a9 Mon Sep 17 00:00:00 2001 From: Nicholas Knize Date: Tue, 8 Mar 2016 12:45:08 -0600 Subject: [PATCH 104/320] Deprecate lat_lon and precision_step With GeoPoinV2 lat_lon and precision_step parameters will be removed in 5.0. This PR adds deprecation logging for 2.x. --- .../index/mapper/geo/BaseGeoPointFieldMapper.java | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java index 1a1c1592d7e0..5e617dd6815d 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java @@ -28,6 +28,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -56,6 +58,7 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; */ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements ArrayValueMapperParser { public static final String CONTENT_TYPE = "geo_point"; + protected static final DeprecationLogger deprecationLogger = new DeprecationLogger(Loggers.getLogger(BaseGeoPointFieldMapper.class)); public static class Names { 
public static final String LAT = "lat"; public static final String LAT_SUFFIX = "." + LAT; @@ -194,9 +197,13 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr String propName = Strings.toUnderscoreCase(entry.getKey()); Object propNode = entry.getValue(); if (propName.equals("lat_lon")) { + deprecationLogger.deprecated(CONTENT_TYPE + " lat_lon parameter is deprecated and will be removed " + + "in the next major release"); builder.enableLatLon(XContentMapValues.lenientNodeBooleanValue(propNode)); iterator.remove(); } else if (propName.equals("precision_step")) { + deprecationLogger.deprecated(CONTENT_TYPE + " precision_step parameter is deprecated and will be removed " + + "in the next major release"); builder.precisionStep(XContentMapValues.nodeIntegerValue(propNode)); iterator.remove(); } else if (propName.equals("geohash")) { From e5c852f7678b6c568811f9bcbc397b864df1c15f Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 8 Mar 2016 13:39:37 -0800 Subject: [PATCH 105/320] Convert bootstrapcli parser to jopt-simple --- .../elasticsearch/bootstrap/Bootstrap.java | 18 +- .../bootstrap/BootstrapCLIParser.java | 199 +++++------------ .../bootstrap/Elasticsearch.java | 13 +- .../java/org/elasticsearch/cli/Command.java | 63 +++--- .../org/elasticsearch/cli/MultiCommand.java | 4 +- .../org/elasticsearch/cli/TestCommand.java | 41 ---- .../mapper/attachments/StandaloneRunner.java | 189 ---------------- .../bootstrap/BootstrapCliParserTests.java | 206 +++++------------- .../elasticsearch/cli/CommandTestCase.java | 16 +- .../org/elasticsearch/cli/MockTerminal.java | 4 +- 10 files changed, 162 insertions(+), 591 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/cli/TestCommand.java delete mode 100644 plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java 
b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 215659054d2c..73654fdfee5a 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -26,7 +26,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.common.PidFile; import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.cli.CliTool; import org.elasticsearch.common.cli.Terminal; import org.elasticsearch.common.inject.CreationException; import org.elasticsearch.common.logging.ESLogger; @@ -218,17 +217,10 @@ final class Bootstrap { * This method is invoked by {@link Elasticsearch#main(String[])} * to startup elasticsearch. */ - static void init(String[] args) throws Throwable { + static void init() throws Throwable { // Set the system property before anything has a chance to trigger its use initLoggerPrefix(); - BootstrapCLIParser bootstrapCLIParser = new BootstrapCLIParser(); - CliTool.ExitStatus status = bootstrapCLIParser.execute(args); - - if (CliTool.ExitStatus.OK != status) { - exit(status.status()); - } - INSTANCE = new Bootstrap(); boolean foreground = !"false".equals(System.getProperty("es.foreground", System.getProperty("es-foreground"))); @@ -307,14 +299,6 @@ final class Bootstrap { System.err.close(); } - @SuppressForbidden(reason = "System#err") - private static void sysError(String line, boolean flush) { - System.err.println(line); - if (flush) { - System.err.flush(); - } - } - private static void checkForCustomConfFile() { String confFileSetting = System.getProperty("es.default.config"); checkUnsetAndMaybeExit(confFileSetting, "es.default.config"); diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java index ec11a773cccc..3567039cd425 100644 --- 
a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java @@ -19,165 +19,70 @@ package org.elasticsearch.bootstrap; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.Option; +import java.util.Arrays; + +import joptsimple.OptionSet; +import joptsimple.OptionSpec; import org.elasticsearch.Build; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.cli.CliTool; -import org.elasticsearch.common.cli.CliToolConfig; -import org.elasticsearch.common.cli.Terminal; +import org.elasticsearch.cli.Command; +import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.UserError; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.cli.Terminal; import org.elasticsearch.monitor.jvm.JvmInfo; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Locale; -import java.util.Map; -import java.util.Properties; +final class BootstrapCliParser extends Command { -import static org.elasticsearch.common.cli.CliToolConfig.Builder.cmd; -import static org.elasticsearch.common.cli.CliToolConfig.Builder.optionBuilder; + private final OptionSpec versionOption; + private final OptionSpec daemonizeOption; + private final OptionSpec pidfileOption; + private final OptionSpec propertyOption; + private boolean shouldRun = false; -final class BootstrapCLIParser extends CliTool { - - private static final CliToolConfig CONFIG = CliToolConfig.config("elasticsearch", BootstrapCLIParser.class) - .cmds(Start.CMD, Version.CMD) - .build(); - - public BootstrapCLIParser() { - super(CONFIG); - } - - public BootstrapCLIParser(Terminal terminal) { - super(CONFIG, terminal); + BootstrapCliParser() { + super("Starts elasticsearch"); + // TODO: in jopt-simple 5.0, make this mutually 
exclusive with all other options + versionOption = parser.acceptsAll(Arrays.asList("V", "version"), + "Prints elasticsearch version information and exits"); + daemonizeOption = parser.acceptsAll(Arrays.asList("d", "daemonize"), + "Starts Elasticsearch in the background"); + // TODO: in jopt-simple 5.0 this option type can be a Path + pidfileOption = parser.acceptsAll(Arrays.asList("p", "pidfile"), + "Creates a pid file in the specified path on start") + .withRequiredArg(); + propertyOption = parser.accepts("E", "Configures an Elasticsearch setting") + .withRequiredArg(); } @Override - protected Command parse(String cmdName, CommandLine cli) throws Exception { - switch (cmdName.toLowerCase(Locale.ROOT)) { - case Start.NAME: - return Start.parse(terminal, cli); - case Version.NAME: - return Version.parse(terminal, cli); - default: - assert false : "should never get here, if the user enters an unknown command, an error message should be shown before parse is called"; - return null; - } - } - - static class Version extends CliTool.Command { - - private static final String NAME = "version"; - - private static final CliToolConfig.Cmd CMD = cmd(NAME, Version.class).build(); - - public static Command parse(Terminal terminal, CommandLine cli) { - return new Version(terminal); - } - - public Version(Terminal terminal) { - super(terminal); - } - - @Override - public ExitStatus execute(Settings settings, Environment env) throws Exception { + protected void execute(Terminal terminal, OptionSet options) throws Exception { + if (options.has(versionOption)) { terminal.println("Version: " + org.elasticsearch.Version.CURRENT - + ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date() - + ", JVM: " + JvmInfo.jvmInfo().version()); - return ExitStatus.OK_AND_EXIT; + + ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date() + + ", JVM: " + JvmInfo.jvmInfo().version()); + return; } + + // TODO: don't use sysprops for any of these! 
pass the args through to bootstrap... + if (options.has(daemonizeOption)) { + System.setProperty("es.foreground", "false"); + } + String pidFile = pidfileOption.value(options); + if (Strings.isNullOrEmpty(pidFile) == false) { + System.setProperty("es.pidfile", pidFile); + } + + for (String property : propertyOption.values(options)) { + String[] keyValue = property.split("=", 2); + if (keyValue.length != 2) { + throw new UserError(ExitCodes.USAGE, "Malformed elasticsearch setting, must be of the form key=value"); + } + System.setProperty("es." + keyValue[0], keyValue[1]); + } + shouldRun = true; } - static class Start extends CliTool.Command { - - private static final String NAME = "start"; - - private static final CliToolConfig.Cmd CMD = cmd(NAME, Start.class) - .options( - optionBuilder("d", "daemonize").hasArg(false).required(false), - optionBuilder("p", "pidfile").hasArg(true).required(false), - optionBuilder("V", "version").hasArg(false).required(false), - Option.builder("D").argName("property=value").valueSeparator('=').numberOfArgs(2) - ) - .stopAtNonOption(true) // needed to parse the --foo.bar options, so this parser must be lenient - .build(); - - // TODO: don't use system properties as a way to do this, its horrible... - @SuppressForbidden(reason = "Sets system properties passed as CLI parameters") - public static Command parse(Terminal terminal, CommandLine cli) throws UserError { - if (cli.hasOption("V")) { - return Version.parse(terminal, cli); - } - - if (cli.hasOption("d")) { - System.setProperty("es.foreground", "false"); - } - - String pidFile = cli.getOptionValue("pidfile"); - if (!Strings.isNullOrEmpty(pidFile)) { - System.setProperty("es.pidfile", pidFile); - } - - if (cli.hasOption("D")) { - Properties properties = cli.getOptionProperties("D"); - for (Map.Entry entry : properties.entrySet()) { - String key = (String) entry.getKey(); - String propertyName = key.startsWith("es.") ? key : "es." 
+ key; - System.setProperty(propertyName, entry.getValue().toString()); - } - } - - // hacky way to extract all the fancy extra args, there is no CLI tool helper for this - Iterator iterator = cli.getArgList().iterator(); - final Map properties = new HashMap<>(); - while (iterator.hasNext()) { - String arg = iterator.next(); - if (!arg.startsWith("--")) { - if (arg.startsWith("-D") || arg.startsWith("-d") || arg.startsWith("-p")) { - throw new UserError(ExitStatus.USAGE.status(), - "Parameter [" + arg + "] starting with \"-D\", \"-d\" or \"-p\" must be before any parameters starting with --" - ); - } else { - throw new UserError(ExitStatus.USAGE.status(), "Parameter [" + arg + "]does not start with --"); - } - } - // if there is no = sign, we have to get the next argu - arg = arg.replace("--", ""); - if (arg.contains("=")) { - String[] splitArg = arg.split("=", 2); - String key = splitArg[0]; - String value = splitArg[1]; - properties.put("es." + key, value); - } else { - if (iterator.hasNext()) { - String value = iterator.next(); - if (value.startsWith("--")) { - throw new UserError(ExitStatus.USAGE.status(), "Parameter [" + arg + "] needs value"); - } - properties.put("es." 
+ arg, value); - } else { - throw new UserError(ExitStatus.USAGE.status(), "Parameter [" + arg + "] needs value"); - } - } - } - for (Map.Entry entry : properties.entrySet()) { - System.setProperty(entry.getKey(), entry.getValue()); - } - return new Start(terminal); - } - - public Start(Terminal terminal) { - super(terminal); - - } - - @Override - public ExitStatus execute(Settings settings, Environment env) throws Exception { - return ExitStatus.OK; - } + boolean shouldRun() { + return shouldRun; } - } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 107a955696c4..214efe483cac 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -21,6 +21,8 @@ package org.elasticsearch.bootstrap; import java.io.IOException; +import org.elasticsearch.common.cli.Terminal; + /** * This class starts elasticsearch. */ @@ -32,9 +34,16 @@ public final class Elasticsearch { /** * Main entry point for starting elasticsearch */ - public static void main(String[] args) throws StartupError { + public static void main(String[] args) throws Exception { + BootstrapCliParser parser = new BootstrapCliParser(); + parser.main(args, Terminal.DEFAULT); + + if (parser.shouldRun() == false) { + return; + } + try { - Bootstrap.init(args); + Bootstrap.init(); } catch (Throwable t) { // format exceptions to the console in a special way // to avoid 2MB stacktraces from guice, etc. 
diff --git a/core/src/main/java/org/elasticsearch/cli/Command.java b/core/src/main/java/org/elasticsearch/cli/Command.java index 6e57905b5b27..f608d0cefb13 100644 --- a/core/src/main/java/org/elasticsearch/cli/Command.java +++ b/core/src/main/java/org/elasticsearch/cli/Command.java @@ -21,6 +21,7 @@ package org.elasticsearch.cli; import java.io.IOException; import java.util.Arrays; +import java.util.List; import joptsimple.OptionException; import joptsimple.OptionParser; @@ -49,38 +50,9 @@ public abstract class Command { } /** Parses options for this command from args and executes it. */ - protected final int main(String[] args, Terminal terminal) throws Exception { - - final OptionSet options; + public final int main(String[] args, Terminal terminal) throws Exception { try { - options = parser.parse(args); - } catch (OptionException e) { - printHelp(terminal); - terminal.println(Terminal.Verbosity.SILENT, "ERROR: " + e.getMessage()); - return ExitCodes.USAGE; - } - - if (options.has(helpOption)) { - printHelp(terminal); - return ExitCodes.OK; - } - - if (options.has(silentOption)) { - if (options.has(verboseOption)) { - // mutually exclusive, we can remove this with jopt-simple 5.0, which natively supports it - printHelp(terminal); - terminal.println(Terminal.Verbosity.SILENT, "ERROR: Cannot specify -s and -v together"); - return ExitCodes.USAGE; - } - terminal.setVerbosity(Terminal.Verbosity.SILENT); - } else if (options.has(verboseOption)) { - terminal.setVerbosity(Terminal.Verbosity.VERBOSE); - } else { - terminal.setVerbosity(Terminal.Verbosity.NORMAL); - } - - try { - return execute(terminal, options); + mainWithoutErrorHandling(args, terminal); } catch (OptionException e) { printHelp(terminal); terminal.println(Terminal.Verbosity.SILENT, "ERROR: " + e.getMessage()); @@ -89,6 +61,33 @@ public abstract class Command { terminal.println(Terminal.Verbosity.SILENT, "ERROR: " + e.getMessage()); return e.exitCode; } + return ExitCodes.OK; + } + + /** + * Executes 
the command, but all errors are thrown. + */ + void mainWithoutErrorHandling(String[] args, Terminal terminal) throws Exception { + final OptionSet options = parser.parse(args); + + if (options.has(helpOption)) { + printHelp(terminal); + return; + } + + if (options.has(silentOption)) { + if (options.has(verboseOption)) { + // mutually exclusive, we can remove this with jopt-simple 5.0, which natively supports it + throw new UserError(ExitCodes.USAGE, "Cannot specify -s and -v together"); + } + terminal.setVerbosity(Terminal.Verbosity.SILENT); + } else if (options.has(verboseOption)) { + terminal.setVerbosity(Terminal.Verbosity.VERBOSE); + } else { + terminal.setVerbosity(Terminal.Verbosity.NORMAL); + } + + execute(terminal, options); } /** Prints a help message for the command to the terminal. */ @@ -111,5 +110,5 @@ public abstract class Command { * Executes this command. * * Any runtime user errors (like an input file that does not exist), should throw a {@link UserError}. */ - protected abstract int execute(Terminal terminal, OptionSet options) throws Exception; + protected abstract void execute(Terminal terminal, OptionSet options) throws Exception; } diff --git a/core/src/main/java/org/elasticsearch/cli/MultiCommand.java b/core/src/main/java/org/elasticsearch/cli/MultiCommand.java index 94c403d57d0a..5862b6f23111 100644 --- a/core/src/main/java/org/elasticsearch/cli/MultiCommand.java +++ b/core/src/main/java/org/elasticsearch/cli/MultiCommand.java @@ -56,7 +56,7 @@ public class MultiCommand extends Command { } @Override - protected int execute(Terminal terminal, OptionSet options) throws Exception { + protected void execute(Terminal terminal, OptionSet options) throws Exception { if (subcommands.isEmpty()) { throw new IllegalStateException("No subcommands configured"); } @@ -68,6 +68,6 @@ public class MultiCommand extends Command { if (subcommand == null) { throw new UserError(ExitCodes.USAGE, "Unknown command [" + args[0] + "]"); } - return 
subcommand.main(Arrays.copyOfRange(args, 1, args.length), terminal); + subcommand.mainWithoutErrorHandling(Arrays.copyOfRange(args, 1, args.length), terminal); } } diff --git a/core/src/main/java/org/elasticsearch/cli/TestCommand.java b/core/src/main/java/org/elasticsearch/cli/TestCommand.java deleted file mode 100644 index fe3fa5c6b8cf..000000000000 --- a/core/src/main/java/org/elasticsearch/cli/TestCommand.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.cli; - -import joptsimple.OptionSet; -import org.elasticsearch.common.cli.Terminal; - -public class TestCommand extends Command { - - public static void main(String[] args) throws Exception { - exit(new TestCommand().main(args, Terminal.DEFAULT)); - } - - public TestCommand() { - super("some test cli"); - parser.accepts("foo", "some option"); - } - - @Override - protected int execute(Terminal terminal, OptionSet options) throws Exception { - terminal.println("running"); - return ExitCodes.OK; - } -} diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java deleted file mode 100644 index 03c6e65047a9..000000000000 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java +++ /dev/null @@ -1,189 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.mapper.attachments; - -import org.apache.commons.cli.CommandLine; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.cli.CliTool; -import org.elasticsearch.common.cli.CliToolConfig; -import org.elasticsearch.common.cli.Terminal; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.env.Environment; -import org.elasticsearch.index.MapperTestUtils; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.ParseContext; - -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Locale; - -import static org.elasticsearch.common.cli.CliToolConfig.Builder.cmd; -import static org.elasticsearch.common.cli.CliToolConfig.Builder.option; -import static org.elasticsearch.common.io.Streams.copy; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.mapper.attachments.AttachmentUnitTestCase.getIndicesModuleWithRegisteredAttachmentMapper; -import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; - -/** - * This class provides a simple main class which can be used to test what is extracted from a given binary file. 
- * You can run it using - * -u file://URL/TO/YOUR/DOC - * --size set extracted size (default to mapper attachment size) - * BASE64 encoded binary - * - * Example: - * StandaloneRunner BASE64Text - * StandaloneRunner -u /tmp/mydoc.pdf - * StandaloneRunner -u /tmp/mydoc.pdf --size 1000000 - */ -@SuppressForbidden(reason = "commandline tool") -public class StandaloneRunner extends CliTool { - - private static final CliToolConfig CONFIG = CliToolConfig.config("tika", StandaloneRunner.class) - .cmds(TikaRunner.CMD) - .build(); - - static { - System.setProperty("es.path.home", "/tmp"); - } - - static class TikaRunner extends Command { - private static final String NAME = "tika"; - private final String url; - private final Integer size; - private final String base64text; - private final DocumentMapper docMapper; - - private static final CliToolConfig.Cmd CMD = cmd(NAME, TikaRunner.class) - .options(option("u", "url").required(false).hasArg(false)) - .options(option("t", "size").required(false).hasArg(false)) - .build(); - - protected TikaRunner(Terminal terminal, String url, Integer size, String base64text) throws IOException { - super(terminal); - this.size = size; - this.url = url; - this.base64text = base64text; - DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(PathUtils.get("."), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); // use CWD b/c it won't be used - - String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/standalone/standalone-mapping.json"); - docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); - } - - @Override - public ExitStatus execute(Settings settings, Environment env) throws Exception { - XContentBuilder builder = jsonBuilder().startObject().field("file").startObject(); - - if (base64text != null) { - // If base64 is provided - builder.field("_content", base64text); - } else { - // A file is provided - byte[] bytes = 
copyToBytes(PathUtils.get(url)); - builder.field("_content", bytes); - } - - if (size >= 0) { - builder.field("_indexed_chars", size); - } - - BytesReference json = builder.endObject().endObject().bytes(); - - ParseContext.Document doc = docMapper.parse("person", "person", "1", json).rootDoc(); - - terminal.println("## Extracted text"); - terminal.println("--------------------- BEGIN -----------------------"); - terminal.println(doc.get("file.content")); - terminal.println("---------------------- END ------------------------"); - terminal.println("## Metadata"); - printMetadataContent(doc, AttachmentMapper.FieldNames.AUTHOR); - printMetadataContent(doc, AttachmentMapper.FieldNames.CONTENT_LENGTH); - printMetadataContent(doc, AttachmentMapper.FieldNames.CONTENT_TYPE); - printMetadataContent(doc, AttachmentMapper.FieldNames.DATE); - printMetadataContent(doc, AttachmentMapper.FieldNames.KEYWORDS); - printMetadataContent(doc, AttachmentMapper.FieldNames.LANGUAGE); - printMetadataContent(doc, AttachmentMapper.FieldNames.NAME); - printMetadataContent(doc, AttachmentMapper.FieldNames.TITLE); - - return ExitStatus.OK; - } - - private void printMetadataContent(ParseContext.Document doc, String field) { - terminal.println("- " + field + ":" + doc.get(docMapper.mappers().getMapper("file." + field).fieldType().name())); - } - - public static byte[] copyToBytes(Path path) throws IOException { - try (InputStream is = Files.newInputStream(path); - BytesStreamOutput out = new BytesStreamOutput()) { - copy(is, out); - return out.bytes().toBytes(); - } - } - - public static Command parse(Terminal terminal, CommandLine cli) throws IOException { - String url = cli.getOptionValue("u"); - String base64text = null; - String sSize = cli.getOptionValue("size"); - Integer size = sSize != null ? 
Integer.parseInt(sSize) : -1; - if (url == null && cli.getArgs().length == 0) { - return exitCmd(ExitStatus.USAGE, terminal, "url or BASE64 content should be provided (type -h for help)"); - } - if (url == null) { - if (cli.getArgs().length == 0) { - return exitCmd(ExitStatus.USAGE, terminal, "url or BASE64 content should be provided (type -h for help)"); - } - base64text = cli.getArgs()[0]; - } else { - if (cli.getArgs().length == 1) { - return exitCmd(ExitStatus.USAGE, terminal, "url or BASE64 content should be provided. Not both. (type -h for help)"); - } - } - return new TikaRunner(terminal, url, size, base64text); - } - } - - public StandaloneRunner() { - super(CONFIG); - } - - - public static void main(String[] args) throws Exception { - StandaloneRunner pluginManager = new StandaloneRunner(); - pluginManager.execute(args); - } - - @Override - protected Command parse(String cmdName, CommandLine cli) throws Exception { - switch (cmdName.toLowerCase(Locale.ROOT)) { - case TikaRunner.NAME: return TikaRunner.parse(terminal, cli); - default: - assert false : "can't get here as cmd name is validated before this method is called"; - return exitCmd(ExitStatus.CODE_ERROR); - } - } -} diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java index 18d6e0ac3c9a..726d17b09384 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java @@ -19,11 +19,14 @@ package org.elasticsearch.bootstrap; +import joptsimple.OptionException; import org.elasticsearch.Build; import org.elasticsearch.Version; +import org.elasticsearch.cli.Command; +import org.elasticsearch.cli.CommandTestCase; +import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.cli.CliTool.ExitStatus; 
-import org.elasticsearch.common.cli.CliToolTestCase; import org.elasticsearch.cli.UserError; import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.common.collect.Tuple; @@ -46,9 +49,13 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @SuppressForbidden(reason = "modifies system properties intentionally") -public class BootstrapCliParserTests extends CliToolTestCase { +public class BootstrapCliParserTests extends CommandTestCase { + + @Override + protected Command newCommand() { + return new BootstrapCliParser(); + } - private MockTerminal terminal = new MockTerminal(); private List propertiesToClear = new ArrayList<>(); private Map properties; @@ -66,195 +73,86 @@ public class BootstrapCliParserTests extends CliToolTestCase { assertEquals("properties leaked", properties, new HashMap<>(System.getProperties())); } - public void testThatVersionIsReturned() throws Exception { - BootstrapCLIParser parser = new BootstrapCLIParser(terminal); - ExitStatus status = parser.execute(args("version")); - assertStatus(status, OK_AND_EXIT); - - String output = terminal.getOutput(); - assertTrue(output, output.contains(Version.CURRENT.toString())); - assertTrue(output, output.contains(Build.CURRENT.shortHash())); - assertTrue(output, output.contains(Build.CURRENT.date())); - assertTrue(output, output.contains(JvmInfo.jvmInfo().version())); + void assertShouldRun(boolean shouldRun) { + BootstrapCliParser parser = (BootstrapCliParser)command; + assertEquals(shouldRun, parser.shouldRun()); } - public void testThatVersionIsReturnedAsStartParameter() throws Exception { - BootstrapCLIParser parser = new BootstrapCLIParser(terminal); - ExitStatus status = parser.execute(args("start -V")); - assertStatus(status, OK_AND_EXIT); - - String output = terminal.getOutput(); + public void testVersion() throws Exception { + String output = execute("-V"); assertTrue(output, output.contains(Version.CURRENT.toString())); assertTrue(output, 
output.contains(Build.CURRENT.shortHash())); assertTrue(output, output.contains(Build.CURRENT.date())); assertTrue(output, output.contains(JvmInfo.jvmInfo().version())); + assertShouldRun(false); terminal.reset(); - parser = new BootstrapCLIParser(terminal); - status = parser.execute(args("start --version")); - assertStatus(status, OK_AND_EXIT); - - output = terminal.getOutput(); + output = execute("--version"); assertTrue(output, output.contains(Version.CURRENT.toString())); assertTrue(output, output.contains(Build.CURRENT.shortHash())); assertTrue(output, output.contains(Build.CURRENT.date())); assertTrue(output, output.contains(JvmInfo.jvmInfo().version())); + assertShouldRun(false); } - public void testThatPidFileCanBeConfigured() throws Exception { - BootstrapCLIParser parser = new BootstrapCLIParser(terminal); + public void testPidfile() throws Exception { registerProperties("es.pidfile"); - ExitStatus status = parser.execute(args("start --pidfile")); // missing pid file - assertStatus(status, USAGE); + // missing argument + OptionException e = expectThrows(OptionException.class, () -> { + execute("-p"); + }); + assertEquals("Option p/pidfile requires an argument", e.getMessage()); + assertShouldRun(false); // good cases - status = parser.execute(args("start --pidfile /tmp/pid")); - assertStatus(status, OK); + terminal.reset(); + execute("--pidfile", "/tmp/pid"); assertSystemProperty("es.pidfile", "/tmp/pid"); + assertShouldRun(true); System.clearProperty("es.pidfile"); - status = parser.execute(args("start -p /tmp/pid")); - assertStatus(status, OK); + terminal.reset(); + execute("-p", "/tmp/pid"); assertSystemProperty("es.pidfile", "/tmp/pid"); + assertShouldRun(true); } - public void testThatParsingDaemonizeWorks() throws Exception { - BootstrapCLIParser parser = new BootstrapCLIParser(terminal); + public void testNoDaemonize() throws Exception { registerProperties("es.foreground"); - ExitStatus status = parser.execute(args("start -d")); - 
assertStatus(status, OK); - assertThat(System.getProperty("es.foreground"), is("false")); + execute(); + assertSystemProperty("es.foreground", null); + assertShouldRun(true); } - public void testThatNotDaemonizingDoesNotConfigureProperties() throws Exception { - BootstrapCLIParser parser = new BootstrapCLIParser(terminal); + public void testDaemonize() throws Exception { registerProperties("es.foreground"); - ExitStatus status = parser.execute(args("start")); - assertStatus(status, OK); - assertThat(System.getProperty("es.foreground"), is(nullValue())); + execute("-d"); + assertSystemProperty("es.foreground", "false"); + assertShouldRun(true); + + System.clearProperty("es.foreground"); + execute("--daemonize"); + assertSystemProperty("es.foreground", "false"); + assertShouldRun(true); } - public void testThatJavaPropertyStyleArgumentsCanBeParsed() throws Exception { - BootstrapCLIParser parser = new BootstrapCLIParser(terminal); + public void testConfig() throws Exception { registerProperties("es.foo", "es.spam"); - ExitStatus status = parser.execute(args("start -Dfoo=bar -Dspam=eggs")); - assertStatus(status, OK); + execute("-Efoo=bar", "-Espam=eggs"); assertSystemProperty("es.foo", "bar"); assertSystemProperty("es.spam", "eggs"); + assertShouldRun(true); } - public void testThatJavaPropertyStyleArgumentsWithEsPrefixAreNotPrefixedTwice() throws Exception { - BootstrapCLIParser parser = new BootstrapCLIParser(terminal); - registerProperties("es.spam", "es.pidfile"); - - ExitStatus status = parser.execute(args("start -Des.pidfile=/path/to/foo/elasticsearch/distribution/zip/target/integ-tests/es.pid -Dspam=eggs")); - assertStatus(status, OK); - assertThat(System.getProperty("es.es.pidfile"), is(nullValue())); - assertSystemProperty("es.pidfile", "/path/to/foo/elasticsearch/distribution/zip/target/integ-tests/es.pid"); - assertSystemProperty("es.spam", "eggs"); - } - - public void testThatUnknownLongOptionsCanBeParsed() throws Exception { - BootstrapCLIParser parser = 
new BootstrapCLIParser(terminal); - registerProperties("es.network.host", "es.my.option"); - - ExitStatus status = parser.execute(args("start --network.host 127.0.0.1 --my.option=true")); - assertStatus(status, OK); - assertSystemProperty("es.network.host", "127.0.0.1"); - assertSystemProperty("es.my.option", "true"); - } - - public void testThatUnknownLongOptionsNeedAValue() throws Exception { - BootstrapCLIParser parser = new BootstrapCLIParser(terminal); - registerProperties("es.network.host"); - - ExitStatus status = parser.execute(args("start --network.host")); - assertStatus(status, USAGE); - String output = terminal.getOutput(); - assertTrue(output, output.contains("Parameter [network.host] needs value")); - - terminal.reset(); - status = parser.execute(args("start --network.host --foo")); - assertStatus(status, USAGE); - output = terminal.getOutput(); - assertTrue(output, output.contains("Parameter [network.host] needs value")); - } - - public void testParsingErrors() throws Exception { - BootstrapCLIParser parser = new BootstrapCLIParser(terminal); - - // unknown params - ExitStatus status = parser.execute(args("version --unknown-param /tmp/pid")); - assertStatus(status, USAGE); - String output = terminal.getOutput(); - assertTrue(output, output.contains("Unrecognized option: --unknown-param")); - - // single dash in extra params - terminal.reset(); - parser = new BootstrapCLIParser(terminal); - status = parser.execute(args("start -network.host 127.0.0.1")); - assertStatus(status, USAGE); - output = terminal.getOutput(); - assertTrue(output, output.contains("Parameter [-network.host]does not start with --")); - - // never ended parameter - terminal = new MockTerminal(); - parser = new BootstrapCLIParser(terminal); - status = parser.execute(args("start --network.host")); - assertStatus(status, USAGE); - output = terminal.getOutput(); - assertTrue(output, output.contains("Parameter [network.host] needs value")); - - // free floating value - terminal = new 
MockTerminal(); - parser = new BootstrapCLIParser(terminal); - status = parser.execute(args("start 127.0.0.1")); - assertStatus(status, USAGE); - output = terminal.getOutput(); - assertTrue(output, output.contains("Parameter [127.0.0.1]does not start with --")); - } - - public void testHelpWorks() throws Exception { - List> tuples = new ArrayList<>(); - tuples.add(new Tuple<>("version --help", "elasticsearch-version.help")); - tuples.add(new Tuple<>("version -h", "elasticsearch-version.help")); - tuples.add(new Tuple<>("start --help", "elasticsearch-start.help")); - tuples.add(new Tuple<>("start -h", "elasticsearch-start.help")); - tuples.add(new Tuple<>("--help", "elasticsearch.help")); - tuples.add(new Tuple<>("-h", "elasticsearch.help")); - - for (Tuple tuple : tuples) { - terminal.reset(); - BootstrapCLIParser parser = new BootstrapCLIParser(terminal); - ExitStatus status = parser.execute(args(tuple.v1())); - assertStatus(status, OK_AND_EXIT); - assertTerminalOutputContainsHelpFile(terminal, "/org/elasticsearch/bootstrap/" + tuple.v2()); - } - } - - public void testThatSpacesInParametersAreSupported() throws Exception { - // emulates: bin/elasticsearch --node.name "'my node with spaces'" --pidfile "'/tmp/my pid.pid'" - BootstrapCLIParser parser = new BootstrapCLIParser(terminal); - registerProperties("es.pidfile", "es.my.param"); - - ExitStatus status = parser.execute("start", "--pidfile", "foo with space", "--my.param", "my awesome neighbour"); - assertStatus(status, OK); - assertSystemProperty("es.pidfile", "foo with space"); - assertSystemProperty("es.my.param", "my awesome neighbour"); - - } - - public void testThatHelpfulErrorMessageIsGivenWhenParametersAreOutOfOrder() throws Exception { - BootstrapCLIParser parser = new BootstrapCLIParser(terminal); + public void testConfigMalformed() throws Exception { UserError e = expectThrows(UserError.class, () -> { - parser.parse("start", new String[]{"--foo=bar", "-Dbaz=qux"}); + execute("-Efoo"); }); - 
assertThat(e.getMessage(), containsString("must be before any parameters starting with --")); - assertNull(System.getProperty("es.foo")); + assertTrue(e.getMessage(), e.getMessage().contains("Malformed elasticsearch setting")); } private void registerProperties(String ... systemProperties) { @@ -265,8 +163,4 @@ public class BootstrapCliParserTests extends CliToolTestCase { String msg = String.format(Locale.ROOT, "Expected property %s to be %s, terminal output was %s", name, expectedValue, terminal.getOutput()); assertThat(msg, System.getProperty(name), is(expectedValue)); } - - private void assertStatus(ExitStatus status, ExitStatus expectedStatus) throws Exception { - assertThat(String.format(Locale.ROOT, "Expected status to be [%s], but was [%s], terminal output was %s", expectedStatus, status, terminal.getOutput()), status, is(expectedStatus)); - } } diff --git a/test/framework/src/main/java/org/elasticsearch/cli/CommandTestCase.java b/test/framework/src/main/java/org/elasticsearch/cli/CommandTestCase.java index 3af25509adbe..a9b31b636ccd 100644 --- a/test/framework/src/main/java/org/elasticsearch/cli/CommandTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cli/CommandTestCase.java @@ -29,20 +29,30 @@ import org.junit.Before; */ public abstract class CommandTestCase extends ESTestCase { + /** The terminal that execute uses. */ protected final MockTerminal terminal = new MockTerminal(); + /** The last command that was executed. */ + protected Command command; + @Before public void resetTerminal() { terminal.reset(); terminal.setVerbosity(Terminal.Verbosity.NORMAL); } + /** Creates a Command to test execution. */ protected abstract Command newCommand(); + /** + * Runs the command with the given args. + * + * Output can be found in {@link #terminal}. + * The command created can be found in {@link #command}. + */ public String execute(String... 
args) throws Exception { - Command command = newCommand(); - OptionSet options = command.parser.parse(args); - command.execute(terminal, options); + command = newCommand(); + command.mainWithoutErrorHandling(args, terminal); return terminal.getOutput(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/cli/MockTerminal.java b/test/framework/src/main/java/org/elasticsearch/cli/MockTerminal.java index bb01369ac509..b712b216f9a4 100644 --- a/test/framework/src/main/java/org/elasticsearch/cli/MockTerminal.java +++ b/test/framework/src/main/java/org/elasticsearch/cli/MockTerminal.java @@ -47,7 +47,7 @@ public class MockTerminal extends Terminal { @Override public String readText(String prompt) { if (textInput.isEmpty()) { - return null; + throw new IllegalStateException("No text input configured for prompt [" + prompt + "]"); } return textInput.removeFirst(); } @@ -55,7 +55,7 @@ public class MockTerminal extends Terminal { @Override public char[] readSecret(String prompt) { if (secretInput.isEmpty()) { - return null; + throw new IllegalStateException("No secret input configured for prompt [" + prompt + "]"); } return secretInput.removeFirst().toCharArray(); } From 3836f3a73600b7af4751a90d21ac8ef076e9ad7b Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 8 Mar 2016 13:40:39 -0800 Subject: [PATCH 106/320] Remove reference to standalonerunner --- .../common/cli/CliToolConfig.java | 53 ------------------- .../plugins/InstallPluginCommand.java | 21 ++++---- .../plugins/ListPluginsCommand.java | 4 +- .../plugins/RemovePluginCommand.java | 3 +- .../elasticsearch/plugins/PluginCliTests.java | 30 ++++++++++- docs/plugins/mapper-attachments.asciidoc | 38 ------------- 6 files changed, 41 insertions(+), 108 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/cli/CliToolConfig.java b/core/src/main/java/org/elasticsearch/common/cli/CliToolConfig.java index d0ba897b33d8..ff752f45ef07 100644 --- 
a/core/src/main/java/org/elasticsearch/common/cli/CliToolConfig.java +++ b/core/src/main/java/org/elasticsearch/common/cli/CliToolConfig.java @@ -33,10 +33,6 @@ import java.util.Map; */ public class CliToolConfig { - public static Builder config(String name, Class toolType) { - return new Builder(name, toolType); - } - private final Class toolType; private final String name; private final Map cmds; @@ -82,55 +78,6 @@ public class CliToolConfig { helpPrinter.print(this, terminal); } - public static class Builder { - - public static Cmd.Builder cmd(String name, Class cmdType) { - return new Cmd.Builder(name, cmdType); - } - - public static OptionBuilder option(String shortName, String longName) { - return new OptionBuilder(shortName, longName); - } - - public static Option.Builder optionBuilder(String shortName, String longName) { - return Option.builder(shortName).argName(longName).longOpt(longName); - } - - public static OptionGroupBuilder optionGroup(boolean required) { - return new OptionGroupBuilder(required); - } - - private final Class toolType; - private final String name; - private Cmd[] cmds; - - private Builder(String name, Class toolType) { - this.name = name; - this.toolType = toolType; - } - - public Builder cmds(Cmd.Builder... cmds) { - this.cmds = new Cmd[cmds.length]; - for (int i = 0; i < cmds.length; i++) { - this.cmds[i] = cmds[i].build(); - this.cmds[i].toolName = name; - } - return this; - } - - public Builder cmds(Cmd... 
cmds) { - for (int i = 0; i < cmds.length; i++) { - cmds[i].toolName = name; - } - this.cmds = cmds; - return this; - } - - public CliToolConfig build() { - return new CliToolConfig(name, toolType, cmds); - } - } - public static class Cmd { private String toolName; diff --git a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index bbe00fddd8ca..83273e6c1c49 100644 --- a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -145,7 +145,7 @@ class InstallPluginCommand extends Command { } @Override - protected int execute(Terminal terminal, OptionSet options) throws Exception { + protected void execute(Terminal terminal, OptionSet options) throws Exception { // TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args List args = arguments.values(options); if (args.size() != 1) { @@ -154,7 +154,6 @@ class InstallPluginCommand extends Command { String pluginId = args.get(0); boolean isBatch = options.has(batchOption) || System.console() == null; execute(terminal, pluginId, isBatch); - return ExitCodes.OK; } // pkg private for testing @@ -222,14 +221,14 @@ class InstallPluginCommand extends Command { BufferedReader checksumReader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8)); expectedChecksum = checksumReader.readLine(); if (checksumReader.readLine() != null) { - throw new UserError(CliTool.ExitStatus.IO_ERROR.status(), "Invalid checksum file at " + checksumUrl); + throw new UserError(ExitCodes.IO_ERROR, "Invalid checksum file at " + checksumUrl); } } byte[] zipbytes = Files.readAllBytes(zip); String gotChecksum = MessageDigests.toHexString(MessageDigests.sha1().digest(zipbytes)); if (expectedChecksum.equals(gotChecksum) == false) { - throw new UserError(CliTool.ExitStatus.IO_ERROR.status(), "SHA1 mismatch, expected " + expectedChecksum 
+ " but got " + gotChecksum); + throw new UserError(ExitCodes.IO_ERROR, "SHA1 mismatch, expected " + expectedChecksum + " but got " + gotChecksum); } return zip; @@ -271,7 +270,7 @@ class InstallPluginCommand extends Command { Files.delete(zip); if (hasEsDir == false) { IOUtils.rm(target); - throw new UserError(CliTool.ExitStatus.DATA_ERROR.status(), "`elasticsearch` directory is missing in the plugin zip"); + throw new UserError(ExitCodes.DATA_ERROR, "`elasticsearch` directory is missing in the plugin zip"); } return target; } @@ -285,7 +284,7 @@ class InstallPluginCommand extends Command { // don't let luser install plugin as a module... // they might be unavoidably in maven central and are packaged up the same way) if (MODULES.contains(info.getName())) { - throw new UserError(CliTool.ExitStatus.USAGE.status(), "plugin '" + info.getName() + "' cannot be installed like this, it is a system module"); + throw new UserError(ExitCodes.USAGE, "plugin '" + info.getName() + "' cannot be installed like this, it is a system module"); } // check for jar hell before any copying @@ -341,7 +340,7 @@ class InstallPluginCommand extends Command { final Path destination = env.pluginsFile().resolve(info.getName()); if (Files.exists(destination)) { - throw new UserError(CliTool.ExitStatus.USAGE.status(), "plugin directory " + destination.toAbsolutePath() + " already exists. To update the plugin, uninstall it first using 'remove " + info.getName() + "' command"); + throw new UserError(ExitCodes.USAGE, "plugin directory " + destination.toAbsolutePath() + " already exists. To update the plugin, uninstall it first using 'remove " + info.getName() + "' command"); } Path tmpBinDir = tmpRoot.resolve("bin"); @@ -374,7 +373,7 @@ class InstallPluginCommand extends Command { /** Copies the files from {@code tmpBinDir} into {@code destBinDir}, along with permissions from dest dirs parent. 
*/ private void installBin(PluginInfo info, Path tmpBinDir, Path destBinDir) throws Exception { if (Files.isDirectory(tmpBinDir) == false) { - throw new UserError(CliTool.ExitStatus.IO_ERROR.status(), "bin in plugin " + info.getName() + " is not a directory"); + throw new UserError(ExitCodes.IO_ERROR, "bin in plugin " + info.getName() + " is not a directory"); } Files.createDirectory(destBinDir); @@ -392,7 +391,7 @@ class InstallPluginCommand extends Command { try (DirectoryStream stream = Files.newDirectoryStream(tmpBinDir)) { for (Path srcFile : stream) { if (Files.isDirectory(srcFile)) { - throw new UserError(CliTool.ExitStatus.DATA_ERROR.status(), "Directories not allowed in bin dir for plugin " + info.getName() + ", found " + srcFile.getFileName()); + throw new UserError(ExitCodes.DATA_ERROR, "Directories not allowed in bin dir for plugin " + info.getName() + ", found " + srcFile.getFileName()); } Path destFile = destBinDir.resolve(tmpBinDir.relativize(srcFile)); @@ -413,7 +412,7 @@ class InstallPluginCommand extends Command { */ private void installConfig(PluginInfo info, Path tmpConfigDir, Path destConfigDir) throws Exception { if (Files.isDirectory(tmpConfigDir) == false) { - throw new UserError(CliTool.ExitStatus.IO_ERROR.status(), "config in plugin " + info.getName() + " is not a directory"); + throw new UserError(ExitCodes.IO_ERROR, "config in plugin " + info.getName() + " is not a directory"); } // create the plugin's config dir "if necessary" @@ -422,7 +421,7 @@ class InstallPluginCommand extends Command { try (DirectoryStream stream = Files.newDirectoryStream(tmpConfigDir)) { for (Path srcFile : stream) { if (Files.isDirectory(srcFile)) { - throw new UserError(CliTool.ExitStatus.DATA_ERROR.status(), "Directories not allowed in config dir for plugin " + info.getName()); + throw new UserError(ExitCodes.DATA_ERROR, "Directories not allowed in config dir for plugin " + info.getName()); } Path destFile = 
destConfigDir.resolve(tmpConfigDir.relativize(srcFile)); diff --git a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java index 142a18cbde53..276e38f1595f 100644 --- a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java @@ -44,7 +44,7 @@ class ListPluginsCommand extends Command { } @Override - public int execute(Terminal terminal, OptionSet options) throws Exception { + protected void execute(Terminal terminal, OptionSet options) throws Exception { if (Files.exists(env.pluginsFile()) == false) { throw new IOException("Plugins directory missing: " + env.pluginsFile()); } @@ -55,7 +55,5 @@ class ListPluginsCommand extends Command { terminal.println(plugin.getFileName().toString()); } } - - return ExitCodes.OK; } } diff --git a/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java index 10a73a0fc9a8..f435a16edf04 100644 --- a/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java @@ -53,14 +53,13 @@ class RemovePluginCommand extends Command { } @Override - public int execute(Terminal terminal, OptionSet options) throws Exception { + protected void execute(Terminal terminal, OptionSet options) throws Exception { // TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args List args = arguments.values(options); if (args.size() != 1) { throw new UserError(ExitCodes.USAGE, "Must supply a single plugin id argument"); } execute(terminal, args.get(0)); - return ExitCodes.OK; } // pkg private for testing diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java index 708cefd91b28..73d979495717 100644 --- 
a/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java @@ -19,13 +19,40 @@ package org.elasticsearch.plugins; +import java.io.IOException; +import java.nio.file.Path; + +import org.elasticsearch.cli.Command; +import org.elasticsearch.cli.CommandTestCase; import org.elasticsearch.common.cli.CliToolTestCase; import org.elasticsearch.cli.MockTerminal; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.junit.Before; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; -public class PluginCliTests extends CliToolTestCase { +public class PluginCliTests extends CommandTestCase { + + // the home dir for each test to use + Path homeDir; + + // settings used to create an Environment for tools + Settings.Builder settingsBuilder; + + @Before + public void setupHome() { + homeDir = createTempDir(); + settingsBuilder = Settings.builder() + .put("path.home", homeDir); + } + + @Override + protected Command newCommand() { + return new PluginCli(new Environment(settingsBuilder.build())); + } + public void testHelpWorks() throws Exception { MockTerminal terminal = new MockTerminal(); /* nocommit @@ -48,4 +75,5 @@ public class PluginCliTests extends CliToolTestCase { assertTerminalOutputContainsHelpFile(terminal, "/org/elasticsearch/plugins/plugin-list.help"); */ } + } diff --git a/docs/plugins/mapper-attachments.asciidoc b/docs/plugins/mapper-attachments.asciidoc index ed992623a503..0ad452d92e9e 100644 --- a/docs/plugins/mapper-attachments.asciidoc +++ b/docs/plugins/mapper-attachments.asciidoc @@ -405,41 +405,3 @@ It gives back: } } -------------------------- - -[[mapper-attachments-standalone]] -==== Stand alone runner - -If you want to run some tests within your IDE, you can use `StandaloneRunner` class. 
-It accepts arguments: - -* `-u file://URL/TO/YOUR/DOC` -* `--size` set extracted size (default to mapper attachment size) -* `BASE64` encoded binary - -Example: - -[source,sh] --------------------------- -StandaloneRunner BASE64Text -StandaloneRunner -u /tmp/mydoc.pdf -StandaloneRunner -u /tmp/mydoc.pdf --size 1000000 --------------------------- - -It produces something like: - -[source,text] --------------------------- -## Extracted text ---------------------- BEGIN ----------------------- -This is the extracted text ----------------------- END ------------------------ -## Metadata -- author: null -- content_length: null -- content_type: application/pdf -- date: null -- keywords: null -- language: null -- name: null -- title: null --------------------------- From 80198accc11aee7672a309917686cd93cf2a8f86 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 8 Mar 2016 14:13:55 -0800 Subject: [PATCH 107/320] Removed old cli stuff, and add tests for new Command behavior --- .../elasticsearch/bootstrap/Bootstrap.java | 2 +- .../bootstrap/BootstrapCLIParser.java | 2 +- .../bootstrap/Elasticsearch.java | 2 +- .../java/org/elasticsearch/cli/Command.java | 2 - .../org/elasticsearch/cli/MultiCommand.java | 2 - .../{common => }/cli/Terminal.java | 2 +- .../org/elasticsearch/common/cli/CliTool.java | 252 ------------------ .../common/cli/CliToolConfig.java | 249 ----------------- .../elasticsearch/common/cli/HelpPrinter.java | 57 ---- .../common/logging/TerminalAppender.java | 2 +- .../internal/InternalSettingsPreparer.java | 2 +- .../plugins/InstallPluginCommand.java | 5 +- .../plugins/ListPluginsCommand.java | 4 +- .../org/elasticsearch/plugins/PluginCli.java | 2 +- .../elasticsearch/plugins/PluginSecurity.java | 4 +- .../plugins/RemovePluginCommand.java | 9 +- .../org/elasticsearch/cli/CommandTests.java | 123 +++++++++ .../elasticsearch/cli/MultiCommandTests.java | 28 ++ .../{common => }/cli/TerminalTests.java | 3 +- .../bootstrap/BootstrapCliParserTests.java | 31 +-- 
.../plugins/PluginSecurityTests.java | 2 +- .../elasticsearch/cli/CommandTestCase.java | 2 - .../org/elasticsearch/cli/MockTerminal.java | 2 - 23 files changed, 180 insertions(+), 609 deletions(-) rename core/src/main/java/org/elasticsearch/{common => }/cli/Terminal.java (99%) delete mode 100644 core/src/main/java/org/elasticsearch/common/cli/CliTool.java delete mode 100644 core/src/main/java/org/elasticsearch/common/cli/CliToolConfig.java delete mode 100644 core/src/main/java/org/elasticsearch/common/cli/HelpPrinter.java create mode 100644 core/src/test/java/org/elasticsearch/cli/CommandTests.java create mode 100644 core/src/test/java/org/elasticsearch/cli/MultiCommandTests.java rename core/src/test/java/org/elasticsearch/{common => }/cli/TerminalTests.java (96%) diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 73654fdfee5a..5008229f5f80 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -26,7 +26,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.common.PidFile; import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.cli.Terminal; +import org.elasticsearch.cli.Terminal; import org.elasticsearch.common.inject.CreationException; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.LogConfigurator; diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java index 3567039cd425..e44e397f67a3 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java @@ -28,7 +28,7 @@ import org.elasticsearch.cli.Command; import org.elasticsearch.cli.ExitCodes; import 
org.elasticsearch.cli.UserError; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.cli.Terminal; +import org.elasticsearch.cli.Terminal; import org.elasticsearch.monitor.jvm.JvmInfo; final class BootstrapCliParser extends Command { diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 214efe483cac..1d2a0b98232a 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -21,7 +21,7 @@ package org.elasticsearch.bootstrap; import java.io.IOException; -import org.elasticsearch.common.cli.Terminal; +import org.elasticsearch.cli.Terminal; /** * This class starts elasticsearch. diff --git a/core/src/main/java/org/elasticsearch/cli/Command.java b/core/src/main/java/org/elasticsearch/cli/Command.java index f608d0cefb13..9e6afdd66382 100644 --- a/core/src/main/java/org/elasticsearch/cli/Command.java +++ b/core/src/main/java/org/elasticsearch/cli/Command.java @@ -21,14 +21,12 @@ package org.elasticsearch.cli; import java.io.IOException; import java.util.Arrays; -import java.util.List; import joptsimple.OptionException; import joptsimple.OptionParser; import joptsimple.OptionSet; import joptsimple.OptionSpec; import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.cli.Terminal; /** * An action to execute within a cli. 
diff --git a/core/src/main/java/org/elasticsearch/cli/MultiCommand.java b/core/src/main/java/org/elasticsearch/cli/MultiCommand.java index 5862b6f23111..a9feee0c9bf7 100644 --- a/core/src/main/java/org/elasticsearch/cli/MultiCommand.java +++ b/core/src/main/java/org/elasticsearch/cli/MultiCommand.java @@ -19,14 +19,12 @@ package org.elasticsearch.cli; -import java.io.IOException; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.Map; import joptsimple.NonOptionArgumentSpec; import joptsimple.OptionSet; -import org.elasticsearch.common.cli.Terminal; /** * A cli tool which is made up of multiple subcommands. diff --git a/core/src/main/java/org/elasticsearch/common/cli/Terminal.java b/core/src/main/java/org/elasticsearch/cli/Terminal.java similarity index 99% rename from core/src/main/java/org/elasticsearch/common/cli/Terminal.java rename to core/src/main/java/org/elasticsearch/cli/Terminal.java index 6a1c4382e42e..00d886aa8abd 100644 --- a/core/src/main/java/org/elasticsearch/common/cli/Terminal.java +++ b/core/src/main/java/org/elasticsearch/cli/Terminal.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.common.cli; +package org.elasticsearch.cli; import java.io.BufferedReader; import java.io.Console; diff --git a/core/src/main/java/org/elasticsearch/common/cli/CliTool.java b/core/src/main/java/org/elasticsearch/common/cli/CliTool.java deleted file mode 100644 index ba2007813d54..000000000000 --- a/core/src/main/java/org/elasticsearch/common/cli/CliTool.java +++ /dev/null @@ -1,252 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.cli; - -import org.apache.commons.cli.AlreadySelectedException; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.DefaultParser; -import org.apache.commons.cli.MissingArgumentException; -import org.apache.commons.cli.MissingOptionException; -import org.apache.commons.cli.UnrecognizedOptionException; -import org.elasticsearch.cli.UserError; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.node.internal.InternalSettingsPreparer; - -import java.util.Locale; - -import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; - -/** - * A base class for command-line interface tool. - * - * Two modes are supported: - * - * - Single command mode. The tool exposes a single command that can potentially accept arguments (eg. CLI options). - * - Multi command mode. The tool support multiple commands, each for different tasks, each potentially accepts arguments. - * - * In a multi-command mode. The first argument must be the command name. For example, the plugin manager - * can be seen as a multi-command tool with two possible commands: install and uninstall - * - * The tool is configured using a {@link CliToolConfig} which encapsulates the tool's commands and their - * potential options. 
The tool also comes with out of the box simple help support (the -h/--help option is - * automatically handled) where the help text is configured in a dedicated *.help files located in the same package - * as the tool. - */ -public abstract class CliTool { - - // based on sysexits.h - public enum ExitStatus { - OK(0), - OK_AND_EXIT(0), - USAGE(64), /* command line usage error */ - DATA_ERROR(65), /* data format error */ - NO_INPUT(66), /* cannot open input */ - NO_USER(67), /* addressee unknown */ - NO_HOST(68), /* host name unknown */ - UNAVAILABLE(69), /* service unavailable */ - CODE_ERROR(70), /* internal software error */ - CANT_CREATE(73), /* can't create (user) output file */ - IO_ERROR(74), /* input/output error */ - TEMP_FAILURE(75), /* temp failure; user is invited to retry */ - PROTOCOL(76), /* remote error in protocol */ - NOPERM(77), /* permission denied */ - CONFIG(78); /* configuration error */ - - final int status; - - ExitStatus(int status) { - this.status = status; - } - - public int status() { - return status; - } - } - - protected final Terminal terminal; - protected final Environment env; - protected final Settings settings; - - private final CliToolConfig config; - - protected CliTool(CliToolConfig config) { - this(config, Terminal.DEFAULT); - } - - protected CliTool(CliToolConfig config, Terminal terminal) { - if (config.cmds().size() == 0) { - throw new IllegalArgumentException("At least one command must be configured"); - } - this.config = config; - this.terminal = terminal; - env = InternalSettingsPreparer.prepareEnvironment(EMPTY_SETTINGS, terminal); - settings = env.settings(); - } - - public final ExitStatus execute(String... args) throws Exception { - - // first lets see if the user requests tool help. We're doing it only if - // this is a multi-command tool. 
If it's a single command tool, the -h/--help - // option will be taken care of on the command level - if (!config.isSingle() && args.length > 0 && (args[0].equals("-h") || args[0].equals("--help"))) { - config.printUsage(terminal); - return ExitStatus.OK_AND_EXIT; - } - - CliToolConfig.Cmd cmd; - if (config.isSingle()) { - cmd = config.single(); - } else { - - if (args.length == 0) { - terminal.println(Terminal.Verbosity.SILENT, "ERROR: command not specified"); - config.printUsage(terminal); - return ExitStatus.USAGE; - } - - String cmdName = args[0]; - cmd = config.cmd(cmdName); - if (cmd == null) { - terminal.println(Terminal.Verbosity.SILENT, "ERROR: unknown command [" + cmdName + "]. Use [-h] option to list available commands"); - return ExitStatus.USAGE; - } - - // we now remove the command name from the args - if (args.length == 1) { - args = new String[0]; - } else { - String[] cmdArgs = new String[args.length - 1]; - System.arraycopy(args, 1, cmdArgs, 0, cmdArgs.length); - args = cmdArgs; - } - } - - try { - return parse(cmd, args).execute(settings, env); - } catch (UserError error) { - terminal.println(Terminal.Verbosity.SILENT, "ERROR: " + error.getMessage()); - return ExitStatus.USAGE; - //return error.exitCode; - } - } - - public Command parse(String cmdName, String[] args) throws Exception { - CliToolConfig.Cmd cmd = config.cmd(cmdName); - return parse(cmd, args); - } - - public Command parse(CliToolConfig.Cmd cmd, String[] args) throws Exception { - CommandLineParser parser = new DefaultParser(); - CommandLine cli = parser.parse(CliToolConfig.OptionsSource.HELP.options(), args, true); - if (cli.hasOption("h")) { - return helpCmd(cmd); - } - try { - cli = parser.parse(cmd.options(), args, cmd.isStopAtNonOption()); - } catch (AlreadySelectedException|MissingArgumentException|MissingOptionException|UnrecognizedOptionException e) { - // intentionally drop the stack trace here as these are really user errors, - // the stack trace into cli parsing lib is 
not important - throw new UserError(ExitStatus.USAGE.status(), e.toString()); - } - - if (cli.hasOption("v")) { - terminal.setVerbosity(Terminal.Verbosity.VERBOSE); - } else if (cli.hasOption("s")) { - terminal.setVerbosity(Terminal.Verbosity.SILENT); - } else { - terminal.setVerbosity(Terminal.Verbosity.NORMAL); - } - return parse(cmd.name(), cli); - } - - protected Command.Help helpCmd(CliToolConfig.Cmd cmd) { - return new Command.Help(cmd, terminal); - } - - protected static Command.Exit exitCmd(ExitStatus status) { - return new Command.Exit(null, status, null); - } - - protected static Command.Exit exitCmd(ExitStatus status, Terminal terminal, String msg, Object... args) { - return new Command.Exit(String.format(Locale.ROOT, msg, args), status, terminal); - } - - protected abstract Command parse(String cmdName, CommandLine cli) throws Exception; - - public static abstract class Command { - - protected final Terminal terminal; - - protected Command(Terminal terminal) { - this.terminal = terminal; - } - - public abstract ExitStatus execute(Settings settings, Environment env) throws Exception; - - public static class Help extends Command { - - private final CliToolConfig.Cmd cmd; - - private Help(CliToolConfig.Cmd cmd, Terminal terminal) { - super(terminal); - this.cmd = cmd; - } - - @Override - public ExitStatus execute(Settings settings, Environment env) throws Exception { - cmd.printUsage(terminal); - return ExitStatus.OK_AND_EXIT; - } - } - - public static class Exit extends Command { - private final String msg; - private final ExitStatus status; - - private Exit(String msg, ExitStatus status, Terminal terminal) { - super(terminal); - this.msg = msg; - this.status = status; - } - - @Override - public ExitStatus execute(Settings settings, Environment env) throws Exception { - if (msg != null) { - if (status != ExitStatus.OK) { - terminal.println(Terminal.Verbosity.SILENT, "ERROR: " + msg); - } else { - terminal.println(msg); - } - } - return status; - } - - 
public ExitStatus status() { - return status; - } - } - } - - - -} - diff --git a/core/src/main/java/org/elasticsearch/common/cli/CliToolConfig.java b/core/src/main/java/org/elasticsearch/common/cli/CliToolConfig.java deleted file mode 100644 index ff752f45ef07..000000000000 --- a/core/src/main/java/org/elasticsearch/common/cli/CliToolConfig.java +++ /dev/null @@ -1,249 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common.cli; - -import org.apache.commons.cli.Option; -import org.apache.commons.cli.OptionGroup; -import org.apache.commons.cli.Options; - -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -/** - * - */ -public class CliToolConfig { - - private final Class toolType; - private final String name; - private final Map cmds; - - private static final HelpPrinter helpPrinter = new HelpPrinter(); - - private CliToolConfig(String name, Class toolType, Cmd[] cmds) { - this.name = name; - this.toolType = toolType; - final Map cmdsMapping = new HashMap<>(); - for (int i = 0; i < cmds.length; i++) { - cmdsMapping.put(cmds[i].name, cmds[i]); - } - this.cmds = Collections.unmodifiableMap(cmdsMapping); - } - - public boolean isSingle() { - return cmds.size() == 1; - } - - public Cmd single() { - assert isSingle() : "Requesting single command on a multi-command tool"; - return cmds.values().iterator().next(); - } - - public Class toolType() { - return toolType; - } - - public String name() { - return name; - } - - public Collection cmds() { - return cmds.values(); - } - - public Cmd cmd(String name) { - return cmds.get(name); - } - - public void printUsage(Terminal terminal) { - helpPrinter.print(this, terminal); - } - - public static class Cmd { - - private String toolName; - private final String name; - private final Class cmdType; - private final Options options; - private final boolean stopAtNonOption; - - private Cmd(String name, Class cmdType, Options options, boolean stopAtNonOption) { - this.name = name; - this.cmdType = cmdType; - this.options = options; - this.stopAtNonOption = stopAtNonOption; - OptionsSource.VERBOSITY.populate(options); - } - - public Class cmdType() { - return cmdType; - } - - public String name() { - return name; - } - - public Options options() { - return options; - } - - public boolean isStopAtNonOption() { - return stopAtNonOption; - } - - public void 
printUsage(Terminal terminal) { - helpPrinter.print(toolName, this, terminal); - } - - public static class Builder { - - private final String name; - private final Class cmdType; - private Options options = new Options(); - private boolean stopAtNonOption = false; - - private Builder(String name, Class cmdType) { - this.name = name; - this.cmdType = cmdType; - } - - public Builder options(OptionBuilder... optionBuilder) { - for (int i = 0; i < optionBuilder.length; i++) { - options.addOption(optionBuilder[i].build()); - } - return this; - } - - public Builder options(Option.Builder... optionBuilders) { - for (int i = 0; i < optionBuilders.length; i++) { - options.addOption(optionBuilders[i].build()); - } - return this; - } - - public Builder optionGroups(OptionGroupBuilder... optionGroupBuilders) { - for (OptionGroupBuilder builder : optionGroupBuilders) { - options.addOptionGroup(builder.build()); - } - return this; - } - - /** - * @param stopAtNonOption if true an unrecognized argument stops - * the parsing and the remaining arguments are added to the - * args list. If false an unrecognized - * argument triggers a ParseException. 
- */ - public Builder stopAtNonOption(boolean stopAtNonOption) { - this.stopAtNonOption = stopAtNonOption; - return this; - } - - public Cmd build() { - return new Cmd(name, cmdType, options, stopAtNonOption); - } - } - } - - public static class OptionBuilder { - - private final Option option; - - private OptionBuilder(String shortName, String longName) { - option = new Option(shortName, ""); - option.setLongOpt(longName); - option.setArgName(longName); - } - - public OptionBuilder required(boolean required) { - option.setRequired(required); - return this; - } - - public OptionBuilder hasArg(boolean optional) { - option.setOptionalArg(optional); - option.setArgs(1); - return this; - } - - public Option build() { - return option; - } - } - - public static class OptionGroupBuilder { - - private OptionGroup group; - - private OptionGroupBuilder(boolean required) { - group = new OptionGroup(); - group.setRequired(required); - } - - public OptionGroupBuilder options(OptionBuilder... optionBuilders) { - for (OptionBuilder builder : optionBuilders) { - group.addOption(builder.build()); - } - return this; - } - - public OptionGroup build() { - return group; - } - - } - - static abstract class OptionsSource { - - static final OptionsSource HELP = new OptionsSource() { - - @Override - void populate(Options options) { - options.addOption(new OptionBuilder("h", "help").required(false).build()); - } - }; - - static final OptionsSource VERBOSITY = new OptionsSource() { - @Override - void populate(Options options) { - OptionGroup verbosityGroup = new OptionGroup(); - verbosityGroup.setRequired(false); - verbosityGroup.addOption(new OptionBuilder("s", "silent").required(false).build()); - verbosityGroup.addOption(new OptionBuilder("v", "verbose").required(false).build()); - options.addOptionGroup(verbosityGroup); - } - }; - - private Options options; - - Options options() { - if (options == null) { - options = new Options(); - populate(options); - } - return options; - } - - 
abstract void populate(Options options); - - } -} diff --git a/core/src/main/java/org/elasticsearch/common/cli/HelpPrinter.java b/core/src/main/java/org/elasticsearch/common/cli/HelpPrinter.java deleted file mode 100644 index ada6cc33a191..000000000000 --- a/core/src/main/java/org/elasticsearch/common/cli/HelpPrinter.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common.cli; - -import org.elasticsearch.common.io.Streams; -import org.elasticsearch.common.util.Callback; - -import java.io.IOException; -import java.io.InputStream; - -/** - * - */ -public class HelpPrinter { - - private static final String HELP_FILE_EXT = ".help"; - - public void print(CliToolConfig config, Terminal terminal) { - print(config.toolType(), config.name(), terminal); - } - - public void print(String toolName, CliToolConfig.Cmd cmd, Terminal terminal) { - print(cmd.cmdType(), toolName + "-" + cmd.name(), terminal); - } - - private static void print(Class clazz, String name, final Terminal terminal) { - terminal.println(Terminal.Verbosity.SILENT, ""); - try (InputStream input = clazz.getResourceAsStream(name + HELP_FILE_EXT)) { - Streams.readAllLines(input, new Callback() { - @Override - public void handle(String line) { - terminal.println(Terminal.Verbosity.SILENT, line); - } - }); - } catch (IOException ioe) { - throw new RuntimeException(ioe); - } - terminal.println(Terminal.Verbosity.SILENT, ""); - } -} diff --git a/core/src/main/java/org/elasticsearch/common/logging/TerminalAppender.java b/core/src/main/java/org/elasticsearch/common/logging/TerminalAppender.java index 7031a62a9994..e967ad9d79ed 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/TerminalAppender.java +++ b/core/src/main/java/org/elasticsearch/common/logging/TerminalAppender.java @@ -22,7 +22,7 @@ package org.elasticsearch.common.logging; import org.apache.log4j.AppenderSkeleton; import org.apache.log4j.spi.LoggingEvent; -import org.elasticsearch.common.cli.Terminal; +import org.elasticsearch.cli.Terminal; /** * TerminalAppender logs event to Terminal.DEFAULT. It is used for example by the PluginCli. 
diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java index faf449586c11..c66cb08dae77 100644 --- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java @@ -23,7 +23,7 @@ import org.elasticsearch.bootstrap.BootstrapInfo; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.cli.Terminal; +import org.elasticsearch.cli.Terminal; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index 83273e6c1c49..32c4bf185073 100644 --- a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -27,8 +27,7 @@ import org.elasticsearch.Version; import org.elasticsearch.bootstrap.JarHell; import org.elasticsearch.cli.Command; import org.elasticsearch.cli.ExitCodes; -import org.elasticsearch.common.cli.CliTool; -import org.elasticsearch.common.cli.Terminal; +import org.elasticsearch.cli.Terminal; import org.elasticsearch.cli.UserError; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.io.FileSystemUtils; @@ -59,7 +58,7 @@ import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import static java.util.Collections.unmodifiableSet; -import static org.elasticsearch.common.cli.Terminal.Verbosity.VERBOSE; +import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE; import static org.elasticsearch.common.util.set.Sets.newHashSet; /** diff --git 
a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java index 276e38f1595f..953e698a4c20 100644 --- a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java @@ -26,9 +26,7 @@ import java.nio.file.Path; import joptsimple.OptionSet; import org.elasticsearch.cli.Command; -import org.elasticsearch.cli.ExitCodes; -import org.elasticsearch.common.cli.CliTool; -import org.elasticsearch.common.cli.Terminal; +import org.elasticsearch.cli.Terminal; import org.elasticsearch.env.Environment; /** diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginCli.java b/core/src/main/java/org/elasticsearch/plugins/PluginCli.java index 9f2e432a4386..323b872044ef 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginCli.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginCli.java @@ -22,7 +22,7 @@ package org.elasticsearch.plugins; import org.apache.log4j.BasicConfigurator; import org.apache.log4j.varia.NullAppender; import org.elasticsearch.cli.MultiCommand; -import org.elasticsearch.common.cli.Terminal; +import org.elasticsearch.cli.Terminal; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.node.internal.InternalSettingsPreparer; diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginSecurity.java b/core/src/main/java/org/elasticsearch/plugins/PluginSecurity.java index b14bcaf2ff3d..f9c3d1826c99 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginSecurity.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginSecurity.java @@ -20,8 +20,8 @@ package org.elasticsearch.plugins; import org.apache.lucene.util.IOUtils; -import org.elasticsearch.common.cli.Terminal; -import org.elasticsearch.common.cli.Terminal.Verbosity; +import org.elasticsearch.cli.Terminal; +import 
org.elasticsearch.cli.Terminal.Verbosity; import org.elasticsearch.env.Environment; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java index f435a16edf04..a3e6c375f839 100644 --- a/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java @@ -32,11 +32,10 @@ import org.elasticsearch.cli.Command; import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.UserError; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.cli.CliTool; -import org.elasticsearch.common.cli.Terminal; +import org.elasticsearch.cli.Terminal; import org.elasticsearch.env.Environment; -import static org.elasticsearch.common.cli.Terminal.Verbosity.VERBOSE; +import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE; /** * A command for the plugin cli to remove a plugin from elasticsearch. @@ -68,7 +67,7 @@ class RemovePluginCommand extends Command { Path pluginDir = env.pluginsFile().resolve(pluginName); if (Files.exists(pluginDir) == false) { - throw new UserError(CliTool.ExitStatus.USAGE.status(), "Plugin " + pluginName + " not found. Run 'plugin list' to get list of installed plugins."); + throw new UserError(ExitCodes.USAGE, "Plugin " + pluginName + " not found. 
Run 'plugin list' to get list of installed plugins."); } List pluginPaths = new ArrayList<>(); @@ -76,7 +75,7 @@ class RemovePluginCommand extends Command { Path pluginBinDir = env.binFile().resolve(pluginName); if (Files.exists(pluginBinDir)) { if (Files.isDirectory(pluginBinDir) == false) { - throw new UserError(CliTool.ExitStatus.IO_ERROR.status(), "Bin dir for " + pluginName + " is not a directory"); + throw new UserError(ExitCodes.IO_ERROR, "Bin dir for " + pluginName + " is not a directory"); } pluginPaths.add(pluginBinDir); terminal.println(VERBOSE, "Removing: " + pluginBinDir); diff --git a/core/src/test/java/org/elasticsearch/cli/CommandTests.java b/core/src/test/java/org/elasticsearch/cli/CommandTests.java new file mode 100644 index 000000000000..153bd4600b91 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cli/CommandTests.java @@ -0,0 +1,123 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.cli; + +import joptsimple.OptionSet; +import org.elasticsearch.test.ESTestCase; + +public class CommandTests extends ESTestCase { + + static class UserErrorCommand extends Command { + UserErrorCommand() { + super("Throws a user error"); + } + @Override + protected void execute(Terminal terminal, OptionSet options) throws Exception { + throw new UserError(ExitCodes.DATA_ERROR, "Bad input"); + } + } + + static class NoopCommand extends Command { + boolean executed = false; + NoopCommand() { + super("Does nothing"); + } + @Override + protected void execute(Terminal terminal, OptionSet options) throws Exception { + terminal.println("Normal output"); + terminal.println(Terminal.Verbosity.SILENT, "Silent output"); + terminal.println(Terminal.Verbosity.VERBOSE, "Verbose output"); + executed = true; + } + @Override + protected void printAdditionalHelp(Terminal terminal) { + terminal.println("Some extra help"); + } + } + + public void testHelp() throws Exception { + NoopCommand command = new NoopCommand(); + MockTerminal terminal = new MockTerminal(); + String[] args = {"-h"}; + int status = command.main(args, terminal); + String output = terminal.getOutput(); + assertEquals(output, ExitCodes.OK, status); + assertTrue(output, output.contains("Does nothing")); + assertTrue(output, output.contains("Some extra help")); + assertFalse(command.executed); + + command = new NoopCommand(); + String[] args2 = {"--help"}; + status = command.main(args2, terminal); + output = terminal.getOutput(); + assertEquals(output, ExitCodes.OK, status); + assertTrue(output, output.contains("Does nothing")); + assertTrue(output, output.contains("Some extra help")); + assertFalse(command.executed); + } + + public void testVerbositySilentAndVerbose() throws Exception { + MockTerminal terminal = new MockTerminal(); + NoopCommand command = new NoopCommand(); + String[] args = {"-v", "-s"}; + UserError e = expectThrows(UserError.class, () -> { + 
command.mainWithoutErrorHandling(args, terminal); + }); + assertTrue(e.getMessage(), e.getMessage().contains("Cannot specify -s and -v together")); + } + + public void testSilentVerbosity() throws Exception { + MockTerminal terminal = new MockTerminal(); + NoopCommand command = new NoopCommand(); + String[] args = {"-s"}; + command.main(args, terminal); + String output = terminal.getOutput(); + assertTrue(output, output.contains("Silent output")); + } + + public void testNormalVerbosity() throws Exception { + MockTerminal terminal = new MockTerminal(); + terminal.setVerbosity(Terminal.Verbosity.SILENT); + NoopCommand command = new NoopCommand(); + String[] args = {}; + command.main(args, terminal); + String output = terminal.getOutput(); + assertTrue(output, output.contains("Normal output")); + } + + public void testVerboseVerbosity() throws Exception { + MockTerminal terminal = new MockTerminal(); + NoopCommand command = new NoopCommand(); + String[] args = {"-v"}; + command.main(args, terminal); + String output = terminal.getOutput(); + assertTrue(output, output.contains("Verbose output")); + } + + public void testUserError() throws Exception { + MockTerminal terminal = new MockTerminal(); + UserErrorCommand command = new UserErrorCommand(); + String[] args = {}; + int status = command.main(args, terminal); + String output = terminal.getOutput(); + assertEquals(output, ExitCodes.DATA_ERROR, status); + assertTrue(output, output.contains("ERROR: Bad input")); + } +} diff --git a/core/src/test/java/org/elasticsearch/cli/MultiCommandTests.java b/core/src/test/java/org/elasticsearch/cli/MultiCommandTests.java new file mode 100644 index 000000000000..cdd7cb7e2412 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cli/MultiCommandTests.java @@ -0,0 +1,28 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cli; + +public class MultiCommandTests extends CommandTestCase { + + @Override + protected Command newCommand() { + return null; + } +} diff --git a/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java b/core/src/test/java/org/elasticsearch/cli/TerminalTests.java similarity index 96% rename from core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java rename to core/src/test/java/org/elasticsearch/cli/TerminalTests.java index 12fc4cb77e4c..6673bdbc858a 100644 --- a/core/src/test/java/org/elasticsearch/common/cli/TerminalTests.java +++ b/core/src/test/java/org/elasticsearch/cli/TerminalTests.java @@ -17,9 +17,8 @@ * under the License. 
*/ -package org.elasticsearch.common.cli; +package org.elasticsearch.cli; -import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.test.ESTestCase; public class TerminalTests extends ESTestCase { diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java index 726d17b09384..2fc08f23a064 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java @@ -19,21 +19,6 @@ package org.elasticsearch.bootstrap; -import joptsimple.OptionException; -import org.elasticsearch.Build; -import org.elasticsearch.Version; -import org.elasticsearch.cli.Command; -import org.elasticsearch.cli.CommandTestCase; -import org.elasticsearch.cli.ExitCodes; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.cli.CliTool.ExitStatus; -import org.elasticsearch.cli.UserError; -import org.elasticsearch.cli.MockTerminal; -import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.monitor.jvm.JvmInfo; -import org.junit.After; -import org.junit.Before; - import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -41,12 +26,18 @@ import java.util.List; import java.util.Locale; import java.util.Map; -import static org.elasticsearch.common.cli.CliTool.ExitStatus.OK; -import static org.elasticsearch.common.cli.CliTool.ExitStatus.OK_AND_EXIT; -import static org.elasticsearch.common.cli.CliTool.ExitStatus.USAGE; -import static org.hamcrest.Matchers.containsString; +import joptsimple.OptionException; +import org.elasticsearch.Build; +import org.elasticsearch.Version; +import org.elasticsearch.cli.Command; +import org.elasticsearch.cli.CommandTestCase; +import org.elasticsearch.cli.UserError; +import org.elasticsearch.common.SuppressForbidden; +import 
org.elasticsearch.monitor.jvm.JvmInfo; +import org.junit.After; +import org.junit.Before; + import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; @SuppressForbidden(reason = "modifies system properties intentionally") public class BootstrapCliParserTests extends CommandTestCase { diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginSecurityTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginSecurityTests.java index acc300c6cf58..466f7d05cd1d 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginSecurityTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginSecurityTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.plugins; -import org.elasticsearch.common.cli.Terminal; +import org.elasticsearch.cli.Terminal; import org.elasticsearch.test.ESTestCase; import java.nio.file.Path; diff --git a/test/framework/src/main/java/org/elasticsearch/cli/CommandTestCase.java b/test/framework/src/main/java/org/elasticsearch/cli/CommandTestCase.java index a9b31b636ccd..e9c6a2eec9c3 100644 --- a/test/framework/src/main/java/org/elasticsearch/cli/CommandTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cli/CommandTestCase.java @@ -19,8 +19,6 @@ package org.elasticsearch.cli; -import joptsimple.OptionSet; -import org.elasticsearch.common.cli.Terminal; import org.elasticsearch.test.ESTestCase; import org.junit.Before; diff --git a/test/framework/src/main/java/org/elasticsearch/cli/MockTerminal.java b/test/framework/src/main/java/org/elasticsearch/cli/MockTerminal.java index b712b216f9a4..bd8bd493ceac 100644 --- a/test/framework/src/main/java/org/elasticsearch/cli/MockTerminal.java +++ b/test/framework/src/main/java/org/elasticsearch/cli/MockTerminal.java @@ -27,8 +27,6 @@ import java.nio.charset.StandardCharsets; import java.util.ArrayDeque; import java.util.Deque; -import org.elasticsearch.common.cli.Terminal; - /** * A terminal for tests which 
captures all output, and * can be plugged with fake input. From 13424318db53cf1790b6539b6ed0d70491808ec2 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 8 Mar 2016 14:16:39 -0800 Subject: [PATCH 108/320] Remove old help files --- .../bootstrap/elasticsearch-start.help | 28 ------------------- .../bootstrap/elasticsearch-version.help | 16 ----------- .../bootstrap/elasticsearch.help | 22 --------------- 3 files changed, 66 deletions(-) delete mode 100644 core/src/main/resources/org/elasticsearch/bootstrap/elasticsearch-start.help delete mode 100644 core/src/main/resources/org/elasticsearch/bootstrap/elasticsearch-version.help delete mode 100644 core/src/main/resources/org/elasticsearch/bootstrap/elasticsearch.help diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/elasticsearch-start.help b/core/src/main/resources/org/elasticsearch/bootstrap/elasticsearch-start.help deleted file mode 100644 index 9b27a8dd390b..000000000000 --- a/core/src/main/resources/org/elasticsearch/bootstrap/elasticsearch-start.help +++ /dev/null @@ -1,28 +0,0 @@ -NAME - - start - Start Elasticsearch - -SYNOPSIS - - elasticsearch start - -DESCRIPTION - - This command starts Elasticsearch. You can configure it to run in the foreground, write a pid file - and configure arbitrary options that override file-based configuration. - -OPTIONS - - -h,--help Shows this message - - -p,--pidfile Creates a pid file in the specified path on start - - -d,--daemonize Starts Elasticsearch in the background - - -Dproperty=value Configures an Elasticsearch specific property, like -Dnetwork.host=127.0.0.1 - - --property=value Configures an elasticsearch specific property, like --network.host 127.0.0.1 - --property value - - NOTE: The -d, -p, and -D arguments must appear before any --property arguments. 
- diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/elasticsearch-version.help b/core/src/main/resources/org/elasticsearch/bootstrap/elasticsearch-version.help deleted file mode 100644 index 00f2a33401ce..000000000000 --- a/core/src/main/resources/org/elasticsearch/bootstrap/elasticsearch-version.help +++ /dev/null @@ -1,16 +0,0 @@ -NAME - - version - Show version information and exit - -SYNOPSIS - - elasticsearch version - -DESCRIPTION - - This command shows Elasticsearch version, timestamp and build information as well as JVM info - -OPTIONS - - -h,--help Shows this message - diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/elasticsearch.help b/core/src/main/resources/org/elasticsearch/bootstrap/elasticsearch.help deleted file mode 100644 index 83ee497dc214..000000000000 --- a/core/src/main/resources/org/elasticsearch/bootstrap/elasticsearch.help +++ /dev/null @@ -1,22 +0,0 @@ -NAME - - elasticsearch - Manages elasticsearch - -SYNOPSIS - - elasticsearch - -DESCRIPTION - - Start an elasticsearch node - -COMMANDS - - start Start elasticsearch - - version Show version information and exit - -NOTES - - [*] For usage help on specific commands please type "elasticsearch -h" - From 73ebe36ed001e8202b342a66c1f6358b798ee727 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 8 Mar 2016 17:27:53 -0800 Subject: [PATCH 109/320] More tests --- .../elasticsearch/bootstrap/Bootstrap.java | 26 +++--- .../bootstrap/Elasticsearch.java | 11 +-- .../elasticsearch/cli/MultiCommandTests.java | 79 ++++++++++++++++++- .../elasticsearch/plugins/PluginCliTests.java | 79 ------------------- 4 files changed, 96 insertions(+), 99 deletions(-) delete mode 100644 core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 5008229f5f80..6cd2b4d80fe3 100644 --- 
a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -19,14 +19,22 @@ package org.elasticsearch.bootstrap; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.PrintStream; +import java.nio.file.Path; +import java.util.Locale; +import java.util.concurrent.CountDownLatch; + import org.apache.lucene.util.Constants; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.StringHelper; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; +import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.cli.Terminal; import org.elasticsearch.common.PidFile; import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.cli.Terminal; import org.elasticsearch.common.inject.CreationException; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.LogConfigurator; @@ -39,13 +47,6 @@ import org.elasticsearch.monitor.process.ProcessProbe; import org.elasticsearch.node.Node; import org.elasticsearch.node.internal.InternalSettingsPreparer; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.PrintStream; -import java.nio.file.Path; -import java.util.Locale; -import java.util.concurrent.CountDownLatch; - import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; /** @@ -217,10 +218,17 @@ final class Bootstrap { * This method is invoked by {@link Elasticsearch#main(String[])} * to startup elasticsearch. 
*/ - static void init() throws Throwable { + static void init(String[] args) throws Throwable { // Set the system property before anything has a chance to trigger its use initLoggerPrefix(); + BootstrapCliParser parser = new BootstrapCliParser(); + int status = parser.main(args, Terminal.DEFAULT); + + if (parser.shouldRun() == false || status != ExitCodes.OK) { + exit(status); + } + INSTANCE = new Bootstrap(); boolean foreground = !"false".equals(System.getProperty("es.foreground", System.getProperty("es-foreground"))); diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 1d2a0b98232a..3b95c3f4a6ff 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -21,8 +21,6 @@ package org.elasticsearch.bootstrap; import java.io.IOException; -import org.elasticsearch.cli.Terminal; - /** * This class starts elasticsearch. */ @@ -35,15 +33,8 @@ public final class Elasticsearch { * Main entry point for starting elasticsearch */ public static void main(String[] args) throws Exception { - BootstrapCliParser parser = new BootstrapCliParser(); - parser.main(args, Terminal.DEFAULT); - - if (parser.shouldRun() == false) { - return; - } - try { - Bootstrap.init(); + Bootstrap.init(args); } catch (Throwable t) { // format exceptions to the console in a special way // to avoid 2MB stacktraces from guice, etc. 
diff --git a/core/src/test/java/org/elasticsearch/cli/MultiCommandTests.java b/core/src/test/java/org/elasticsearch/cli/MultiCommandTests.java index cdd7cb7e2412..4f91d3784401 100644 --- a/core/src/test/java/org/elasticsearch/cli/MultiCommandTests.java +++ b/core/src/test/java/org/elasticsearch/cli/MultiCommandTests.java @@ -19,10 +19,87 @@ package org.elasticsearch.cli; +import joptsimple.OptionSet; +import org.junit.Before; + public class MultiCommandTests extends CommandTestCase { + static class DummyMultiCommand extends MultiCommand { + DummyMultiCommand() { + super("A dummy multi command"); + } + } + + static class DummySubCommand extends Command { + DummySubCommand() { + super("A dummy subcommand"); + } + @Override + protected void execute(Terminal terminal, OptionSet options) throws Exception { + terminal.println("Arguments: " + options.nonOptionArguments().toString()); + } + } + + DummyMultiCommand multiCommand; + + @Before + public void setupCommand() { + multiCommand = new DummyMultiCommand(); + } + @Override protected Command newCommand() { - return null; + return multiCommand; + } + + public void testNoCommandsConfigured() throws Exception { + IllegalStateException e = expectThrows(IllegalStateException.class, () -> { + execute(); + }); + assertEquals("No subcommands configured", e.getMessage()); + } + + public void testUnknownCommand() throws Exception { + multiCommand.subcommands.put("something", new DummySubCommand()); + UserError e = expectThrows(UserError.class, () -> { + execute("somethingelse"); + }); + assertEquals(ExitCodes.USAGE, e.exitCode); + assertEquals("Unknown command [somethingelse]", e.getMessage()); + } + + public void testMissingCommand() throws Exception { + multiCommand.subcommands.put("command1", new DummySubCommand()); + UserError e = expectThrows(UserError.class, () -> { + execute(); + }); + assertEquals(ExitCodes.USAGE, e.exitCode); + assertEquals("Missing command", e.getMessage()); + } + + public void testHelp() throws 
Exception { + multiCommand.subcommands.put("command1", new DummySubCommand()); + multiCommand.subcommands.put("command2", new DummySubCommand()); + execute("-h"); + String output = terminal.getOutput(); + assertTrue(output, output.contains("command1")); + assertTrue(output, output.contains("command2")); + } + + public void testSubcommandHelp() throws Exception { + multiCommand.subcommands.put("command1", new DummySubCommand()); + multiCommand.subcommands.put("command2", new DummySubCommand()); + execute("command2", "-h"); + String output = terminal.getOutput(); + assertFalse(output, output.contains("command1")); + assertTrue(output, output.contains("A dummy subcommand")); + } + + public void testSubcommandArguments() throws Exception { + multiCommand.subcommands.put("command1", new DummySubCommand()); + execute("command1", "foo", "bar"); + String output = terminal.getOutput(); + assertFalse(output, output.contains("command1")); + assertTrue(output, output.contains("Arguments: [foo, bar]")); } } diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java deleted file mode 100644 index 73d979495717..000000000000 --- a/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.plugins; - -import java.io.IOException; -import java.nio.file.Path; - -import org.elasticsearch.cli.Command; -import org.elasticsearch.cli.CommandTestCase; -import org.elasticsearch.common.cli.CliToolTestCase; -import org.elasticsearch.cli.MockTerminal; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.junit.Before; - -import static org.hamcrest.Matchers.hasItem; -import static org.hamcrest.Matchers.is; - -public class PluginCliTests extends CommandTestCase { - - // the home dir for each test to use - Path homeDir; - - // settings used to create an Environment for tools - Settings.Builder settingsBuilder; - - @Before - public void setupHome() { - homeDir = createTempDir(); - settingsBuilder = Settings.builder() - .put("path.home", homeDir); - } - - @Override - protected Command newCommand() { - return new PluginCli(new Environment(settingsBuilder.build())); - } - - public void testHelpWorks() throws Exception { - MockTerminal terminal = new MockTerminal(); - /* nocommit - assertThat(new PluginCli(terminal).execute(args("--help")), is(OK_AND_EXIT)); - assertTerminalOutputContainsHelpFile(terminal, "/org/elasticsearch/plugins/plugin.help"); - - terminal.resetOutput(); - assertThat(new PluginCli(terminal).execute(args("install -h")), is(OK_AND_EXIT)); - assertTerminalOutputContainsHelpFile(terminal, "/org/elasticsearch/plugins/plugin-install.help"); - for (String plugin : InstallPluginCommand.OFFICIAL_PLUGINS) { - assertThat(terminal.getOutput(), 
containsString(plugin)); - } - - terminal.resetOutput(); - assertThat(new PluginCli(terminal).execute(args("remove --help")), is(OK_AND_EXIT)); - assertTerminalOutputContainsHelpFile(terminal, "/org/elasticsearch/plugins/plugin-remove.help"); - - terminal.resetOutput(); - assertThat(new PluginCli(terminal).execute(args("list -h")), is(OK_AND_EXIT)); - assertTerminalOutputContainsHelpFile(terminal, "/org/elasticsearch/plugins/plugin-list.help"); - */ - } - -} From 6cfdf9f4404a82f9da1f9a75d191184659e117c8 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 8 Mar 2016 17:29:31 -0800 Subject: [PATCH 110/320] Remove old commons-cli dep --- core/build.gradle | 1 - 1 file changed, 1 deletion(-) diff --git a/core/build.gradle b/core/build.gradle index 226158ca094d..ab3754e72ff6 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -49,7 +49,6 @@ dependencies { compile 'org.elasticsearch:securesm:1.0' // utilities - compile 'commons-cli:commons-cli:1.3.1' // nocommit: remove the old! compile 'net.sf.jopt-simple:jopt-simple:4.9' compile 'com.carrotsearch:hppc:0.7.1' From cb607a8faee195af9737602be2dc01ac2288a397 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 8 Mar 2016 19:18:14 -0800 Subject: [PATCH 111/320] Remove commons-cli sha and add jopt-simple sha --- distribution/licenses/commons-cli-1.3.1.jar.sha1 | 1 - distribution/licenses/jopt-simple-4.9.jar.sha1 | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) delete mode 100644 distribution/licenses/commons-cli-1.3.1.jar.sha1 create mode 100644 distribution/licenses/jopt-simple-4.9.jar.sha1 diff --git a/distribution/licenses/commons-cli-1.3.1.jar.sha1 b/distribution/licenses/commons-cli-1.3.1.jar.sha1 deleted file mode 100644 index fc366d027f52..000000000000 --- a/distribution/licenses/commons-cli-1.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1303efbc4b181e5a58bf2e967dc156a3132b97c0 diff --git a/distribution/licenses/jopt-simple-4.9.jar.sha1 b/distribution/licenses/jopt-simple-4.9.jar.sha1 new file mode 100644 index 
000000000000..b86fa62ac20c --- /dev/null +++ b/distribution/licenses/jopt-simple-4.9.jar.sha1 @@ -0,0 +1 @@ +ee9e9eaa0a35360dcfeac129ff4923215fd65904 \ No newline at end of file From 1dafead2ebd00de570863d8c4819f0267fdc656a Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 8 Mar 2016 22:55:24 -0800 Subject: [PATCH 112/320] Fix precommit --- .../elasticsearch/bootstrap/BootstrapCLIParser.java | 3 +++ modules/lang-groovy/build.gradle | 9 +++++++++ plugins/repository-hdfs/build.gradle | 10 ++++++++++ 3 files changed, 22 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java index e44e397f67a3..f812bda178cb 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java @@ -29,6 +29,7 @@ import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.UserError; import org.elasticsearch.common.Strings; import org.elasticsearch.cli.Terminal; +import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.monitor.jvm.JvmInfo; final class BootstrapCliParser extends Command { @@ -54,6 +55,8 @@ final class BootstrapCliParser extends Command { .withRequiredArg(); } + // TODO: don't use system properties as a way to do this, its horrible... 
+ @SuppressForbidden(reason = "Sets system properties passed as CLI parameters") @Override protected void execute(Terminal terminal, OptionSet options) throws Exception { if (options.has(versionOption)) { diff --git a/modules/lang-groovy/build.gradle b/modules/lang-groovy/build.gradle index 89444a4e926a..005a7d4be183 100644 --- a/modules/lang-groovy/build.gradle +++ b/modules/lang-groovy/build.gradle @@ -38,6 +38,15 @@ thirdPartyAudit.excludes = [ // for example we do not need ivy, scripts arent allowed to download code 'com.thoughtworks.xstream.XStream', 'groovyjarjarasm.asm.util.Textifiable', + 'org.apache.commons.cli.CommandLine', + 'org.apache.commons.cli.CommandLineParser', + 'org.apache.commons.cli.GnuParser', + 'org.apache.commons.cli.HelpFormatter', + 'org.apache.commons.cli.Option', + 'org.apache.commons.cli.OptionBuilder', + 'org.apache.commons.cli.Options', + 'org.apache.commons.cli.Parser', + 'org.apache.commons.cli.PosixParser', 'org.apache.ivy.Ivy', 'org.apache.ivy.core.event.IvyListener', 'org.apache.ivy.core.event.download.PrepareDownloadEvent', diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 915a85ebdc47..20050bdc31e1 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -191,6 +191,16 @@ thirdPartyAudit.excludes = [ 'org.apache.commons.beanutils.DynaClass', 'org.apache.commons.beanutils.DynaProperty', 'org.apache.commons.beanutils.PropertyUtils', + 'org.apache.commons.cli.CommandLine', + 'org.apache.commons.cli.CommandLineParser', + 'org.apache.commons.cli.GnuParser', + 'org.apache.commons.cli.HelpFormatter', + 'org.apache.commons.cli.Option', + 'org.apache.commons.cli.OptionBuilder', + 'org.apache.commons.cli.OptionGroup', + 'org.apache.commons.cli.Options', + 'org.apache.commons.cli.ParseException', + 'org.apache.commons.cli.PosixParser', 'org.apache.commons.compress.archivers.tar.TarArchiveEntry', 
'org.apache.commons.compress.archivers.tar.TarArchiveInputStream', 'org.apache.commons.codec.DecoderException', From d822c6558f5983f7b90ae57ca9cd1f341bd72e59 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 8 Mar 2016 23:17:35 -0800 Subject: [PATCH 113/320] Fix file rename to match class name --- .../{BootstrapCLIParser.java => BootstrapCliParser.java} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename core/src/main/java/org/elasticsearch/bootstrap/{BootstrapCLIParser.java => BootstrapCliParser.java} (100%) diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCliParser.java similarity index 100% rename from core/src/main/java/org/elasticsearch/bootstrap/BootstrapCLIParser.java rename to core/src/main/java/org/elasticsearch/bootstrap/BootstrapCliParser.java From 80ae2b0002eada4c357cf6fad0a174b0946da3da Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 9 Mar 2016 00:10:59 -0800 Subject: [PATCH 114/320] Fix more licenses --- .../bootstrap/BootstrapCliParser.java | 8 +++++-- distribution/licenses/jopt-simple-LICENSE.txt | 24 +++++++++++++++++++ distribution/licenses/jopt-simple-NOTICE.txt | 0 plugins/repository-hdfs/build.gradle | 11 +-------- .../licenses/commons-cli-1.2.jar.sha1 | 1 + .../licenses/commons-cli-LICENSE.txt | 0 .../licenses/commons-cli-NOTICE.txt | 0 7 files changed, 32 insertions(+), 12 deletions(-) create mode 100644 distribution/licenses/jopt-simple-LICENSE.txt create mode 100644 distribution/licenses/jopt-simple-NOTICE.txt create mode 100644 plugins/repository-hdfs/licenses/commons-cli-1.2.jar.sha1 rename {distribution => plugins/repository-hdfs}/licenses/commons-cli-LICENSE.txt (100%) rename {distribution => plugins/repository-hdfs}/licenses/commons-cli-NOTICE.txt (100%) diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCliParser.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCliParser.java index 
f812bda178cb..5c927305f149 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCliParser.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCliParser.java @@ -51,7 +51,7 @@ final class BootstrapCliParser extends Command { pidfileOption = parser.acceptsAll(Arrays.asList("p", "pidfile"), "Creates a pid file in the specified path on start") .withRequiredArg(); - propertyOption = parser.accepts("E", "Configures an Elasticsearch setting") + propertyOption = parser.accepts("D", "Configures an Elasticsearch setting") .withRequiredArg(); } @@ -80,7 +80,11 @@ final class BootstrapCliParser extends Command { if (keyValue.length != 2) { throw new UserError(ExitCodes.USAGE, "Malformed elasticsearch setting, must be of the form key=value"); } - System.setProperty("es." + keyValue[0], keyValue[1]); + String key = keyValue[0]; + if (key.startsWith("es.") == false) { + key = "es." + key; + } + System.setProperty(key, keyValue[1]); } shouldRun = true; } diff --git a/distribution/licenses/jopt-simple-LICENSE.txt b/distribution/licenses/jopt-simple-LICENSE.txt new file mode 100644 index 000000000000..85f923a95268 --- /dev/null +++ b/distribution/licenses/jopt-simple-LICENSE.txt @@ -0,0 +1,24 @@ +/* + The MIT License + + Copyright (c) 2004-2015 Paul R. Holser, Jr. + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ diff --git a/distribution/licenses/jopt-simple-NOTICE.txt b/distribution/licenses/jopt-simple-NOTICE.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 20050bdc31e1..8fc9e50d7f3f 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -45,6 +45,7 @@ dependencies { compile 'com.google.guava:guava:16.0.1' compile 'com.google.protobuf:protobuf-java:2.5.0' compile 'commons-logging:commons-logging:1.1.3' + compile 'commons-cli:commons-cli:1.2' compile 'commons-collections:commons-collections:3.2.2' compile 'commons-configuration:commons-configuration:1.6' compile 'commons-io:commons-io:2.4' @@ -191,16 +192,6 @@ thirdPartyAudit.excludes = [ 'org.apache.commons.beanutils.DynaClass', 'org.apache.commons.beanutils.DynaProperty', 'org.apache.commons.beanutils.PropertyUtils', - 'org.apache.commons.cli.CommandLine', - 'org.apache.commons.cli.CommandLineParser', - 'org.apache.commons.cli.GnuParser', - 'org.apache.commons.cli.HelpFormatter', - 'org.apache.commons.cli.Option', - 'org.apache.commons.cli.OptionBuilder', - 'org.apache.commons.cli.OptionGroup', - 'org.apache.commons.cli.Options', - 'org.apache.commons.cli.ParseException', - 'org.apache.commons.cli.PosixParser', 'org.apache.commons.compress.archivers.tar.TarArchiveEntry', 'org.apache.commons.compress.archivers.tar.TarArchiveInputStream', 'org.apache.commons.codec.DecoderException', diff --git 
a/plugins/repository-hdfs/licenses/commons-cli-1.2.jar.sha1 b/plugins/repository-hdfs/licenses/commons-cli-1.2.jar.sha1 new file mode 100644 index 000000000000..d38d00127e8c --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-cli-1.2.jar.sha1 @@ -0,0 +1 @@ +2bf96b7aa8b611c177d329452af1dc933e14501c \ No newline at end of file diff --git a/distribution/licenses/commons-cli-LICENSE.txt b/plugins/repository-hdfs/licenses/commons-cli-LICENSE.txt similarity index 100% rename from distribution/licenses/commons-cli-LICENSE.txt rename to plugins/repository-hdfs/licenses/commons-cli-LICENSE.txt diff --git a/distribution/licenses/commons-cli-NOTICE.txt b/plugins/repository-hdfs/licenses/commons-cli-NOTICE.txt similarity index 100% rename from distribution/licenses/commons-cli-NOTICE.txt rename to plugins/repository-hdfs/licenses/commons-cli-NOTICE.txt From 712043315d549fbbc7949f955b9685550f30d92f Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 9 Mar 2016 00:37:15 -0800 Subject: [PATCH 115/320] Use Setting objects for tribe node client passthrough, and add scripts path to passthrough --- .../org/elasticsearch/tribe/TribeService.java | 23 +++++++++++-------- .../tribe/TribeServiceTests.java | 2 ++ 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index b2eb1cd59f0d..2bd40539807f 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -160,13 +160,13 @@ public class TribeService extends AbstractLifecycleComponent { BLOCKS_METADATA_INDICES_SETTING.getKey(), BLOCKS_METADATA_SETTING.getKey(), BLOCKS_READ_INDICES_SETTING.getKey(), BLOCKS_WRITE_INDICES_SETTING.getKey(), BLOCKS_WRITE_SETTING.getKey()); // these settings should be passed through to each tribe client, if they are not set explicitly - private static final List PASS_THROUGH_SETTINGS = 
Arrays.asList( - NetworkService.GLOBAL_NETWORK_HOST_SETTING.getKey(), - NetworkService.GLOBAL_NETWORK_BINDHOST_SETTING.getKey(), - NetworkService.GLOBAL_NETWORK_PUBLISHHOST_SETTING.getKey(), - TransportSettings.HOST.getKey(), - TransportSettings.BIND_HOST.getKey(), - TransportSettings.PUBLISH_HOST.getKey() + private static final List> PASS_THROUGH_SETTINGS = Arrays.asList( + NetworkService.GLOBAL_NETWORK_HOST_SETTING, + NetworkService.GLOBAL_NETWORK_BINDHOST_SETTING, + NetworkService.GLOBAL_NETWORK_PUBLISHHOST_SETTING, + TransportSettings.HOST, + TransportSettings.BIND_HOST, + TransportSettings.PUBLISH_HOST ); private final String onConflict; private final Set droppedIndices = ConcurrentCollections.newConcurrentSet(); @@ -224,9 +224,12 @@ public class TribeService extends AbstractLifecycleComponent { if (Environment.PATH_LOGS_SETTING.exists(globalSettings)) { sb.put(Environment.PATH_LOGS_SETTING.getKey(), Environment.PATH_LOGS_SETTING.get(globalSettings)); } - for (String passthrough : PASS_THROUGH_SETTINGS) { - if (sb.get(passthrough) == null && globalSettings.get(passthrough) != null) { - sb.put(passthrough, globalSettings.get(passthrough)); + if (Environment.PATH_SCRIPTS_SETTING.exists(globalSettings)) { + sb.put(Environment.PATH_SCRIPTS_SETTING.getKey(), Environment.PATH_SCRIPTS_SETTING.get(globalSettings)); + } + for (Setting passthrough : PASS_THROUGH_SETTINGS) { + if (passthrough.exists(tribeSettings) == false && passthrough.exists(globalSettings)) { + sb.put(passthrough.getKey(), globalSettings.get(passthrough.getKey())); } } sb.put(TRIBE_NAME_SETTING.getKey(), tribeName); diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java b/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java index 877fd24a7ba6..5174a317a40e 100644 --- a/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java +++ b/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java @@ -42,11 +42,13 @@ public class TribeServiceTests extends 
ESTestCase { .put("path.home", "some/path") .put("path.conf", "conf/path") .put("path.plugins", "plugins/path") + .put("path.scripts", "scripts/path") .put("path.logs", "logs/path").build(); Settings clientSettings = TribeService.buildClientSettings("tribe1", globalSettings, Settings.EMPTY); assertEquals("some/path", clientSettings.get("path.home")); assertEquals("conf/path", clientSettings.get("path.conf")); assertEquals("plugins/path", clientSettings.get("path.plugins")); + assertEquals("scripts/path", clientSettings.get("path.scripts")); assertEquals("logs/path", clientSettings.get("path.logs")); Settings tribeSettings = Settings.builder() From 98249507cff3e363ecb4c5f06f14f0bb96da1ad5 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 9 Mar 2016 09:38:46 +0100 Subject: [PATCH 116/320] Add missing index name to indexing slow log This was lost in refactoring even on the 2.x branch. The slow-log is not per index not per shard anymore such that we don't add the shard ID as the logger prefix. This commit adds back the index name as part of the logging message not as a prefix on the logger for better testabilitly. 
Closes #17025 --- .../org/elasticsearch/index/IndexingSlowLog.java | 15 ++++++++++----- .../elasticsearch/index/IndexingSlowLogTests.java | 14 ++++++++++---- 2 files changed, 20 insertions(+), 9 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java index 5452daa7f077..75d3d60daad9 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java +++ b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java @@ -36,6 +36,7 @@ import java.util.concurrent.TimeUnit; /** */ public final class IndexingSlowLog implements IndexingOperationListener { + private final Index index; private boolean reformat; private long indexWarnThreshold; private long indexInfoThreshold; @@ -85,6 +86,7 @@ public final class IndexingSlowLog implements IndexingOperationListener { IndexingSlowLog(IndexSettings indexSettings, ESLogger indexLogger, ESLogger deleteLogger) { this.indexLogger = indexLogger; this.deleteLogger = deleteLogger; + this.index = indexSettings.getIndex(); indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING, this::setReformat); this.reformat = indexSettings.getValue(INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING); @@ -141,13 +143,13 @@ public final class IndexingSlowLog implements IndexingOperationListener { private void postIndexing(ParsedDocument doc, long tookInNanos) { if (indexWarnThreshold >= 0 && tookInNanos > indexWarnThreshold) { - indexLogger.warn("{}", new SlowLogParsedDocumentPrinter(doc, tookInNanos, reformat, maxSourceCharsToLog)); + indexLogger.warn("{}", new SlowLogParsedDocumentPrinter(index, doc, tookInNanos, reformat, maxSourceCharsToLog)); } else if (indexInfoThreshold >= 0 && tookInNanos > indexInfoThreshold) { - indexLogger.info("{}", new SlowLogParsedDocumentPrinter(doc, tookInNanos, reformat, maxSourceCharsToLog)); + indexLogger.info("{}", new SlowLogParsedDocumentPrinter(index, doc, 
tookInNanos, reformat, maxSourceCharsToLog)); } else if (indexDebugThreshold >= 0 && tookInNanos > indexDebugThreshold) { - indexLogger.debug("{}", new SlowLogParsedDocumentPrinter(doc, tookInNanos, reformat, maxSourceCharsToLog)); + indexLogger.debug("{}", new SlowLogParsedDocumentPrinter(index, doc, tookInNanos, reformat, maxSourceCharsToLog)); } else if (indexTraceThreshold >= 0 && tookInNanos > indexTraceThreshold) { - indexLogger.trace("{}", new SlowLogParsedDocumentPrinter(doc, tookInNanos, reformat, maxSourceCharsToLog)); + indexLogger.trace("{}", new SlowLogParsedDocumentPrinter(index, doc, tookInNanos, reformat, maxSourceCharsToLog)); } } @@ -156,9 +158,11 @@ public final class IndexingSlowLog implements IndexingOperationListener { private final long tookInNanos; private final boolean reformat; private final int maxSourceCharsToLog; + private final Index index; - SlowLogParsedDocumentPrinter(ParsedDocument doc, long tookInNanos, boolean reformat, int maxSourceCharsToLog) { + SlowLogParsedDocumentPrinter(Index index, ParsedDocument doc, long tookInNanos, boolean reformat, int maxSourceCharsToLog) { this.doc = doc; + this.index = index; this.tookInNanos = tookInNanos; this.reformat = reformat; this.maxSourceCharsToLog = maxSourceCharsToLog; @@ -167,6 +171,7 @@ public final class IndexingSlowLog implements IndexingOperationListener { @Override public String toString() { StringBuilder sb = new StringBuilder(); + sb.append(index).append(" "); sb.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], "); sb.append("type[").append(doc.type()).append("], "); sb.append("id[").append(doc.id()).append("], "); diff --git a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java index e36763665112..9e05122322a9 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java 
+++ b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java @@ -36,24 +36,30 @@ import java.io.IOException; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.startsWith; public class IndexingSlowLogTests extends ESTestCase { public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException { BytesReference source = JsonXContent.contentBuilder().startObject().field("foo", "bar").endObject().bytes(); ParsedDocument pd = new ParsedDocument(new StringField("uid", "test:id", Store.YES), new LegacyIntField("version", 1, Store.YES), "id", "test", null, 0, -1, null, source, null); - + Index index = new Index("foo", "123"); // Turning off document logging doesn't log source[] - SlowLogParsedDocumentPrinter p = new SlowLogParsedDocumentPrinter(pd, 10, true, 0); + SlowLogParsedDocumentPrinter p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, 0); assertThat(p.toString(), not(containsString("source["))); // Turning on document logging logs the whole thing - p = new SlowLogParsedDocumentPrinter(pd, 10, true, Integer.MAX_VALUE); + p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, Integer.MAX_VALUE); assertThat(p.toString(), containsString("source[{\"foo\":\"bar\"}]")); // And you can truncate the source - p = new SlowLogParsedDocumentPrinter(pd, 10, true, 3); + p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, 3); assertThat(p.toString(), containsString("source[{\"f]")); + + // And you can truncate the source + p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, 3); + assertThat(p.toString(), containsString("source[{\"f]")); + assertThat(p.toString(), startsWith("[foo/123] took")); } public void testReformatSetting() { From dedc45ea627b15d4191486267a0035ac2bb7a210 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 9 Mar 2016 01:03:27 -0800 Subject: [PATCH 117/320] Fix tribe integ test to not try to pass through path settings --- 
core/src/test/java/org/elasticsearch/tribe/TribeIT.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java index 55a79ffddfc8..7313d880a632 100644 --- a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java +++ b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java @@ -132,6 +132,9 @@ public class TribeIT extends ESIntegTestCase { Settings.Builder tribe1Defaults = Settings.builder(); Settings.Builder tribe2Defaults = Settings.builder(); for (Map.Entry entry : asMap.entrySet()) { + if (entry.getKey().startsWith("path.")) { + continue; + } tribe1Defaults.put("tribe.t1." + entry.getKey(), entry.getValue()); tribe2Defaults.put("tribe.t2." + entry.getKey(), entry.getValue()); } From 997fccde09fa405bd62766281393b7aac8962d7a Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 9 Mar 2016 10:06:09 +0100 Subject: [PATCH 118/320] Remove unused delete logger in IndexingSlowLog The delete logger is a leftover and has no usage in this class. 
--- .../java/org/elasticsearch/index/IndexingSlowLog.java | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java index 75d3d60daad9..a74838b3b619 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java +++ b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java @@ -52,7 +52,6 @@ public final class IndexingSlowLog implements IndexingOperationListener { private SlowLogLevel level; private final ESLogger indexLogger; - private final ESLogger deleteLogger; private static final String INDEX_INDEXING_SLOWLOG_PREFIX = "index.indexing.slowlog"; public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.warn", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); @@ -76,16 +75,14 @@ public final class IndexingSlowLog implements IndexingOperationListener { }, true, Setting.Scope.INDEX); IndexingSlowLog(IndexSettings indexSettings) { - this(indexSettings, Loggers.getLogger(INDEX_INDEXING_SLOWLOG_PREFIX + ".index"), - Loggers.getLogger(INDEX_INDEXING_SLOWLOG_PREFIX + ".delete")); + this(indexSettings, Loggers.getLogger(INDEX_INDEXING_SLOWLOG_PREFIX + ".index", indexSettings.getSettings())); } /** * Build with the specified loggers. Only used to testing. 
*/ - IndexingSlowLog(IndexSettings indexSettings, ESLogger indexLogger, ESLogger deleteLogger) { + IndexingSlowLog(IndexSettings indexSettings, ESLogger indexLogger) { this.indexLogger = indexLogger; - this.deleteLogger = deleteLogger; this.index = indexSettings.getIndex(); indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING, this::setReformat); @@ -111,7 +108,6 @@ public final class IndexingSlowLog implements IndexingOperationListener { private void setLevel(SlowLogLevel level) { this.level = level; this.indexLogger.setLevel(level.name()); - this.deleteLogger.setLevel(level.name()); } private void setWarnThreshold(TimeValue warnThreshold) { From 716e7267f3d1a1df7915956ac4e2f38e2ed4b3b2 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 9 Mar 2016 10:09:23 +0100 Subject: [PATCH 119/320] Remove unused test-only constructor from IndexingSlowLog --- .../java/org/elasticsearch/index/IndexingSlowLog.java | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java index a74838b3b619..d6fa552b203f 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java +++ b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java @@ -75,14 +75,7 @@ public final class IndexingSlowLog implements IndexingOperationListener { }, true, Setting.Scope.INDEX); IndexingSlowLog(IndexSettings indexSettings) { - this(indexSettings, Loggers.getLogger(INDEX_INDEXING_SLOWLOG_PREFIX + ".index", indexSettings.getSettings())); - } - - /** - * Build with the specified loggers. Only used to testing. 
- */ - IndexingSlowLog(IndexSettings indexSettings, ESLogger indexLogger) { - this.indexLogger = indexLogger; + this.indexLogger = Loggers.getLogger(INDEX_INDEXING_SLOWLOG_PREFIX + ".index", indexSettings.getSettings()); this.index = indexSettings.getIndex(); indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING, this::setReformat); From 7b5b0d451159198706b98f0da4cd770c7a31be2a Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Tue, 26 Jan 2016 12:35:51 +0100 Subject: [PATCH 120/320] Move missing() from SortBuilder interface to class As mentioned by @cbuescher on #16151 this method is really implemented only in the FieldSortBuilder. Moving the method down. Relates to #15178 --- .../elasticsearch/search/sort/FieldSortBuilder.java | 1 - .../search/sort/GeoDistanceSortBuilder.java | 10 ---------- .../elasticsearch/search/sort/ScoreSortBuilder.java | 5 ----- .../elasticsearch/search/sort/ScriptSortBuilder.java | 8 -------- .../org/elasticsearch/search/sort/SortBuilder.java | 6 ------ 5 files changed, 30 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index 4f082b057da8..67ceb75a29cb 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -68,7 +68,6 @@ public class FieldSortBuilder extends SortBuilder { * Sets the value when a field is missing in a doc. Can also be set to _last or * _first to sort missing last or first respectively. 
*/ - @Override public FieldSortBuilder missing(Object missing) { this.missing = missing; return this; diff --git a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index e37eed61c6dd..708152af1f04 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -218,16 +218,6 @@ public class GeoDistanceSortBuilder extends SortBuilder return this.order; } - /** - * Not relevant. - * - * TODO should this throw an exception rather than silently ignore a parameter that is not used? - */ - @Override - public GeoDistanceSortBuilder missing(Object missing) { - return this; - } - /** * Defines which distance to use for sorting in the case a document contains multiple geo points. * Possible values: min and max diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java index 7435ff95f452..e70a34a7c3b1 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java @@ -41,11 +41,6 @@ public class ScoreSortBuilder extends SortBuilder { return this; } - @Override - public SortBuilder missing(Object missing) { - return this; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject("_score"); diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index e9a9c8df57ce..d02e4dc520a8 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -62,14 +62,6 @@ public class ScriptSortBuilder extends 
SortBuilder { return this; } - /** - * Not really relevant. - */ - @Override - public SortBuilder missing(Object missing) { - return this; - } - /** * Defines which distance to use for sorting in the case a document contains multiple geo points. * Possible values: min and max diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/SortBuilder.java index da80506dde28..0935b76ece95 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortBuilder.java @@ -45,10 +45,4 @@ public abstract class SortBuilder implements ToXContent { * The order of sorting. Defaults to {@link SortOrder#ASC}. */ public abstract SortBuilder order(SortOrder order); - - /** - * Sets the value when a field is missing in a doc. Can also be set to _last or - * _first to sort missing last or first respectively. - */ - public abstract SortBuilder missing(Object missing); } From b9b5c15fe137a05646881f910eae6f701cd1c4d7 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 9 Mar 2016 11:39:20 +0100 Subject: [PATCH 121/320] test: ensure the each node sees 2 nodes. 
--- .../ingest/IngestProcessorNotInstalledOnAllNodesIT.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java index abfe18f8c584..a415b0992a7b 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java @@ -67,9 +67,11 @@ public class IngestProcessorNotInstalledOnAllNodesIT extends ESIntegTestCase { public void testFailPipelineCreation() throws Exception { installPlugin = true; - internalCluster().startNode(); + String node1 = internalCluster().startNode(); installPlugin = false; - internalCluster().startNode(); + String node2 = internalCluster().startNode(); + ensureStableCluster(2, node1); + ensureStableCluster(2, node2); try { client().admin().cluster().preparePutPipeline("_id", pipelineSource).get(); From f8ab6a6669c6c364ffff3141cee44208d9d8c09e Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 9 Mar 2016 11:43:18 +0100 Subject: [PATCH 122/320] [TEST] Make boost more prominent in test since with new default similarity it might score lower without the boost --- .../java/org/elasticsearch/search/query/MultiMatchQueryIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index e0bc26c9296c..23e2592447bf 100644 --- a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -567,7 +567,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { // test if boosts work searchResponse = client().prepareSearch("test") - .setQuery(randomizeType(multiMatchQuery("the 
ultimate", "full_name", "first_name", "last_name", "category").field("last_name", 2) + .setQuery(randomizeType(multiMatchQuery("the ultimate", "full_name", "first_name", "last_name", "category").field("last_name", 10) .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) .operator(Operator.AND))).get(); assertFirstHit(searchResponse, hasId("ultimate1")); // has ultimate in the last_name and that is boosted From 7a53a396e43341d8a8b552e17706d6e996ea7dd2 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 9 Mar 2016 12:10:47 +0100 Subject: [PATCH 123/320] Remove Unneded @Inject annotations --- .../index/analysis/IcuTransformTokenFilterFactory.java | 2 -- .../elasticsearch/index/analysis/KuromojiAnalyzerProvider.java | 2 -- .../index/analysis/KuromojiBaseFormFilterFactory.java | 2 -- .../index/analysis/KuromojiKatakanaStemmerFactory.java | 2 -- .../index/analysis/KuromojiReadingFormFilterFactory.java | 2 -- .../index/analysis/PhoneticTokenFilterFactory.java | 2 -- .../index/analysis/SmartChineseAnalyzerProvider.java | 2 -- 7 files changed, 14 deletions(-) diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTransformTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTransformTokenFilterFactory.java index 6ecdf3888e9f..f145ad4ae305 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTransformTokenFilterFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTransformTokenFilterFactory.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.analysis; import com.ibm.icu.text.Transliterator; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.icu.ICUTransformFilter; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -36,7 +35,6 @@ public class 
IcuTransformTokenFilterFactory extends AbstractTokenFilterFactory { private final int dir; private final Transliterator transliterator; - @Inject public IcuTransformTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); this.id = settings.get("id", "Null"); diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiAnalyzerProvider.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiAnalyzerProvider.java index 8aa8ff3c1ddc..21d9b8040550 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiAnalyzerProvider.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiAnalyzerProvider.java @@ -23,7 +23,6 @@ import org.apache.lucene.analysis.ja.JapaneseAnalyzer; import org.apache.lucene.analysis.ja.JapaneseTokenizer; import org.apache.lucene.analysis.ja.dict.UserDictionary; import org.apache.lucene.analysis.util.CharArraySet; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -36,7 +35,6 @@ public class KuromojiAnalyzerProvider extends AbstractIndexAnalyzerProvider stopWords = Analysis.parseStopWords(env, settings, JapaneseAnalyzer.getDefaultStopSet()); diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiBaseFormFilterFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiBaseFormFilterFactory.java index e191d78198f4..aa035d9edfdf 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiBaseFormFilterFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiBaseFormFilterFactory.java @@ -21,14 +21,12 @@ package org.elasticsearch.index.analysis; 
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ja.JapaneseBaseFormFilter; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; public class KuromojiBaseFormFilterFactory extends AbstractTokenFilterFactory { - @Inject public KuromojiBaseFormFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); } diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiKatakanaStemmerFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiKatakanaStemmerFactory.java index ebebdcb6bba4..491f48e34c1a 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiKatakanaStemmerFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiKatakanaStemmerFactory.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ja.JapaneseKatakanaStemFilter; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -30,7 +29,6 @@ public class KuromojiKatakanaStemmerFactory extends AbstractTokenFilterFactory { private final int minimumLength; - @Inject public KuromojiKatakanaStemmerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); minimumLength = settings.getAsInt("minimum_length", JapaneseKatakanaStemFilter.DEFAULT_MINIMUM_LENGTH); diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiReadingFormFilterFactory.java 
b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiReadingFormFilterFactory.java index 59d1088fd1b3..d0eb0cecdb93 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiReadingFormFilterFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiReadingFormFilterFactory.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ja.JapaneseReadingFormFilter; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -30,7 +29,6 @@ public class KuromojiReadingFormFilterFactory extends AbstractTokenFilterFactory private final boolean useRomaji; - @Inject public KuromojiReadingFormFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); useRomaji = settings.getAsBoolean("use_romaji", false); diff --git a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java index e33f1f1e7e25..75da19c0a3cc 100644 --- a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java +++ b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java @@ -38,7 +38,6 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.phonetic.BeiderMorseFilter; import org.apache.lucene.analysis.phonetic.DoubleMetaphoneFilter; import org.apache.lucene.analysis.phonetic.PhoneticFilter; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import 
org.elasticsearch.index.IndexSettings; @@ -58,7 +57,6 @@ public class PhoneticTokenFilterFactory extends AbstractTokenFilterFactory { private NameType nametype; private RuleType ruletype; - @Inject public PhoneticTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); this.languageset = null; diff --git a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseAnalyzerProvider.java b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseAnalyzerProvider.java index 22fcf238725f..591912b8fa38 100644 --- a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseAnalyzerProvider.java +++ b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseAnalyzerProvider.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.cn.smart.SmartChineseAnalyzer; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -31,7 +30,6 @@ public class SmartChineseAnalyzerProvider extends AbstractIndexAnalyzerProvider< private final SmartChineseAnalyzer analyzer; - @Inject public SmartChineseAnalyzerProvider(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); From 11b18a996378bd83065fdc835c7dee9921c44aa0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 8 Mar 2016 15:06:10 +0100 Subject: [PATCH 124/320] Sort: Make ScoreSortBuilder implement NamedWriteable and add fromXContent parsing This change makes ScoreSortBuilder implement NamedWriteable, adds equals() and hashCode() and also implements parsing ScoreSortBuilder back from xContent. This is needed for the ongoing Search refactoring. 
--- .../search/sort/ScoreSortBuilder.java | 93 +++++++++++++++++-- .../search/sort/SortElementParserTemp.java | 3 +- .../search/sort/AbstractSortTestCase.java | 21 ++--- .../sort/GeoDistanceSortBuilderTests.java | 72 +++++++------- .../search/sort/ScoreSortBuilderTests.java | 82 ++++++++++++++++ 5 files changed, 213 insertions(+), 58 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/search/sort/ScoreSortBuilderTests.java diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java index e70a34a7c3b1..5d1a0d82987c 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java @@ -19,35 +19,116 @@ package org.elasticsearch.search.sort; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.QueryParseContext; import java.io.IOException; +import java.util.Objects; /** * A sort builder allowing to sort by score. - * - * */ -public class ScoreSortBuilder extends SortBuilder { +public class ScoreSortBuilder extends SortBuilder implements NamedWriteable, + SortElementParserTemp { - private SortOrder order; + private static final String NAME = "_score"; + static final ScoreSortBuilder PROTOTYPE = new ScoreSortBuilder(); + public static final ParseField REVERSE_FIELD = new ParseField("reverse"); + public static final ParseField ORDER_FIELD = new ParseField("order"); + private SortOrder order = SortOrder.DESC; /** * The order of sort scoring. 
By default, its {@link SortOrder#DESC}. */ @Override public ScoreSortBuilder order(SortOrder order) { + Objects.requireNonNull(order, "sort order cannot be null."); this.order = order; return this; } + /** + * Get the order of sort scoring. By default, its {@link SortOrder#DESC}. + */ + public SortOrder order() { + return this.order; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject("_score"); + builder.startObject(NAME); if (order == SortOrder.ASC) { - builder.field("reverse", true); + builder.field(REVERSE_FIELD.getPreferredName(), true); } builder.endObject(); return builder; } + + @Override + public ScoreSortBuilder fromXContent(QueryParseContext context, String elementName) throws IOException { + XContentParser parser = context.parser(); + ParseFieldMatcher matcher = context.parseFieldMatcher(); + + XContentParser.Token token; + String currentName = parser.currentName(); + ScoreSortBuilder result = new ScoreSortBuilder(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentName = parser.currentName(); + } else if (token.isValue()) { + if (matcher.match(currentName, REVERSE_FIELD)) { + if (parser.booleanValue()) { + result.order(SortOrder.ASC); + } + // else we keep the default DESC + } else if (matcher.match(currentName, ORDER_FIELD)) { + result.order(SortOrder.fromString(parser.text())); + } else { + throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] failed to parse field [" + currentName + "]"); + } + } else { + throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] unexpected token [" + token + "]"); + } + } + return result; + } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + if (object == null || getClass() != object.getClass()) { + return false; + } + ScoreSortBuilder other = (ScoreSortBuilder) object; + 
return Objects.equals(order, other.order); + } + + @Override + public int hashCode() { + return Objects.hash(this.order); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + order.writeTo(out); + } + + @Override + public ScoreSortBuilder readFrom(StreamInput in) throws IOException { + return new ScoreSortBuilder().order(SortOrder.readOrderFrom(in)); + } + + @Override + public String getWriteableName() { + return NAME; + } } diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortElementParserTemp.java b/core/src/main/java/org/elasticsearch/search/sort/SortElementParserTemp.java index 8893471b6c1a..069f1380b49c 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortElementParserTemp.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortElementParserTemp.java @@ -19,13 +19,12 @@ package org.elasticsearch.search.sort; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.index.query.QueryParseContext; import java.io.IOException; // TODO once sort refactoring is done this needs to be merged into SortBuilder -public interface SortElementParserTemp { +public interface SortElementParserTemp { /** * Creates a new SortBuilder from the json held by the {@link SortElementParserTemp} * in {@link org.elasticsearch.common.xcontent.XContent} format diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index dfea1a9316bc..dc61f0ef34c4 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -43,7 +43,7 @@ import java.io.IOException; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; -public abstract class AbstractSortTestCase & ToXContent & SortElementParserTemp> extends ESTestCase { +public abstract class AbstractSortTestCase & 
SortElementParserTemp> extends ESTestCase { protected static NamedWriteableRegistry namedWriteableRegistry; @@ -53,7 +53,8 @@ public abstract class AbstractSortTestCase & ToXCont @BeforeClass public static void init() { namedWriteableRegistry = new NamedWriteableRegistry(); - namedWriteableRegistry.registerPrototype(GeoDistanceSortBuilder.class, GeoDistanceSortBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(SortBuilder.class, GeoDistanceSortBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(SortBuilder.class, ScoreSortBuilder.PROTOTYPE); indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).buildQueryParserRegistry(); } @@ -85,9 +86,9 @@ public abstract class AbstractSortTestCase & ToXCont XContentParser itemParser = XContentHelper.createParser(builder.bytes()); itemParser.nextToken(); - + /* - * filter out name of sort, or field name to sort on for element fieldSort + * filter out name of sort, or field name to sort on for element fieldSort */ itemParser.nextToken(); String elementName = itemParser.currentName(); @@ -95,7 +96,7 @@ public abstract class AbstractSortTestCase & ToXCont QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); context.reset(itemParser); - NamedWriteable parsedItem = testItem.fromXContent(context, elementName); + SortBuilder parsedItem = testItem.fromXContent(context, elementName); assertNotSame(testItem, parsedItem); assertEquals(testItem, parsedItem); assertEquals(testItem.hashCode(), parsedItem.hashCode()); @@ -146,17 +147,15 @@ public abstract class AbstractSortTestCase & ToXCont } } + @SuppressWarnings("unchecked") protected T copyItem(T original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { - @SuppressWarnings("unchecked") - T prototype = (T) 
namedWriteableRegistry.getPrototype(getPrototype(), original.getWriteableName()); - T copy = (T) prototype.readFrom(in); - return copy; + T prototype = (T) namedWriteableRegistry.getPrototype(SortBuilder.class, + original.getWriteableName()); + return prototype.readFrom(in); } } } - - protected abstract Class getPrototype(); } diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index e957db58b388..611053b14d5d 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -60,7 +60,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase getPrototype() { - return (Class) GeoDistanceSortBuilder.PROTOTYPE.getClass(); - } - public void testSortModeSumIsRejectedInSetter() { GeoDistanceSortBuilder builder = new GeoDistanceSortBuilder("testname", -1, -1); GeoPoint point = RandomGeoGenerator.randomPoint(getRandom()); @@ -189,23 +183,23 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase { + + @Override + protected ScoreSortBuilder createTestItem() { + return new ScoreSortBuilder().order(randomBoolean() ? 
SortOrder.ASC : SortOrder.DESC); + } + + @Override + protected ScoreSortBuilder mutate(ScoreSortBuilder original) throws IOException { + ScoreSortBuilder result = new ScoreSortBuilder(); + if (original.order() == SortOrder.ASC) { + result.order(SortOrder.DESC); + } else { + result.order(SortOrder.ASC); + } + return result; + } + + @Rule + public ExpectedException exceptionRule = ExpectedException.none(); + + /** + * test passing null to {@link ScoreSortBuilder#order(SortOrder)} is illegal + */ + public void testIllegalOrder() { + exceptionRule.expect(NullPointerException.class); + exceptionRule.expectMessage("sort order cannot be null."); + new ScoreSortBuilder().order(null); + } + + /** + * test parsing order parameter if specified as `order` field in the json + * instead of the `reverse` field that we render in toXContent + */ + public void testParseOrder() throws IOException { + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); + context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY)); + SortOrder order = randomBoolean() ? 
SortOrder.ASC : SortOrder.DESC; + String scoreSortString = "{ \"_score\": { \"order\": \""+ order.toString() +"\" }}"; + XContentParser parser = XContentFactory.xContent(scoreSortString).createParser(scoreSortString); + // need to skip until parser is located on second START_OBJECT + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + + context.reset(parser); + ScoreSortBuilder scoreSort = ScoreSortBuilder.PROTOTYPE.fromXContent(context, "_score"); + assertEquals(order, scoreSort.order()); + } +} From 06929f8ed42b74cf490533448a90741a4a8e1bd8 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Wed, 9 Mar 2016 15:32:54 +0100 Subject: [PATCH 125/320] Merge pull request #17030 from 36degrees/patch-1 Fix typo in clear cache documentation --- docs/reference/indices/clearcache.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/indices/clearcache.asciidoc b/docs/reference/indices/clearcache.asciidoc index 21008e5b46b7..8ebb9e3488a1 100644 --- a/docs/reference/indices/clearcache.asciidoc +++ b/docs/reference/indices/clearcache.asciidoc @@ -2,7 +2,7 @@ == Clear Cache The clear cache API allows to clear either all caches or specific cached -associated with one ore more indices. +associated with one or more indices. [source,js] -------------------------------------------------- From e411cbb0600ad119dacfbd214bba994f4f2aa4c0 Mon Sep 17 00:00:00 2001 From: Ali Beyad Date: Tue, 1 Mar 2016 18:10:46 -0500 Subject: [PATCH 126/320] Fixes the DiscoveryWithServiceDisruptionsIT#testIndicesDeleted test In particular, this test ensures we don't restart the master node until we know the index deletion has taken effect on master and the master eligible nodes. 
Closes #16917 Closes #16890 --- .../DiscoveryWithServiceDisruptionsIT.java | 34 ++++++++++++++----- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index b9d7107ed548..3948a4bab903 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -177,13 +177,17 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { } private void configureUnicastCluster(int numberOfNodes, @Nullable int[] unicastHostsOrdinals, int minimumMasterNode) throws ExecutionException, InterruptedException { + configureUnicastCluster(DEFAULT_SETTINGS, numberOfNodes, unicastHostsOrdinals, minimumMasterNode); + } + + private void configureUnicastCluster(Settings settings, int numberOfNodes, @Nullable int[] unicastHostsOrdinals, int minimumMasterNode) throws ExecutionException, InterruptedException { if (minimumMasterNode < 0) { minimumMasterNode = numberOfNodes / 2 + 1; } logger.info("---> configured unicast"); // TODO: Rarely use default settings form some of these Settings nodeSettings = Settings.builder() - .put(DEFAULT_SETTINGS) + .put(settings) .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minimumMasterNode) .build(); @@ -196,7 +200,6 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { } } - /** * Test that no split brain occurs under partial network partition. See https://github.com/elastic/elasticsearch/issues/2488 */ @@ -1075,25 +1078,40 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { * Tests that indices are properly deleted even if there is a master transition in between. 
* Test for https://github.com/elastic/elasticsearch/issues/11665 */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/16890") public void testIndicesDeleted() throws Exception { - configureUnicastCluster(3, null, 2); + final Settings settings = Settings.builder() + .put(DEFAULT_SETTINGS) + .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "0s") // don't wait on isolated data node + .put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "30s") // wait till cluster state is committed + .build(); + final String idxName = "test"; + configureUnicastCluster(settings, 3, null, 2); InternalTestCluster.Async> masterNodes = internalCluster().startMasterOnlyNodesAsync(2); InternalTestCluster.Async dataNode = internalCluster().startDataOnlyNodeAsync(); dataNode.get(); - masterNodes.get(); + final List allMasterEligibleNodes = masterNodes.get(); ensureStableCluster(3); assertAcked(prepareCreate("test")); ensureYellow(); - String masterNode1 = internalCluster().getMasterName(); + final String masterNode1 = internalCluster().getMasterName(); NetworkPartition networkPartition = new NetworkUnresponsivePartition(masterNode1, dataNode.get(), getRandom()); internalCluster().setDisruptionScheme(networkPartition); networkPartition.startDisrupting(); - internalCluster().client(masterNode1).admin().indices().prepareDelete("test").setTimeout("1s").get(); + // We know this will time out due to the partition, we check manually below to not proceed until + // the delete has been applied to the master node and the master eligible node. + internalCluster().client(masterNode1).admin().indices().prepareDelete(idxName).setTimeout("0s").get(); + // Don't restart the master node until we know the index deletion has taken effect on master and the master eligible node. 
+ assertBusy(() -> { + for (String masterNode : allMasterEligibleNodes) { + final ClusterState masterState = internalCluster().clusterService(masterNode).state(); + assertTrue("index not deleted on " + masterNode, masterState.metaData().hasIndex(idxName) == false && + masterState.status() == ClusterState.ClusterStateStatus.APPLIED); + } + }); internalCluster().restartNode(masterNode1, InternalTestCluster.EMPTY_CALLBACK); ensureYellow(); - assertFalse(client().admin().indices().prepareExists("test").get().isExists()); + assertFalse(client().admin().indices().prepareExists(idxName).get().isExists()); } protected NetworkPartition addRandomPartition() { From d09ee3f174666620007f804fa5985e0e6ba5879c Mon Sep 17 00:00:00 2001 From: Nicholas Knize Date: Fri, 8 Jan 2016 22:32:41 -0600 Subject: [PATCH 127/320] Remove .geohash suffix from GeoDistanceQuery and GeoDistanceRangeQuery Occasionally the .geohash suffix in Geo{Distance|DistanceRange}Query would conflict with a mapping that defines a sub-field by the same name. This occurs often with nested and multi-fields where a mapping defines a geo_point sub-field using the field name "geohash". Since the QueryParser already handles parsing geohash encoded geopoints without requiring the ".geohash" suffix, the suffix parsing can be removed altogether. This commit removes the .geohash suffix parsing, adds explicit test coverage for the nested query use-case, and adds random distance queries to the nested query test suite. 
--- .../mapper/geo/BaseGeoPointFieldMapper.java | 1 - .../index/query/GeoDistanceQueryParser.java | 3 -- .../query/GeoDistanceRangeQueryParser.java | 9 ------ .../index/query/AbstractQueryTestCase.java | 3 +- .../query/GeoDistanceRangeQueryTests.java | 32 +++++++++++++++++++ .../query/GeohashCellQueryBuilderTests.java | 2 +- .../index/query/NestedQueryBuilderTests.java | 1 + 7 files changed, 36 insertions(+), 15 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java index 5e617dd6815d..f72533d30cf9 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java @@ -65,7 +65,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr public static final String LON = "lon"; public static final String LON_SUFFIX = "." + LON; public static final String GEOHASH = "geohash"; - public static final String GEOHASH_SUFFIX = "." 
+ GEOHASH; public static final String IGNORE_MALFORMED = "ignore_malformed"; } diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryParser.java index c35a31f8d84b..3828f786903a 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryParser.java @@ -120,9 +120,6 @@ public class GeoDistanceQueryParser implements QueryParser> protected static final String DATE_FIELD_NAME = "mapped_date"; protected static final String OBJECT_FIELD_NAME = "mapped_object"; protected static final String GEO_POINT_FIELD_NAME = "mapped_geo_point"; + protected static final String GEO_POINT_FIELD_MAPPING = "type=geo_point,lat_lon=true,geohash=true,geohash_prefix=true"; protected static final String GEO_SHAPE_FIELD_NAME = "mapped_geo_shape"; protected static final String[] MAPPED_FIELD_NAMES = new String[] { STRING_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME, GEO_SHAPE_FIELD_NAME }; @@ -300,7 +301,7 @@ public abstract class AbstractQueryTestCase> BOOLEAN_FIELD_NAME, "type=boolean", DATE_FIELD_NAME, "type=date", OBJECT_FIELD_NAME, "type=object", - GEO_POINT_FIELD_NAME, "type=geo_point,lat_lon=true,geohash=true,geohash_prefix=true", + GEO_POINT_FIELD_NAME, GEO_POINT_FIELD_MAPPING, GEO_SHAPE_FIELD_NAME, "type=geo_shape" ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); // also add mappings for two inner field in the object field diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java index f07e695a1a07..cb0c374c5c08 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java @@ -24,10 +24,12 @@ import org.apache.lucene.spatial.geopoint.search.GeoPointDistanceRangeQuery; import org.apache.lucene.spatial.util.GeoDistanceUtils; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.Version; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.unit.DistanceUnit; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery; import org.elasticsearch.test.geo.RandomGeoGenerator; @@ -296,6 +298,36 @@ public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase assertThat(query, instanceOf(TermQuery.class)); TermQuery termQuery = (TermQuery) query; Term term = termQuery.getTerm(); - assertThat(term.field(), equalTo(queryBuilder.fieldName() + GeoPointFieldMapper.Names.GEOHASH_SUFFIX)); + assertThat(term.field(), equalTo(queryBuilder.fieldName() + "." 
+ GeoPointFieldMapper.Names.GEOHASH)); String geohash = queryBuilder.geohash(); if (queryBuilder.precision() != null) { int len = Math.min(queryBuilder.precision(), geohash.length()); diff --git a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java index 0f7e2e67e382..beef2df15d4d 100644 --- a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java @@ -52,6 +52,7 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase Date: Wed, 9 Mar 2016 19:43:56 +0300 Subject: [PATCH 128/320] Don't return all indices immediately if count of expressions >1 and first expression is * #17027 --- .../metadata/IndexNameExpressionResolver.java | 2 +- .../metadata/WildcardExpressionResolverTests.java | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index cca633a7651f..9bd4ba6112bb 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -686,7 +686,7 @@ public class IndexNameExpressionResolver extends AbstractComponent { } private boolean isEmptyOrTrivialWildcard(List expressions) { - return expressions.isEmpty() || (expressions.size() == 1 && (MetaData.ALL.equals(expressions.get(0))) || Regex.isMatchAllPattern(expressions.get(0))); + return expressions.isEmpty() || (expressions.size() == 1 && (MetaData.ALL.equals(expressions.get(0)) || Regex.isMatchAllPattern(expressions.get(0)))); } private List resolveEmptyOrTrivialWildcard(IndicesOptions options, MetaData metaData, boolean assertEmpty) { diff --git 
a/core/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java index d9cf9f0d7909..744477d6722e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java @@ -47,6 +47,8 @@ public class WildcardExpressionResolverTests extends ESTestCase { assertThat(newHashSet(resolver.resolve(context, Arrays.asList("test*"))), equalTo(newHashSet("testXXX", "testXYY", "testYYY"))); assertThat(newHashSet(resolver.resolve(context, Arrays.asList("testX*"))), equalTo(newHashSet("testXXX", "testXYY"))); assertThat(newHashSet(resolver.resolve(context, Arrays.asList("testX*", "kuku"))), equalTo(newHashSet("testXXX", "testXYY", "kuku"))); + assertThat(newHashSet(resolver.resolve(context, Arrays.asList("*"))), equalTo(newHashSet("testXXX", "testXYY", "testYYY", "kuku"))); + assertThat(newHashSet(resolver.resolve(context, Arrays.asList("*", "-kuku"))), equalTo(newHashSet("testXXX", "testXYY", "testYYY"))); } public void testConvertWildcardsTests() { @@ -107,6 +109,18 @@ public class WildcardExpressionResolverTests extends ESTestCase { assertThat(newHashSet(resolver.resolve(context, Arrays.asList("*Y*X"))).size(), equalTo(0)); } + public void testAll() { + MetaData.Builder mdBuilder = MetaData.builder() + .put(indexBuilder("testXXX")) + .put(indexBuilder("testXYY")) + .put(indexBuilder("testYYY")); + ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); + IndexNameExpressionResolver.WildcardExpressionResolver resolver = new IndexNameExpressionResolver.WildcardExpressionResolver(); + + IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.lenientExpandOpen()); + assertThat(newHashSet(resolver.resolve(context, Arrays.asList("_all"))), 
equalTo(newHashSet("testXXX", "testXYY", "testYYY"))); + } + private IndexMetaData.Builder indexBuilder(String index) { return IndexMetaData.builder(index).settings(settings(Version.CURRENT).put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)); } From 5bb72dbcd247637ed823bd3f89a9c54e88fe7645 Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Sat, 5 Mar 2016 00:33:36 -0500 Subject: [PATCH 129/320] construct suggestion context from query context --- .../common/io/stream/StreamInput.java | 7 - .../common/io/stream/StreamOutput.java | 6 - .../completion/CompletionSuggestParser.java | 60 ++---- .../CompletionSuggestionBuilder.java | 191 +++++++++++------- .../CompletionSuggestionContext.java | 24 ++- .../suggest/completion/FuzzyOptions.java | 23 +++ .../suggest/completion/RegexOptions.java | 31 +++ .../context/CategoryContextMapping.java | 28 +-- .../context/CategoryQueryContext.java | 24 --- .../completion/context/ContextMapping.java | 46 ++++- .../completion/context/ContextMappings.java | 8 +- .../completion/context/GeoContextMapping.java | 40 ++-- .../completion/context/GeoQueryContext.java | 35 ---- .../completion/context/QueryContext.java | 3 +- .../AbstractSuggestionBuilderTestCase.java | 53 +++-- .../ContextCompletionSuggestSearchIT.java | 77 ++++--- .../CategoryContextMappingTests.java | 68 +++---- .../completion/CategoryQueryContextTests.java | 19 -- .../CompletionSuggesterBuilderTests.java | 109 +++++++--- .../completion/GeoContextMappingTests.java | 72 +++---- .../completion/GeoQueryContextTests.java | 29 --- .../completion/QueryContextTestCase.java | 15 +- 22 files changed, 515 insertions(+), 453 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index ce459662009b..cd2b6fb79c74 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ 
b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -716,13 +716,6 @@ public abstract class StreamInput extends InputStream { return readNamedWriteable(SuggestionBuilder.class); } - /** - * Reads a completion {@link QueryContext} from the current stream - */ - public QueryContext readCompletionSuggestionQueryContext() throws IOException { - return readNamedWriteable(QueryContext.class); - } - /** * Reads a {@link org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder} from the current stream */ diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index 95ec0fec2927..a4385dce192d 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -740,10 +740,4 @@ public abstract class StreamOutput extends OutputStream { writeNamedWriteable(suggestion); } - /** - * Writes a completion {@link QueryContext} to the current stream - */ - public void writeCompletionSuggestionQueryContext(QueryContext queryContext) throws IOException { - writeNamedWriteable(queryContext); - } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java index 04f63042d492..e5b70db6999e 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.suggest.completion; import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.ObjectParser; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -34,14 +33,8 @@ import org.elasticsearch.index.query.RegexpFlag; import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.SuggestUtils.Fields; import org.elasticsearch.search.suggest.SuggestionSearchContext; -import org.elasticsearch.search.suggest.completion.context.ContextMapping; -import org.elasticsearch.search.suggest.completion.context.ContextMappings; import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; /** * Parses query options for {@link CompletionSuggester} @@ -74,27 +67,20 @@ import java.util.Map; public class CompletionSuggestParser implements SuggestContextParser { private static ObjectParser TLP_PARSER = new ObjectParser<>(CompletionSuggestionBuilder.SUGGESTION_NAME, null); - private static ObjectParser REGEXP_PARSER = new ObjectParser<>(RegexOptions.REGEX_OPTIONS.getPreferredName(), RegexOptions.Builder::new); - private static ObjectParser FUZZY_PARSER = new ObjectParser<>(FuzzyOptions.FUZZY_OPTIONS.getPreferredName(), FuzzyOptions.Builder::new); static { - FUZZY_PARSER.declareInt(FuzzyOptions.Builder::setFuzzyMinLength, FuzzyOptions.MIN_LENGTH_FIELD); - FUZZY_PARSER.declareInt(FuzzyOptions.Builder::setMaxDeterminizedStates, FuzzyOptions.MAX_DETERMINIZED_STATES_FIELD); - FUZZY_PARSER.declareBoolean(FuzzyOptions.Builder::setUnicodeAware, FuzzyOptions.UNICODE_AWARE_FIELD); - FUZZY_PARSER.declareInt(FuzzyOptions.Builder::setFuzzyPrefixLength, FuzzyOptions.PREFIX_LENGTH_FIELD); - FUZZY_PARSER.declareBoolean(FuzzyOptions.Builder::setTranspositions, FuzzyOptions.TRANSPOSITION_FIELD); - FUZZY_PARSER.declareValue((a, b) -> { - try { - a.setFuzziness(Fuzziness.parse(b).asDistance()); - } catch (IOException e) { - throw new ElasticsearchException(e); - } - }, Fuzziness.FIELD); - 
REGEXP_PARSER.declareInt(RegexOptions.Builder::setMaxDeterminizedStates, RegexOptions.MAX_DETERMINIZED_STATES); - REGEXP_PARSER.declareStringOrNull(RegexOptions.Builder::setFlags, RegexOptions.FLAGS_VALUE); - TLP_PARSER.declareStringArray(CompletionSuggestionContext::setPayloadFields, CompletionSuggestionBuilder.PAYLOAD_FIELD); - TLP_PARSER.declareObjectOrDefault(CompletionSuggestionContext::setFuzzyOptionsBuilder, FUZZY_PARSER, FuzzyOptions.Builder::new, FuzzyOptions.FUZZY_OPTIONS); - TLP_PARSER.declareObject(CompletionSuggestionContext::setRegexOptionsBuilder, REGEXP_PARSER, RegexOptions.REGEX_OPTIONS); + TLP_PARSER.declareField((parser, completionSuggestionContext, context) -> { + if (parser.currentToken() == XContentParser.Token.VALUE_BOOLEAN) { + if (parser.booleanValue()) { + completionSuggestionContext.setFuzzyOptions(new FuzzyOptions.Builder().build()); + } + } else { + completionSuggestionContext.setFuzzyOptions(FuzzyOptions.parse(parser)); + } + }, + FuzzyOptions.FUZZY_OPTIONS, ObjectParser.ValueType.OBJECT_OR_BOOLEAN); + TLP_PARSER.declareField((parser, completionSuggestionContext, context) -> completionSuggestionContext.setRegexOptions(RegexOptions.parse(parser)), + RegexOptions.REGEX_OPTIONS, ObjectParser.ValueType.OBJECT); TLP_PARSER.declareString(SuggestionSearchContext.SuggestionContext::setField, Fields.FIELD); TLP_PARSER.declareField((p, v, c) -> { String analyzerName = p.text(); @@ -132,7 +118,7 @@ public class CompletionSuggestParser implements SuggestContextParser { } @Override - public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, QueryShardContext shardContext) throws IOException { + public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, QueryShardContext shardContext) throws IOException { MapperService mapperService = shardContext.getMapperService(); final CompletionSuggestionContext suggestion = new CompletionSuggestionContext(shardContext); final ContextAndSuggest contextAndSuggest = new 
ContextAndSuggest(mapperService); @@ -146,28 +132,12 @@ public class CompletionSuggestParser implements SuggestContextParser { if (type.hasContextMappings() == false && contextParser != null) { throw new IllegalArgumentException("suggester [" + type.name() + "] doesn't expect any context"); } - Map> queryContexts = Collections.emptyMap(); - if (type.hasContextMappings() && contextParser != null) { - ContextMappings contextMappings = type.getContextMappings(); - contextParser.nextToken(); - queryContexts = new HashMap<>(contextMappings.size()); - assert contextParser.currentToken() == XContentParser.Token.START_OBJECT; - XContentParser.Token currentToken; - String currentFieldName; - while ((currentToken = contextParser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (currentToken == XContentParser.Token.FIELD_NAME) { - currentFieldName = contextParser.currentName(); - final ContextMapping mapping = contextMappings.get(currentFieldName); - queryContexts.put(currentFieldName, mapping.parseQueryContext(contextParser)); - } - } - contextParser.close(); - } + suggestion.setQueryContexts(CompletionSuggestionBuilder.parseQueryContexts(contextParser, type)); suggestion.setFieldType(type); - suggestion.setQueryContexts(queryContexts); return suggestion; } else { throw new IllegalArgumentException("Field [" + suggestion.getField() + "] is not a completion suggest field"); } } + } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index f2540e4862cf..38242a29ae4b 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -18,29 +18,37 @@ */ package org.elasticsearch.search.suggest.completion; +import org.elasticsearch.ElasticsearchException; import 
org.elasticsearch.common.ParseField; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.core.CompletionFieldMapper; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.search.suggest.SuggestUtils; import org.elasticsearch.search.suggest.SuggestionBuilder; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; -import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext; -import org.elasticsearch.search.suggest.completion.context.GeoQueryContext; +import org.elasticsearch.search.suggest.completion.context.ContextMapping; +import org.elasticsearch.search.suggest.completion.context.ContextMappings; import org.elasticsearch.search.suggest.completion.context.QueryContext; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Set; /** * Defines a suggest command based on a prefix, typically to provide "auto-complete" functionality @@ -55,10 +63,40 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder TLP_PARSER = + new ObjectParser<>(CompletionSuggestionBuilder.SUGGESTION_NAME, null); + static { + 
TLP_PARSER.declareStringArray(CompletionSuggestionBuilder::payload, CompletionSuggestionBuilder.PAYLOAD_FIELD); + TLP_PARSER.declareField((parser, completionSuggestionContext, context) -> { + if (parser.currentToken() == XContentParser.Token.VALUE_BOOLEAN) { + if (parser.booleanValue()) { + completionSuggestionContext.fuzzyOptions = new FuzzyOptions.Builder().build(); + } + } else { + completionSuggestionContext.fuzzyOptions = FuzzyOptions.parse(parser); + } + }, + FuzzyOptions.FUZZY_OPTIONS, ObjectParser.ValueType.OBJECT_OR_BOOLEAN); + TLP_PARSER.declareField((parser, completionSuggestionContext, context) -> + completionSuggestionContext.regexOptions = RegexOptions.parse(parser), + RegexOptions.REGEX_OPTIONS, ObjectParser.ValueType.OBJECT); + TLP_PARSER.declareString(CompletionSuggestionBuilder::field, SuggestUtils.Fields.FIELD); + TLP_PARSER.declareString(CompletionSuggestionBuilder::analyzer, SuggestUtils.Fields.ANALYZER); + TLP_PARSER.declareInt(CompletionSuggestionBuilder::size, SuggestUtils.Fields.SIZE); + TLP_PARSER.declareInt(CompletionSuggestionBuilder::shardSize, SuggestUtils.Fields.SHARD_SIZE); + TLP_PARSER.declareField((p, v, c) -> { + // Copy the current structure. 
We will parse, once the mapping is provided + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + builder.copyCurrentStructure(p); + v.contextBytes = builder.bytes(); + p.skipChildren(); + }, CompletionSuggestionBuilder.CONTEXTS_FIELD, ObjectParser.ValueType.OBJECT); // context is deprecated + } + private FuzzyOptions fuzzyOptions; private RegexOptions regexOptions; - private final Map> queryContexts = new HashMap<>(); - private final Set payloadFields = new HashSet<>(); + private BytesReference contextBytes = null; + private List payloadFields = Collections.emptyList(); public CompletionSuggestionBuilder(String fieldname) { super(fieldname); @@ -117,36 +155,33 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder fields) { - this.payloadFields.addAll(fields); + this.payloadFields = fields; return this; } /** - * Sets query contexts for a category context - * @param name of the category context to execute on - * @param queryContexts a list of {@link CategoryQueryContext} + * Sets query contexts for completion + * @param queryContexts named query contexts + * see {@link org.elasticsearch.search.suggest.completion.context.CategoryQueryContext} + * and {@link org.elasticsearch.search.suggest.completion.context.GeoQueryContext} */ - public CompletionSuggestionBuilder categoryContexts(String name, CategoryQueryContext... queryContexts) { - return contexts(name, queryContexts); - } - - /** - * Sets query contexts for a geo context - * @param name of the geo context to execute on - * @param queryContexts a list of {@link GeoQueryContext} - */ - public CompletionSuggestionBuilder geoContexts(String name, GeoQueryContext... queryContexts) { - return contexts(name, queryContexts); - } - - private CompletionSuggestionBuilder contexts(String name, QueryContext... 
queryContexts) { - List contexts = this.queryContexts.get(name); - if (contexts == null) { - contexts = new ArrayList<>(2); - this.queryContexts.put(name, contexts); + public CompletionSuggestionBuilder contexts(Map> queryContexts) { + try { + XContentBuilder contentBuilder = XContentFactory.jsonBuilder(); + contentBuilder.startObject(); + for (Map.Entry> contextEntry : queryContexts.entrySet()) { + contentBuilder.startArray(contextEntry.getKey()); + for (ToXContent queryContext : contextEntry.getValue()) { + queryContext.toXContent(contentBuilder, EMPTY_PARAMS); + } + contentBuilder.endArray(); + } + contentBuilder.endObject(); + contextBytes = contentBuilder.bytes(); + return this; + } catch (IOException e) { + throw new IllegalArgumentException(e); } - Collections.addAll(contexts, queryContexts); - return this; } @Override @@ -164,33 +199,44 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder> entry : this.queryContexts.entrySet()) { - builder.startArray(entry.getKey()); - for (ToXContent queryContext : entry.getValue()) { - queryContext.toXContent(builder, params); - } - builder.endArray(); - } - builder.endObject(); + if (contextBytes != null) { + XContentParser contextParser = XContentFactory.xContent(XContentType.JSON).createParser(contextBytes); + builder.field(CONTEXTS_FIELD.getPreferredName()); + builder.copyCurrentStructure(contextParser); } return builder; } @Override protected CompletionSuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException { - // NORELEASE implement parsing logic - throw new UnsupportedOperationException(); + CompletionSuggestionBuilder builder = new CompletionSuggestionBuilder(); + TLP_PARSER.parse(parseContext.parser(), builder); + return builder; } @Override protected SuggestionContext innerBuild(QueryShardContext context) throws IOException { CompletionSuggestionContext suggestionContext = new CompletionSuggestionContext(context); // copy over common settings to each suggestion 
builder - populateCommonFields(context.getMapperService(), suggestionContext); - // NORELEASE - // still need to populate CompletionSuggestionContext's specific settings + final MapperService mapperService = context.getMapperService(); + populateCommonFields(mapperService, suggestionContext); + suggestionContext.setPayloadFields(payloadFields); + suggestionContext.setFuzzyOptions(fuzzyOptions); + suggestionContext.setRegexOptions(regexOptions); + MappedFieldType mappedFieldType = mapperService.fullName(suggestionContext.getField()); + if (mappedFieldType == null) { + throw new ElasticsearchException("Field [" + suggestionContext.getField() + "] is not a completion suggest field"); + } else if (mappedFieldType instanceof CompletionFieldMapper.CompletionFieldType) { + CompletionFieldMapper.CompletionFieldType type = (CompletionFieldMapper.CompletionFieldType) mappedFieldType; + if (type.hasContextMappings() && contextBytes != null) { + XContentParser contextParser = XContentFactory.xContent(contextBytes).createParser(contextBytes); + suggestionContext.setQueryContexts(parseQueryContexts(contextParser, type)); + } else if (contextBytes != null) { + throw new IllegalArgumentException("suggester [" + type.name() + "] doesn't expect any context"); + } + } else { + throw new IllegalArgumentException("Field [" + suggestionContext.getField() + "] is not a completion suggest field"); + } return suggestionContext; } @@ -217,18 +263,10 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder> namedQueryContexts : queryContexts.entrySet()) { - out.writeString(namedQueryContexts.getKey()); - List queryContexts = namedQueryContexts.getValue(); - out.writeVInt(queryContexts.size()); - for (QueryContext queryContext : queryContexts) { - out.writeCompletionSuggestionQueryContext(queryContext); - } - } + out.writeBytesReference(contextBytes); } } @@ -237,9 +275,11 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder payloadFields = new 
ArrayList<>(numPayloadField); for (int i = 0; i < numPayloadField; i++) { - completionSuggestionBuilder.payloadFields.add(in.readString()); + payloadFields.add(in.readString()); } + completionSuggestionBuilder.payloadFields = payloadFields; } if (in.readBoolean()) { completionSuggestionBuilder.fuzzyOptions = FuzzyOptions.readFuzzyOptions(in); @@ -248,30 +288,43 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder queryContexts = new ArrayList<>(numQueryContexts); - for (int j = 0; j < numQueryContexts; j++) { - queryContexts.add(in.readCompletionSuggestionQueryContext()); - } - completionSuggestionBuilder.queryContexts.put(queryContextName, queryContexts); - } + completionSuggestionBuilder.contextBytes = in.readBytesReference(); } return completionSuggestionBuilder; } @Override protected boolean doEquals(CompletionSuggestionBuilder other) { - return Objects.equals(payloadFields, other.payloadFields) && + return Objects.equals(payloadFields, other.payloadFields) && Objects.equals(fuzzyOptions, other.fuzzyOptions) && Objects.equals(regexOptions, other.regexOptions) && - Objects.equals(queryContexts, other.queryContexts); + Objects.equals(contextBytes, other.contextBytes); } @Override protected int doHashCode() { - return Objects.hash(payloadFields, fuzzyOptions, regexOptions, queryContexts); + return Objects.hash(payloadFields, fuzzyOptions, regexOptions, contextBytes); + } + + static Map> parseQueryContexts( + XContentParser contextParser, CompletionFieldMapper.CompletionFieldType type) throws IOException { + Map> queryContexts = Collections.emptyMap(); + if (type.hasContextMappings() && contextParser != null) { + ContextMappings contextMappings = type.getContextMappings(); + contextParser.nextToken(); + queryContexts = new HashMap<>(contextMappings.size()); + assert contextParser.currentToken() == XContentParser.Token.START_OBJECT; + XContentParser.Token currentToken; + String currentFieldName; + while ((currentToken = contextParser.nextToken()) != 
XContentParser.Token.END_OBJECT) { + if (currentToken == XContentParser.Token.FIELD_NAME) { + currentFieldName = contextParser.currentName(); + final ContextMapping mapping = contextMappings.get(currentFieldName); + queryContexts.put(currentFieldName, mapping.parseQueryContext(contextParser)); + } + } + contextParser.close(); + } + return queryContexts; } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java index b20b9a5aeef2..c6814551174f 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java @@ -44,7 +44,7 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest private CompletionFieldMapper.CompletionFieldType fieldType; private FuzzyOptions fuzzyOptions; private RegexOptions regexOptions; - private Map> queryContexts = Collections.emptyMap(); + private Map> queryContexts = Collections.emptyMap(); private Set payloadFields = Collections.emptySet(); CompletionFieldMapper.CompletionFieldType getFieldType() { @@ -55,15 +55,15 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest this.fieldType = fieldType; } - void setRegexOptionsBuilder(RegexOptions.Builder regexOptionsBuilder) { - this.regexOptions = regexOptionsBuilder.build(); + void setRegexOptions(RegexOptions regexOptions) { + this.regexOptions = regexOptions; } - void setFuzzyOptionsBuilder(FuzzyOptions.Builder fuzzyOptionsBuilder) { - this.fuzzyOptions = fuzzyOptionsBuilder.build(); + void setFuzzyOptions(FuzzyOptions fuzzyOptions) { + this.fuzzyOptions = fuzzyOptions; } - void setQueryContexts(Map> queryContexts) { + void setQueryContexts(Map> queryContexts) { this.queryContexts = queryContexts; } @@ -79,6 +79,18 @@ public class 
CompletionSuggestionContext extends SuggestionSearchContext.Suggest return payloadFields; } + public FuzzyOptions getFuzzyOptions() { + return fuzzyOptions; + } + + public RegexOptions getRegexOptions() { + return regexOptions; + } + + public Map> getQueryContexts() { + return queryContexts; + } + CompletionQuery toQuery() { CompletionFieldMapper.CompletionFieldType fieldType = getFieldType(); final CompletionQuery query; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java index 317ac049d6b6..aac58d7cb368 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java @@ -21,13 +21,16 @@ package org.elasticsearch.search.suggest.completion; import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery; import org.apache.lucene.util.automaton.Operations; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.Objects; @@ -43,6 +46,22 @@ public class FuzzyOptions implements ToXContent, Writeable { static final ParseField UNICODE_AWARE_FIELD = new ParseField("unicode_aware"); static final ParseField MAX_DETERMINIZED_STATES_FIELD = new ParseField("max_determinized_states"); + static ObjectParser FUZZY_PARSER = new ObjectParser<>(FUZZY_OPTIONS.getPreferredName(), Builder::new); + static { + 
FUZZY_PARSER.declareInt(FuzzyOptions.Builder::setFuzzyMinLength, MIN_LENGTH_FIELD); + FUZZY_PARSER.declareInt(FuzzyOptions.Builder::setMaxDeterminizedStates, MAX_DETERMINIZED_STATES_FIELD); + FUZZY_PARSER.declareBoolean(FuzzyOptions.Builder::setUnicodeAware, UNICODE_AWARE_FIELD); + FUZZY_PARSER.declareInt(FuzzyOptions.Builder::setFuzzyPrefixLength, PREFIX_LENGTH_FIELD); + FUZZY_PARSER.declareBoolean(FuzzyOptions.Builder::setTranspositions, TRANSPOSITION_FIELD); + FUZZY_PARSER.declareValue((a, b) -> { + try { + a.setFuzziness(Fuzziness.parse(b).asDistance()); + } catch (IOException e) { + throw new ElasticsearchException(e); + } + }, Fuzziness.FIELD); + } + private int editDistance; private boolean transpositions; private int fuzzyMinLength; @@ -63,6 +82,10 @@ public class FuzzyOptions implements ToXContent, Writeable { private FuzzyOptions() { } + public static FuzzyOptions parse(XContentParser parser) throws IOException { + return FUZZY_PARSER.parse(parser).build(); + } + public static Builder builder() { return new Builder(); } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java index fc183cdb1c1b..a1dcec2d5557 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java @@ -21,12 +21,15 @@ package org.elasticsearch.search.suggest.completion; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContent; import 
org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.RegexpFlag; import java.io.IOException; @@ -39,6 +42,25 @@ public class RegexOptions implements ToXContent, Writeable { static final ParseField REGEX_OPTIONS = new ParseField(NAME); static final ParseField FLAGS_VALUE = new ParseField("flags", "flags_value"); static final ParseField MAX_DETERMINIZED_STATES = new ParseField("max_determinized_states"); + + + private static ObjectParser REGEXP_PARSER = + new ObjectParser<>(REGEX_OPTIONS.getPreferredName(), RegexOptions.Builder::new); + static { + REGEXP_PARSER.declareInt(RegexOptions.Builder::setMaxDeterminizedStates, MAX_DETERMINIZED_STATES); + REGEXP_PARSER.declareField((parser, builder, aVoid) -> { + if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { + builder.setFlags(parser.text()); + } else if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) { + builder.setFlagsValue(parser.intValue()); + } else { + throw new ElasticsearchParseException(REGEX_OPTIONS.getPreferredName() + + " " + FLAGS_VALUE.getPreferredName() + " supports string or number"); + } + }, FLAGS_VALUE, ObjectParser.ValueType.VALUE); + REGEXP_PARSER.declareStringOrNull(RegexOptions.Builder::setFlags, FLAGS_VALUE); + } + private int flagsValue; private int maxDeterminizedStates; @@ -69,6 +91,10 @@ public class RegexOptions implements ToXContent, Writeable { return new Builder(); } + public static RegexOptions parse(XContentParser parser) throws IOException { + return REGEXP_PARSER.parse(parser).build(); + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -135,6 +161,11 @@ public class RegexOptions implements ToXContent, Writeable { return this; } + private Builder setFlagsValue(int flagsValue) { + this.flagsValue = flagsValue; + return this; + } + /** * Sets the maximum automaton states allowed for the regular expression expansion */ diff --git 
a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java index 10ac3935cc29..c9cb165aef74 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java @@ -36,6 +36,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.stream.Collectors; /** * A {@link ContextMapping} that uses a simple string as a criteria @@ -44,7 +45,7 @@ import java.util.Set; * {@link CategoryQueryContext} defines options for constructing * a unit of query context for this context type */ -public class CategoryContextMapping extends ContextMapping { +public class CategoryContextMapping extends ContextMapping { private static final String FIELD_FIELDNAME = "path"; @@ -137,6 +138,11 @@ public class CategoryContextMapping extends ContextMapping { return (values == null) ? Collections.emptySet() : values; } + @Override + protected CategoryQueryContext prototype() { + return CategoryQueryContext.PROTOTYPE; + } + /** * Parse a list of {@link CategoryQueryContext} * using parser. 
A QueryContexts accepts one of the following forms: @@ -154,19 +160,13 @@ public class CategoryContextMapping extends ContextMapping { * */ @Override - public List parseQueryContext(XContentParser parser) throws IOException, ElasticsearchParseException { - List queryContexts = new ArrayList<>(); - Token token = parser.nextToken(); - if (token == Token.START_OBJECT || token == Token.VALUE_STRING) { - CategoryQueryContext parse = CategoryQueryContext.PROTOTYPE.fromXContext(parser); - queryContexts.add(new QueryContext(parse.getCategory(), parse.getBoost(), parse.isPrefix())); - } else if (token == Token.START_ARRAY) { - while (parser.nextToken() != Token.END_ARRAY) { - CategoryQueryContext parse = CategoryQueryContext.PROTOTYPE.fromXContext(parser); - queryContexts.add(new QueryContext(parse.getCategory(), parse.getBoost(), parse.isPrefix())); - } - } - return queryContexts; + public List toInternalQueryContexts(List queryContexts) { + List internalInternalQueryContexts = new ArrayList<>(queryContexts.size()); + internalInternalQueryContexts.addAll( + queryContexts.stream() + .map(queryContext -> new InternalQueryContext(queryContext.getCategory(), queryContext.getBoost(), queryContext.isPrefix())) + .collect(Collectors.toList())); + return internalInternalQueryContexts; } @Override diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java index 8db9afe5ae35..a164faff8b1d 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java @@ -21,14 +21,11 @@ package org.elasticsearch.search.suggest.completion.context; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.io.stream.StreamInput; -import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; -import java.util.Collections; import java.util.Objects; import static org.elasticsearch.search.suggest.completion.context.CategoryContextMapping.CONTEXT_BOOST; @@ -98,11 +95,6 @@ public final class CategoryQueryContext implements QueryContext { return result; } - @Override - public String getWriteableName() { - return NAME; - } - private static ObjectParser CATEGORY_PARSER = new ObjectParser<>(NAME, null); static { CATEGORY_PARSER.declareString(Builder::setCategory, new ParseField(CONTEXT_VALUE)); @@ -134,22 +126,6 @@ public final class CategoryQueryContext implements QueryContext { return builder; } - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeBoolean(isPrefix); - out.writeVInt(boost); - out.writeString(category); - } - - @Override - public QueryContext readFrom(StreamInput in) throws IOException { - Builder builder = new Builder(); - builder.isPrefix = in.readBoolean(); - builder.boost = in.readVInt(); - builder.category = in.readString(); - return builder.build(); - } - public static class Builder { private String category; private boolean isPrefix = false; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java index 42e5cc0a1577..501f4d153d12 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java @@ -23,11 +23,13 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.CompletionFieldMapper; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.Set; @@ -38,7 +40,7 @@ import java.util.Set; * * Implementations have to define how contexts are parsed at query/index time */ -public abstract class ContextMapping implements ToXContent { +public abstract class ContextMapping implements ToXContent { public static final String FIELD_TYPE = "type"; public static final String FIELD_NAME = "name"; @@ -94,10 +96,25 @@ public abstract class ContextMapping implements ToXContent { */ protected abstract Set parseContext(ParseContext.Document document); + protected abstract T prototype(); + /** * Parses query contexts for this mapper */ - public abstract List parseQueryContext(XContentParser parser) throws IOException, ElasticsearchParseException; + public List parseQueryContext(XContentParser parser) throws IOException, ElasticsearchParseException { + List queryContexts = new ArrayList<>(); + Token token = parser.nextToken(); + if (token == Token.START_OBJECT || token == Token.VALUE_STRING) { + queryContexts.add((T) prototype().fromXContext(parser)); + } else if (token == Token.START_ARRAY) { + while (parser.nextToken() != Token.END_ARRAY) { + queryContexts.add((T) prototype().fromXContext(parser)); + } + } + return toInternalQueryContexts(queryContexts); + } + + protected abstract List toInternalQueryContexts(List queryContexts); /** * Implementations should add specific configurations @@ -136,17 +153,38 @@ public abstract class ContextMapping implements ToXContent { } } - public static class QueryContext { + public static class InternalQueryContext { public final String context; public final int boost; public 
final boolean isPrefix; - public QueryContext(String context, int boost, boolean isPrefix) { + public InternalQueryContext(String context, int boost, boolean isPrefix) { this.context = context; this.boost = boost; this.isPrefix = isPrefix; } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + InternalQueryContext that = (InternalQueryContext) o; + + if (boost != that.boost) return false; + if (isPrefix != that.isPrefix) return false; + return context != null ? context.equals(that.context) : that.context == null; + + } + + @Override + public int hashCode() { + int result = context != null ? context.hashCode() : 0; + result = 31 * result + boost; + result = 31 * result + (isPrefix ? 1 : 0); + return result; + } + @Override public String toString() { return "QueryContext{" + diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java index ccd4b2d58480..ff550a8d34e6 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java @@ -152,7 +152,7 @@ public class ContextMappings implements ToXContent { * @param queryContexts a map of context mapping name and collected query contexts * @return a context-enabled query */ - public ContextQuery toContextQuery(CompletionQuery query, Map> queryContexts) { + public ContextQuery toContextQuery(CompletionQuery query, Map> queryContexts) { ContextQuery typedContextQuery = new ContextQuery(query); if (queryContexts.isEmpty() == false) { CharsRefBuilder scratch = new CharsRefBuilder(); @@ -161,9 +161,9 @@ public class ContextMappings implements ToXContent { scratch.setCharAt(0, (char) typeId); scratch.setLength(1); ContextMapping mapping = contextMappings.get(typeId); - 
List queryContext = queryContexts.get(mapping.name()); - if (queryContext != null) { - for (ContextMapping.QueryContext context : queryContext) { + List internalQueryContext = queryContexts.get(mapping.name()); + if (internalQueryContext != null) { + for (ContextMapping.InternalQueryContext context : internalQueryContext) { scratch.append(context.context); typedContextQuery.addContext(scratch.toCharsRef(), context.boost, !context.isPrefix); scratch.setLength(1); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java index 870ec3bd122b..41d78e753533 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java @@ -42,6 +42,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.stream.Collectors; import static org.apache.lucene.spatial.util.GeoHashUtils.addNeighbors; import static org.apache.lucene.spatial.util.GeoHashUtils.stringEncode; @@ -56,7 +57,7 @@ import static org.apache.lucene.spatial.util.GeoHashUtils.stringEncode; * {@link GeoQueryContext} defines the options for constructing * a unit of query context for this context type */ -public class GeoContextMapping extends ContextMapping { +public class GeoContextMapping extends ContextMapping { public static final String FIELD_PRECISION = "precision"; public static final String FIELD_FIELDNAME = "path"; @@ -221,6 +222,11 @@ public class GeoContextMapping extends ContextMapping { return locations; } + @Override + protected GeoQueryContext prototype() { + return GeoQueryContext.PROTOTYPE; + } + /** * Parse a list of {@link GeoQueryContext} * using parser. 
A QueryContexts accepts one of the following forms: @@ -245,17 +251,8 @@ public class GeoContextMapping extends ContextMapping { * see {@link GeoUtils#parseGeoPoint(String, GeoPoint)} for GEO POINT */ @Override - public List parseQueryContext(XContentParser parser) throws IOException, ElasticsearchParseException { - List queryContexts = new ArrayList<>(); - Token token = parser.nextToken(); - if (token == Token.START_OBJECT || token == Token.VALUE_STRING) { - queryContexts.add(GeoQueryContext.PROTOTYPE.fromXContext(parser)); - } else if (token == Token.START_ARRAY) { - while (parser.nextToken() != Token.END_ARRAY) { - queryContexts.add(GeoQueryContext.PROTOTYPE.fromXContext(parser)); - } - } - List queryContextList = new ArrayList<>(); + public List toInternalQueryContexts(List queryContexts) { + List internalQueryContextList = new ArrayList<>(); for (GeoQueryContext queryContext : queryContexts) { int minPrecision = Math.min(this.precision, queryContext.getPrecision()); GeoPoint point = queryContext.getGeoPoint(); @@ -265,19 +262,20 @@ public class GeoContextMapping extends ContextMapping { if (queryContext.getNeighbours().isEmpty() && geoHash.length() == this.precision) { addNeighbors(geoHash, locations); } else if (queryContext.getNeighbours().isEmpty() == false) { - for (Integer neighbourPrecision : queryContext.getNeighbours()) { - if (neighbourPrecision < geoHash.length()) { + queryContext.getNeighbours().stream() + .filter(neighbourPrecision -> neighbourPrecision < geoHash.length()) + .forEach(neighbourPrecision -> { String truncatedGeoHash = geoHash.substring(0, neighbourPrecision); locations.add(truncatedGeoHash); addNeighbors(truncatedGeoHash, locations); - } - } - } - for (String location : locations) { - queryContextList.add(new QueryContext(location, queryContext.getBoost(), location.length() < this.precision)); + }); } + internalQueryContextList.addAll( + locations.stream() + .map(location -> new InternalQueryContext(location, queryContext.getBoost(), 
location.length() < this.precision)) + .collect(Collectors.toList())); } - return queryContextList; + return internalQueryContextList; } @Override @@ -301,7 +299,7 @@ public class GeoContextMapping extends ContextMapping { private int precision = DEFAULT_PRECISION; private String fieldName = null; - protected Builder(String name) { + public Builder(String name) { super(name); } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoQueryContext.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoQueryContext.java index 5b406abc1d41..913702c18d02 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoQueryContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoQueryContext.java @@ -23,14 +23,11 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; -import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Objects; @@ -114,11 +111,6 @@ public final class GeoQueryContext implements QueryContext { return new Builder(); } - @Override - public String getWriteableName() { - return NAME; - } - private static ObjectParser GEO_CONTEXT_PARSER = new ObjectParser<>(NAME, null); static { GEO_CONTEXT_PARSER.declareField((parser, geoQueryContext, geoContextMapping) -> geoQueryContext.setGeoPoint(GeoUtils.parseGeoPoint(parser)), new ParseField(CONTEXT_VALUE), ObjectParser.ValueType.OBJECT); @@ -159,33 +151,6 @@ public final class GeoQueryContext implements QueryContext { 
return builder; } - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeGeoPoint(geoPoint); - out.writeVInt(boost); - out.writeInt(precision); - out.writeVInt(neighbours.size()); - for (Integer neighbour : neighbours) { - out.writeVInt(neighbour); - } - } - - @Override - public QueryContext readFrom(StreamInput in) throws IOException { - Builder builder = new Builder(); - builder.geoPoint = in.readGeoPoint(); - builder.boost = in.readVInt(); - builder.precision = in.readInt(); - int nNeighbour = in.readVInt(); - if (nNeighbour != 0) { - builder.neighbours = new ArrayList<>(nNeighbour); - for (int i = 0; i < nNeighbour; i++) { - builder.neighbours.add(in.readVInt()); - } - } - return builder.build(); - } - public static class Builder { private GeoPoint geoPoint; private int boost = 1; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/QueryContext.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/QueryContext.java index ccfd4a8d3dac..9d96bf814475 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/QueryContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/QueryContext.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.suggest.completion.context; -import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentParser; @@ -28,7 +27,7 @@ import java.io.IOException; /** * Interface for serializing/de-serializing completion query context */ -public interface QueryContext extends ToXContent, NamedWriteable { +public interface QueryContext extends ToXContent { QueryContext fromXContext(XContentParser parser) throws IOException; } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java 
b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index f31b57d851f4..2c310b04870b 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.suggest; import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -160,7 +161,8 @@ public abstract class AbstractSuggestionBuilderTestCase mockMapperServiceAndSuggestionBuilder( + IndexSettings idxSettings, AnalysisService mockAnalysisService, SB suggestBuilder) { + final MapperService mapperService = new MapperService(idxSettings, mockAnalysisService, null, + new IndicesModule().getMapperRegistry(), null) { + @Override + public MappedFieldType fullName(String fullName) { + StringFieldType type = new StringFieldType(); + if (randomBoolean()) { + type.setSearchAnalyzer(new NamedAnalyzer("foo", new WhitespaceAnalyzer())); + } + return type; + } + }; + return new Tuple<>(mapperService, suggestBuilder); + } + /** * parses random suggestion builder via old parseElement method and via * build, comparing the results for equality @@ -226,30 +244,21 @@ public abstract class AbstractSuggestionBuilderTestCase mapperServiceSBTuple = + mockMapperServiceAndSuggestionBuilder(idxSettings, mockAnalysisService, suggestionBuilder); + suggestionBuilder = mapperServiceSBTuple.v2(); + QueryShardContext mockShardContext = new QueryShardContext(idxSettings, + null, null, mapperServiceSBTuple.v1(), null, scriptService, null) { + @Override + public MappedFieldType fieldMapper(String name) { + StringFieldMapper.Builder 
builder = new StringFieldMapper.Builder(name); + return builder.build(new Mapper.BuilderContext(idxSettings.getSettings(), new ContentPath(1))).fieldType(); + } + }; + mockShardContext.setMapUnmappedFieldAsString(true); suggestBuilder.addSuggestion(randomAsciiOfLength(10), suggestionBuilder); if (suggestionBuilder.text() == null) { diff --git a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java index fa552172b66c..58458c9d2449 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -36,12 +36,14 @@ import org.elasticsearch.search.suggest.completion.context.ContextBuilder; import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.GeoContextMapping; import org.elasticsearch.search.suggest.completion.context.GeoQueryContext; +import org.elasticsearch.search.suggest.completion.context.QueryContext; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; @@ -180,7 +182,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") - .categoryContexts("cat", CategoryQueryContext.builder().setCategory("cat0").build()); + .contexts(Collections.singletonMap("cat", Collections.singletonList(CategoryQueryContext.builder().setCategory("cat0").build()))); assertSuggestions("foo", prefix, "suggestion8", "suggestion6", "suggestion4", "suggestion2", 
"suggestion0"); } @@ -207,9 +209,9 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") - .categoryContexts("cat", - CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(), - CategoryQueryContext.builder().setCategory("cat1").build() + .contexts(Collections.singletonMap("cat", + Arrays.asList(CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(), + CategoryQueryContext.builder().setCategory("cat1").build())) ); assertSuggestions("foo", prefix, "suggestion8", "suggestion6", "suggestion4", "suggestion9", "suggestion2"); } @@ -267,24 +269,21 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { // filter only on context cat CompletionSuggestionBuilder catFilterSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); - catFilterSuggest.categoryContexts("cat", CategoryQueryContext.builder().setCategory("cat0").build()); + catFilterSuggest.contexts(Collections.singletonMap("cat", Collections.singletonList(CategoryQueryContext.builder().setCategory("cat0").build()))); assertSuggestions("foo", catFilterSuggest, "suggestion8", "suggestion6", "suggestion4", "suggestion2", "suggestion0"); // filter only on context type CompletionSuggestionBuilder typeFilterSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); - typeFilterSuggest.categoryContexts("type", CategoryQueryContext.builder().setCategory("type2").build(), - CategoryQueryContext.builder().setCategory("type1").build()); + typeFilterSuggest.contexts(Collections.singletonMap("type", Arrays.asList(CategoryQueryContext.builder().setCategory("type2").build(), + CategoryQueryContext.builder().setCategory("type1").build()))); assertSuggestions("foo", typeFilterSuggest, "suggestion9", "suggestion6", "suggestion5", "suggestion2", "suggestion1"); 
CompletionSuggestionBuilder multiContextFilterSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); // query context order should never matter - if (randomBoolean()) { - multiContextFilterSuggest.categoryContexts("type", CategoryQueryContext.builder().setCategory("type2").build()); - multiContextFilterSuggest.categoryContexts("cat", CategoryQueryContext.builder().setCategory("cat2").build()); - } else { - multiContextFilterSuggest.categoryContexts("cat", CategoryQueryContext.builder().setCategory("cat2").build()); - multiContextFilterSuggest.categoryContexts("type", CategoryQueryContext.builder().setCategory("type2").build()); - } + Map> contextMap = new HashMap<>(); + contextMap.put("type", Collections.singletonList(CategoryQueryContext.builder().setCategory("type2").build())); + contextMap.put("cat", Collections.singletonList(CategoryQueryContext.builder().setCategory("cat2").build())); + multiContextFilterSuggest.contexts(contextMap); assertSuggestions("foo", multiContextFilterSuggest, "suggestion6", "suggestion2"); } @@ -315,36 +314,33 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { // boost only on context cat CompletionSuggestionBuilder catBoostSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); - catBoostSuggest.categoryContexts("cat", - CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(), - CategoryQueryContext.builder().setCategory("cat1").build()); + catBoostSuggest.contexts(Collections.singletonMap("cat", + Arrays.asList( + CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(), + CategoryQueryContext.builder().setCategory("cat1").build()))); assertSuggestions("foo", catBoostSuggest, "suggestion8", "suggestion6", "suggestion4", "suggestion9", "suggestion2"); // boost only on context type CompletionSuggestionBuilder typeBoostSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); - typeBoostSuggest.categoryContexts("type", - 
CategoryQueryContext.builder().setCategory("type2").setBoost(2).build(), - CategoryQueryContext.builder().setCategory("type1").setBoost(4).build()); + typeBoostSuggest.contexts(Collections.singletonMap("type", + Arrays.asList( + CategoryQueryContext.builder().setCategory("type2").setBoost(2).build(), + CategoryQueryContext.builder().setCategory("type1").setBoost(4).build()))); assertSuggestions("foo", typeBoostSuggest, "suggestion9", "suggestion5", "suggestion6", "suggestion1", "suggestion2"); // boost on both contexts CompletionSuggestionBuilder multiContextBoostSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); // query context order should never matter - if (randomBoolean()) { - multiContextBoostSuggest.categoryContexts("type", - CategoryQueryContext.builder().setCategory("type2").setBoost(2).build(), - CategoryQueryContext.builder().setCategory("type1").setBoost(4).build()); - multiContextBoostSuggest.categoryContexts("cat", - CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(), - CategoryQueryContext.builder().setCategory("cat1").build()); - } else { - multiContextBoostSuggest.categoryContexts("cat", - CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(), - CategoryQueryContext.builder().setCategory("cat1").build()); - multiContextBoostSuggest.categoryContexts("type", - CategoryQueryContext.builder().setCategory("type2").setBoost(2).build(), - CategoryQueryContext.builder().setCategory("type1").setBoost(4).build()); - } + Map> contextMap = new HashMap<>(); + contextMap.put("type", Arrays.asList( + CategoryQueryContext.builder().setCategory("type2").setBoost(2).build(), + CategoryQueryContext.builder().setCategory("type1").setBoost(4).build()) + ); + contextMap.put("cat", Arrays.asList( + CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(), + CategoryQueryContext.builder().setCategory("cat1").build()) + ); + multiContextBoostSuggest.contexts(contextMap); assertSuggestions("foo", 
multiContextBoostSuggest, "suggestion9", "suggestion6", "suggestion5", "suggestion2", "suggestion1"); } @@ -463,7 +459,8 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); CompletionSuggestionBuilder geoFilteringPrefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") - .geoContexts("geo", GeoQueryContext.builder().setGeoPoint(new GeoPoint(geoPoints[0])).build()); + .contexts(Collections.singletonMap("geo", Collections.singletonList( + GeoQueryContext.builder().setGeoPoint(new GeoPoint(geoPoints[0])).build()))); assertSuggestions("foo", geoFilteringPrefix, "suggestion8", "suggestion6", "suggestion4", "suggestion2", "suggestion0"); } @@ -497,7 +494,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { GeoQueryContext context1 = GeoQueryContext.builder().setGeoPoint(geoPoints[0]).setBoost(2).build(); GeoQueryContext context2 = GeoQueryContext.builder().setGeoPoint(geoPoints[1]).build(); CompletionSuggestionBuilder geoBoostingPrefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") - .geoContexts("geo", context1, context2); + .contexts(Collections.singletonMap("geo", Arrays.asList(context1, context2))); assertSuggestions("foo", geoBoostingPrefix, "suggestion8", "suggestion6", "suggestion4", "suggestion9", "suggestion7"); } @@ -528,7 +525,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { indexRandom(true, indexRequestBuilders); ensureYellow(INDEX); CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") - .geoContexts("geo", GeoQueryContext.builder().setGeoPoint(new GeoPoint(52.2263, 4.543)).build()); + .contexts(Collections.singletonMap("geo", Collections.singletonList(GeoQueryContext.builder().setGeoPoint(new GeoPoint(52.2263, 4.543)).build()))); assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", 
"suggestion6", "suggestion5"); } @@ -569,7 +566,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); CompletionSuggestionBuilder geoNeighbourPrefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") - .geoContexts("geo", GeoQueryContext.builder().setGeoPoint(GeoPoint.fromGeohash(geohash)).build()); + .contexts(Collections.singletonMap("geo", Collections.singletonList(GeoQueryContext.builder().setGeoPoint(GeoPoint.fromGeohash(geohash)).build()))); assertSuggestions("foo", geoNeighbourPrefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); } @@ -626,7 +623,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { String suggestionName = randomAsciiOfLength(10); CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).text("h").size(10) - .geoContexts("st", GeoQueryContext.builder().setGeoPoint(new GeoPoint(52.52, 13.4)).build()); + .contexts(Collections.singletonMap("st", Collections.singletonList(GeoQueryContext.builder().setGeoPoint(new GeoPoint(52.52, 13.4)).build()))); SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(suggestionName, context).get(); assertEquals(suggestResponse.getSuggest().size(), 1); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java index 0d27ba04a914..e0aabe773021 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java @@ -190,11 +190,11 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { XContentBuilder builder = jsonBuilder().value("context1"); XContentParser parser = 
XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes()); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List queryContexts = mapping.parseQueryContext(parser); - assertThat(queryContexts.size(), equalTo(1)); - assertThat(queryContexts.get(0).context, equalTo("context1")); - assertThat(queryContexts.get(0).boost, equalTo(1)); - assertThat(queryContexts.get(0).isPrefix, equalTo(false)); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(1)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); } public void testQueryContextParsingArray() throws Exception { @@ -204,14 +204,14 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endArray(); XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes()); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List queryContexts = mapping.parseQueryContext(parser); - assertThat(queryContexts.size(), equalTo(2)); - assertThat(queryContexts.get(0).context, equalTo("context1")); - assertThat(queryContexts.get(0).boost, equalTo(1)); - assertThat(queryContexts.get(0).isPrefix, equalTo(false)); - assertThat(queryContexts.get(1).context, equalTo("context2")); - assertThat(queryContexts.get(1).boost, equalTo(1)); - assertThat(queryContexts.get(1).isPrefix, equalTo(false)); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(2)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(1)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(1).context, equalTo("context2")); + 
assertThat(internalQueryContexts.get(1).boost, equalTo(1)); + assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); } public void testQueryContextParsingObject() throws Exception { @@ -222,11 +222,11 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject(); XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes()); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List queryContexts = mapping.parseQueryContext(parser); - assertThat(queryContexts.size(), equalTo(1)); - assertThat(queryContexts.get(0).context, equalTo("context1")); - assertThat(queryContexts.get(0).boost, equalTo(10)); - assertThat(queryContexts.get(0).isPrefix, equalTo(true)); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(10)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); } @@ -245,14 +245,14 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endArray(); XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes()); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List queryContexts = mapping.parseQueryContext(parser); - assertThat(queryContexts.size(), equalTo(2)); - assertThat(queryContexts.get(0).context, equalTo("context1")); - assertThat(queryContexts.get(0).boost, equalTo(2)); - assertThat(queryContexts.get(0).isPrefix, equalTo(true)); - assertThat(queryContexts.get(1).context, equalTo("context2")); - assertThat(queryContexts.get(1).boost, equalTo(3)); - assertThat(queryContexts.get(1).isPrefix, equalTo(false)); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(2)); + 
assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(2)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + assertThat(internalQueryContexts.get(1).context, equalTo("context2")); + assertThat(internalQueryContexts.get(1).boost, equalTo(3)); + assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); } public void testQueryContextParsingMixed() throws Exception { @@ -266,14 +266,14 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endArray(); XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes()); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List queryContexts = mapping.parseQueryContext(parser); - assertThat(queryContexts.size(), equalTo(2)); - assertThat(queryContexts.get(0).context, equalTo("context1")); - assertThat(queryContexts.get(0).boost, equalTo(2)); - assertThat(queryContexts.get(0).isPrefix, equalTo(true)); - assertThat(queryContexts.get(1).context, equalTo("context2")); - assertThat(queryContexts.get(1).boost, equalTo(1)); - assertThat(queryContexts.get(1).isPrefix, equalTo(false)); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(2)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(2)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + assertThat(internalQueryContexts.get(1).context, equalTo("context2")); + assertThat(internalQueryContexts.get(1).boost, equalTo(1)); + assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); } public void testParsingContextFromDocument() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryQueryContextTests.java 
b/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryQueryContextTests.java index b4d80ebba980..a4cfc71a3bc0 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryQueryContextTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryQueryContextTests.java @@ -38,25 +38,6 @@ public class CategoryQueryContextTests extends QueryContextTestCase randomAsciiOfLength(10))); - break; - case 1: - builder.setBoost(randomValueOtherThan(original.getBoost(), () -> randomIntBetween(1, 5))); - break; - case 2: - builder.setPrefix(!original.isPrefix()); - break; - - } - return builder.build(); - } - @Override protected CategoryQueryContext prototype() { return CategoryQueryContext.PROTOTYPE; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java index c5547163d279..c92d2a1eaf3b 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java @@ -19,10 +19,21 @@ package org.elasticsearch.search.suggest.completion; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AnalysisService; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.core.CompletionFieldMapper; +import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; +import org.elasticsearch.search.suggest.completion.context.CategoryContextMapping; import 
org.elasticsearch.search.suggest.completion.context.CategoryQueryContext; +import org.elasticsearch.search.suggest.completion.context.ContextMapping; +import org.elasticsearch.search.suggest.completion.context.ContextMappings; +import org.elasticsearch.search.suggest.completion.context.GeoContextMapping; import org.elasticsearch.search.suggest.completion.context.GeoQueryContext; import org.elasticsearch.search.suggest.completion.context.QueryContext; import org.junit.BeforeClass; @@ -30,18 +41,28 @@ import org.junit.BeforeClass; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.hamcrest.core.IsInstanceOf.instanceOf; public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTestCase { - @BeforeClass - public static void initQueryContexts() { - namedWriteableRegistry.registerPrototype(QueryContext.class, CategoryQueryContext.PROTOTYPE); - namedWriteableRegistry.registerPrototype(QueryContext.class, GeoQueryContext.PROTOTYPE); - } - @Override protected CompletionSuggestionBuilder randomSuggestionBuilder() { + return randomSuggestionBuilderWithContextInfo().builder; + } + + private static class BuilderAndInfo { + CompletionSuggestionBuilder builder; + List catContexts = new ArrayList<>(); + List geoContexts = new ArrayList<>(); + } + + private BuilderAndInfo randomSuggestionBuilderWithContextInfo() { + final BuilderAndInfo builderAndInfo = new BuilderAndInfo(); CompletionSuggestionBuilder testBuilder = new CompletionSuggestionBuilder(randomAsciiOfLengthBetween(2, 20)); switch (randomIntBetween(0, 3)) { case 0: @@ -60,38 +81,66 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe List payloads = new ArrayList<>(); Collections.addAll(payloads, generateRandomStringArray(5, 10, false, false)); maybeSet(testBuilder::payload, payloads); + Map> contextMap = new 
HashMap<>(); if (randomBoolean()) { int numContext = randomIntBetween(1, 5); - CategoryQueryContext[] contexts = new CategoryQueryContext[numContext]; + List contexts = new ArrayList<>(numContext); for (int i = 0; i < numContext; i++) { - contexts[i] = CategoryQueryContextTests.randomCategoryQueryContext(); + contexts.add(CategoryQueryContextTests.randomCategoryQueryContext()); } - testBuilder.categoryContexts(randomAsciiOfLength(10), contexts); + String name = randomAsciiOfLength(10); + contextMap.put(name, contexts); + builderAndInfo.catContexts.add(name); } if (randomBoolean()) { int numContext = randomIntBetween(1, 5); - GeoQueryContext[] contexts = new GeoQueryContext[numContext]; + List contexts = new ArrayList<>(numContext); for (int i = 0; i < numContext; i++) { - contexts[i] = GeoQueryContextTests.randomGeoQueryContext(); + contexts.add(GeoQueryContextTests.randomGeoQueryContext()); } - testBuilder.geoContexts(randomAsciiOfLength(10), contexts); + String name = randomAsciiOfLength(10); + contextMap.put(name, contexts); + builderAndInfo.geoContexts.add(name); } - return testBuilder; + testBuilder.contexts(contextMap); + builderAndInfo.builder = testBuilder; + return builderAndInfo; } @Override protected void assertSuggestionContext(SuggestionContext oldSuggestion, SuggestionContext newSuggestion) { + assertThat(oldSuggestion, instanceOf(CompletionSuggestionContext.class)); + assertThat(newSuggestion, instanceOf(CompletionSuggestionContext.class)); + CompletionSuggestionContext oldCompletionSuggestion = (CompletionSuggestionContext) oldSuggestion; + CompletionSuggestionContext newCompletionSuggestion = (CompletionSuggestionContext) newSuggestion; + assertEquals(oldCompletionSuggestion.getPayloadFields(), newCompletionSuggestion.getPayloadFields()); + assertEquals(oldCompletionSuggestion.getFuzzyOptions(), newCompletionSuggestion.getFuzzyOptions()); + assertEquals(oldCompletionSuggestion.getRegexOptions(), newCompletionSuggestion.getRegexOptions()); + 
assertEquals(oldCompletionSuggestion.getQueryContexts(), newCompletionSuggestion.getQueryContexts()); } @Override - public void testBuild() throws IOException { - // skip for now - } - - @Override - public void testFromXContent() throws IOException { - // skip for now + protected Tuple mockMapperServiceAndSuggestionBuilder( + IndexSettings idxSettings, AnalysisService mockAnalysisService, CompletionSuggestionBuilder suggestBuilder) { + final BuilderAndInfo builderAndInfo = randomSuggestionBuilderWithContextInfo(); + final MapperService mapperService = new MapperService(idxSettings, mockAnalysisService, null, + new IndicesModule().getMapperRegistry(), null) { + @Override + public MappedFieldType fullName(String fullName) { + CompletionFieldMapper.CompletionFieldType type = new CompletionFieldMapper.CompletionFieldType(); + List contextMappings = builderAndInfo.catContexts.stream() + .map(catContext -> new CategoryContextMapping.Builder(catContext).build()) + .collect(Collectors.toList()); + contextMappings.addAll(builderAndInfo.geoContexts.stream() + .map(geoContext -> new GeoContextMapping.Builder(geoContext).build()) + .collect(Collectors.toList())); + type.setContextMappings(new ContextMappings(contextMappings)); + return type; + } + }; + final CompletionSuggestionBuilder builder = builderAndInfo.builder; + return new Tuple<>(mapperService, builder); } @Override @@ -103,20 +152,20 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe builder.payload(payloads); break; case 1: - int numCategoryContext = randomIntBetween(1, 5); - CategoryQueryContext[] categoryContexts = new CategoryQueryContext[numCategoryContext]; - for (int i = 0; i < numCategoryContext; i++) { - categoryContexts[i] = CategoryQueryContextTests.randomCategoryQueryContext(); + int nCatContext = randomIntBetween(1, 5); + List contexts = new ArrayList<>(nCatContext); + for (int i = 0; i < nCatContext; i++) { + 
contexts.add(CategoryQueryContextTests.randomCategoryQueryContext()); } - builder.categoryContexts(randomAsciiOfLength(10), categoryContexts); + builder.contexts(Collections.singletonMap(randomAsciiOfLength(10), contexts)); break; case 2: - int numGeoContext = randomIntBetween(1, 5); - GeoQueryContext[] geoContexts = new GeoQueryContext[numGeoContext]; - for (int i = 0; i < numGeoContext; i++) { - geoContexts[i] = GeoQueryContextTests.randomGeoQueryContext(); + int nGeoContext = randomIntBetween(1, 5); + List geoContexts = new ArrayList<>(nGeoContext); + for (int i = 0; i < nGeoContext; i++) { + geoContexts.add(GeoQueryContextTests.randomGeoQueryContext()); } - builder.geoContexts(randomAsciiOfLength(10), geoContexts); + builder.contexts(Collections.singletonMap(randomAsciiOfLength(10), geoContexts)); break; case 3: builder.prefix(randomAsciiOfLength(10), FuzzyOptionsTests.randomFuzzyOptions()); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java index 471de9c3e932..c16b0ce645b5 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java @@ -202,15 +202,15 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { XContentBuilder builder = jsonBuilder().value("ezs42e44yx96"); XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes()); GeoContextMapping mapping = ContextBuilder.geo("geo").build(); - List queryContexts = mapping.parseQueryContext(parser); - assertThat(queryContexts.size(), equalTo(1 + 8)); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1 + 8)); Collection locations = new ArrayList<>(); locations.add("ezs42e"); addNeighbors("ezs42e", 
GeoContextMapping.DEFAULT_PRECISION, locations); - for (ContextMapping.QueryContext queryContext : queryContexts) { - assertThat(queryContext.context, isIn(locations)); - assertThat(queryContext.boost, equalTo(1)); - assertThat(queryContext.isPrefix, equalTo(false)); + for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) { + assertThat(internalQueryContext.context, isIn(locations)); + assertThat(internalQueryContext.boost, equalTo(1)); + assertThat(internalQueryContext.isPrefix, equalTo(false)); } } @@ -221,15 +221,15 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endObject(); XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes()); GeoContextMapping mapping = ContextBuilder.geo("geo").build(); - List queryContexts = mapping.parseQueryContext(parser); - assertThat(queryContexts.size(), equalTo(1 + 8)); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1 + 8)); Collection locations = new ArrayList<>(); locations.add("wh0n94"); addNeighbors("wh0n94", GeoContextMapping.DEFAULT_PRECISION, locations); - for (ContextMapping.QueryContext queryContext : queryContexts) { - assertThat(queryContext.context, isIn(locations)); - assertThat(queryContext.boost, equalTo(1)); - assertThat(queryContext.isPrefix, equalTo(false)); + for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) { + assertThat(internalQueryContext.context, isIn(locations)); + assertThat(internalQueryContext.boost, equalTo(1)); + assertThat(internalQueryContext.isPrefix, equalTo(false)); } } @@ -244,8 +244,8 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endObject(); XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes()); GeoContextMapping mapping = ContextBuilder.geo("geo").build(); - List queryContexts = mapping.parseQueryContext(parser); - 
assertThat(queryContexts.size(), equalTo(1 + 1 + 8 + 1 + 8 + 1 + 8)); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1 + 1 + 8 + 1 + 8 + 1 + 8)); Collection locations = new ArrayList<>(); locations.add("wh0n94"); locations.add("w"); @@ -254,10 +254,10 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { addNeighbors("wh", 2, locations); locations.add("wh0"); addNeighbors("wh0", 3, locations); - for (ContextMapping.QueryContext queryContext : queryContexts) { - assertThat(queryContext.context, isIn(locations)); - assertThat(queryContext.boost, equalTo(10)); - assertThat(queryContext.isPrefix, equalTo(queryContext.context.length() < GeoContextMapping.DEFAULT_PRECISION)); + for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) { + assertThat(internalQueryContext.context, isIn(locations)); + assertThat(internalQueryContext.boost, equalTo(10)); + assertThat(internalQueryContext.isPrefix, equalTo(internalQueryContext.context.length() < GeoContextMapping.DEFAULT_PRECISION)); } } @@ -282,8 +282,8 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endArray(); XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes()); GeoContextMapping mapping = ContextBuilder.geo("geo").build(); - List queryContexts = mapping.parseQueryContext(parser); - assertThat(queryContexts.size(), equalTo(1 + 1 + 8 + 1 + 8 + 1 + 8 + 1 + 1 + 8)); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1 + 1 + 8 + 1 + 8 + 1 + 8 + 1 + 1 + 8)); Collection firstLocations = new ArrayList<>(); firstLocations.add("wh0n94"); firstLocations.add("w"); @@ -296,15 +296,15 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { secondLocations.add("w5cx04"); secondLocations.add("w5cx0"); addNeighbors("w5cx0", 5, secondLocations); - for (ContextMapping.QueryContext 
queryContext : queryContexts) { - if (firstLocations.contains(queryContext.context)) { - assertThat(queryContext.boost, equalTo(10)); - } else if (secondLocations.contains(queryContext.context)) { - assertThat(queryContext.boost, equalTo(2)); + for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) { + if (firstLocations.contains(internalQueryContext.context)) { + assertThat(internalQueryContext.boost, equalTo(10)); + } else if (secondLocations.contains(internalQueryContext.context)) { + assertThat(internalQueryContext.boost, equalTo(2)); } else { - fail(queryContext.context + " was not expected"); + fail(internalQueryContext.context + " was not expected"); } - assertThat(queryContext.isPrefix, equalTo(queryContext.context.length() < GeoContextMapping.DEFAULT_PRECISION)); + assertThat(internalQueryContext.isPrefix, equalTo(internalQueryContext.context.length() < GeoContextMapping.DEFAULT_PRECISION)); } } @@ -325,8 +325,8 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endArray(); XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes()); GeoContextMapping mapping = ContextBuilder.geo("geo").build(); - List queryContexts = mapping.parseQueryContext(parser); - assertThat(queryContexts.size(), equalTo(1 + 1 + 8 + 1 + 8 + 1 + 8)); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1 + 1 + 8 + 1 + 8 + 1 + 8)); Collection firstLocations = new ArrayList<>(); firstLocations.add("wh0n94"); firstLocations.add("w"); @@ -336,15 +336,15 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { Collection secondLocations = new ArrayList<>(); secondLocations.add("w5cx04"); addNeighbors("w5cx04", 6, secondLocations); - for (ContextMapping.QueryContext queryContext : queryContexts) { - if (firstLocations.contains(queryContext.context)) { - assertThat(queryContext.boost, equalTo(10)); - } else if 
(secondLocations.contains(queryContext.context)) { - assertThat(queryContext.boost, equalTo(1)); + for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) { + if (firstLocations.contains(internalQueryContext.context)) { + assertThat(internalQueryContext.boost, equalTo(10)); + } else if (secondLocations.contains(internalQueryContext.context)) { + assertThat(internalQueryContext.boost, equalTo(1)); } else { - fail(queryContext.context + " was not expected"); + fail(internalQueryContext.context + " was not expected"); } - assertThat(queryContext.isPrefix, equalTo(queryContext.context.length() < GeoContextMapping.DEFAULT_PRECISION)); + assertThat(internalQueryContext.isPrefix, equalTo(internalQueryContext.context.length() < GeoContextMapping.DEFAULT_PRECISION)); } } } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoQueryContextTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoQueryContextTests.java index d26be5036e2f..1f724967820c 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoQueryContextTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/GeoQueryContextTests.java @@ -49,35 +49,6 @@ public class GeoQueryContextTests extends QueryContextTestCase return randomGeoQueryContext(); } - @Override - protected GeoQueryContext createMutation(GeoQueryContext original) throws IOException { - final GeoQueryContext.Builder builder = GeoQueryContext.builder(); - builder.setGeoPoint(original.getGeoPoint()).setBoost(original.getBoost()) - .setNeighbours(original.getNeighbours()).setPrecision(original.getPrecision()); - switch (randomIntBetween(0, 3)) { - case 0: - builder.setGeoPoint(randomValueOtherThan(original.getGeoPoint() ,() -> - new GeoPoint(randomDouble(), randomDouble()))); - break; - case 1: - builder.setBoost(randomValueOtherThan(original.getBoost() ,() -> randomIntBetween(1, 5))); - break; - case 2: - 
builder.setPrecision(randomValueOtherThan(original.getPrecision() ,() -> randomIntBetween(1, 12))); - break; - case 3: - builder.setNeighbours(randomValueOtherThan(original.getNeighbours(), () -> { - List newNeighbours = new ArrayList<>(); - for (int i = 0; i < randomIntBetween(1, 12); i++) { - newNeighbours.add(randomIntBetween(1, 12)); - } - return newNeighbours; - })); - break; - } - return builder.build(); - } - @Override protected GeoQueryContext prototype() { return GeoQueryContext.PROTOTYPE; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/QueryContextTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/QueryContextTestCase.java index b4a6a5b1da0b..78b73e68890f 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/QueryContextTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/QueryContextTestCase.java @@ -26,24 +26,27 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.suggest.completion.context.QueryContext; +import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import static junit.framework.TestCase.assertEquals; -public abstract class QueryContextTestCase extends WritableTestCase { + +public abstract class QueryContextTestCase extends ESTestCase { private static final int NUMBER_OF_RUNS = 20; + /** + * create random model that is put under test + */ + protected abstract QC createTestModel(); + /** * query context prototype to read serialized format */ protected abstract QC prototype(); - @Override - protected QC readFrom(StreamInput in) throws IOException { - return (QC) prototype().readFrom(in); - } - public void testToXContext() throws IOException { for (int i = 0; i < NUMBER_OF_RUNS; i++) { QueryContext toXContent = createTestModel(); From 
987f2f5aa8575bbbee2ba524e9e5fa48abe9391d Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Sat, 5 Mar 2016 01:07:57 -0500 Subject: [PATCH 130/320] cleanup --- .../completion/CompletionSuggester.java | 2 +- .../CompletionSuggestionBuilder.java | 53 +++++++++++++------ .../CompletionSuggestionContext.java | 12 ++--- .../suggest/completion/FuzzyOptions.java | 28 +++++----- .../suggest/completion/RegexOptions.java | 21 ++++---- .../completion/context/ContextMapping.java | 8 ++- .../CompletionSuggesterBuilderTests.java | 1 + 7 files changed, 72 insertions(+), 53 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index be90a2e7e73e..e3953c8e0b49 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -84,7 +84,7 @@ public class CompletionSuggester extends Suggester } // collect payloads final Map> payload = new HashMap<>(0); - Set payloadFields = suggestionContext.getPayloadFields(); + List payloadFields = suggestionContext.getPayloadFields(); if (payloadFields.isEmpty() == false) { final int readerIndex = ReaderUtil.subIndex(suggestDoc.doc, leaves); final LeafReaderContext subReaderContext = leaves.get(readerIndex); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 38242a29ae4b..141e41e826b3 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.search.suggest.completion; -import org.elasticsearch.ElasticsearchException; 
+import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -64,9 +64,9 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder TLP_PARSER = - new ObjectParser<>(CompletionSuggestionBuilder.SUGGESTION_NAME, null); + new ObjectParser<>(SUGGESTION_NAME, null); static { - TLP_PARSER.declareStringArray(CompletionSuggestionBuilder::payload, CompletionSuggestionBuilder.PAYLOAD_FIELD); + TLP_PARSER.declareStringArray(CompletionSuggestionBuilder::payload, PAYLOAD_FIELD); TLP_PARSER.declareField((parser, completionSuggestionContext, context) -> { if (parser.currentToken() == XContentParser.Token.VALUE_BOOLEAN) { if (parser.booleanValue()) { @@ -90,7 +90,7 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder> queryContexts = Collections.emptyMap(); - private Set payloadFields = Collections.emptySet(); + private List payloadFields = Collections.emptyList(); CompletionFieldMapper.CompletionFieldType getFieldType() { return this.fieldType; @@ -67,15 +65,11 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest this.queryContexts = queryContexts; } - void setPayloadFields(Set fields) { + void setPayloadFields(List fields) { this.payloadFields = fields; } - void setPayloadFields(List fields) { - setPayloadFields(new HashSet<>(fields)); - } - - Set getPayloadFields() { + List getPayloadFields() { return payloadFields; } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java index aac58d7cb368..709124443bd1 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java @@ -40,20 +40,20 @@ import java.util.Objects; */ public class 
FuzzyOptions implements ToXContent, Writeable { static final ParseField FUZZY_OPTIONS = new ParseField("fuzzy"); - static final ParseField TRANSPOSITION_FIELD = new ParseField("transpositions"); - static final ParseField MIN_LENGTH_FIELD = new ParseField("min_length"); - static final ParseField PREFIX_LENGTH_FIELD = new ParseField("prefix_length"); - static final ParseField UNICODE_AWARE_FIELD = new ParseField("unicode_aware"); - static final ParseField MAX_DETERMINIZED_STATES_FIELD = new ParseField("max_determinized_states"); + private static final ParseField TRANSPOSITION_FIELD = new ParseField("transpositions"); + private static final ParseField MIN_LENGTH_FIELD = new ParseField("min_length"); + private static final ParseField PREFIX_LENGTH_FIELD = new ParseField("prefix_length"); + private static final ParseField UNICODE_AWARE_FIELD = new ParseField("unicode_aware"); + private static final ParseField MAX_DETERMINIZED_STATES_FIELD = new ParseField("max_determinized_states"); - static ObjectParser FUZZY_PARSER = new ObjectParser<>(FUZZY_OPTIONS.getPreferredName(), Builder::new); + private static ObjectParser PARSER = new ObjectParser<>(FUZZY_OPTIONS.getPreferredName(), Builder::new); static { - FUZZY_PARSER.declareInt(FuzzyOptions.Builder::setFuzzyMinLength, MIN_LENGTH_FIELD); - FUZZY_PARSER.declareInt(FuzzyOptions.Builder::setMaxDeterminizedStates, MAX_DETERMINIZED_STATES_FIELD); - FUZZY_PARSER.declareBoolean(FuzzyOptions.Builder::setUnicodeAware, UNICODE_AWARE_FIELD); - FUZZY_PARSER.declareInt(FuzzyOptions.Builder::setFuzzyPrefixLength, PREFIX_LENGTH_FIELD); - FUZZY_PARSER.declareBoolean(FuzzyOptions.Builder::setTranspositions, TRANSPOSITION_FIELD); - FUZZY_PARSER.declareValue((a, b) -> { + PARSER.declareInt(Builder::setFuzzyMinLength, MIN_LENGTH_FIELD); + PARSER.declareInt(Builder::setMaxDeterminizedStates, MAX_DETERMINIZED_STATES_FIELD); + PARSER.declareBoolean(Builder::setUnicodeAware, UNICODE_AWARE_FIELD); + PARSER.declareInt(Builder::setFuzzyPrefixLength, 
PREFIX_LENGTH_FIELD); + PARSER.declareBoolean(Builder::setTranspositions, TRANSPOSITION_FIELD); + PARSER.declareValue((a, b) -> { try { a.setFuzziness(Fuzziness.parse(b).asDistance()); } catch (IOException e) { @@ -82,8 +82,8 @@ public class FuzzyOptions implements ToXContent, Writeable { private FuzzyOptions() { } - public static FuzzyOptions parse(XContentParser parser) throws IOException { - return FUZZY_PARSER.parse(parser).build(); + static FuzzyOptions parse(XContentParser parser) throws IOException { + return PARSER.parse(parser).build(); } public static Builder builder() { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java index a1dcec2d5557..81e524d6e3f5 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java @@ -38,17 +38,14 @@ import java.io.IOException; * Regular expression options for completion suggester */ public class RegexOptions implements ToXContent, Writeable { - static final String NAME = "regex"; - static final ParseField REGEX_OPTIONS = new ParseField(NAME); - static final ParseField FLAGS_VALUE = new ParseField("flags", "flags_value"); - static final ParseField MAX_DETERMINIZED_STATES = new ParseField("max_determinized_states"); + static final ParseField REGEX_OPTIONS = new ParseField("regex"); + private static final ParseField FLAGS_VALUE = new ParseField("flags", "flags_value"); + private static final ParseField MAX_DETERMINIZED_STATES = new ParseField("max_determinized_states"); - - private static ObjectParser REGEXP_PARSER = - new ObjectParser<>(REGEX_OPTIONS.getPreferredName(), RegexOptions.Builder::new); + private static ObjectParser PARSER = new ObjectParser<>(REGEX_OPTIONS.getPreferredName(), Builder::new); static { - 
REGEXP_PARSER.declareInt(RegexOptions.Builder::setMaxDeterminizedStates, MAX_DETERMINIZED_STATES); - REGEXP_PARSER.declareField((parser, builder, aVoid) -> { + PARSER.declareInt(Builder::setMaxDeterminizedStates, MAX_DETERMINIZED_STATES); + PARSER.declareField((parser, builder, aVoid) -> { if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { builder.setFlags(parser.text()); } else if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) { @@ -58,7 +55,7 @@ public class RegexOptions implements ToXContent, Writeable { + " " + FLAGS_VALUE.getPreferredName() + " supports string or number"); } }, FLAGS_VALUE, ObjectParser.ValueType.VALUE); - REGEXP_PARSER.declareStringOrNull(RegexOptions.Builder::setFlags, FLAGS_VALUE); + PARSER.declareStringOrNull(Builder::setFlags, FLAGS_VALUE); } private int flagsValue; @@ -91,8 +88,8 @@ public class RegexOptions implements ToXContent, Writeable { return new Builder(); } - public static RegexOptions parse(XContentParser parser) throws IOException { - return REGEXP_PARSER.parse(parser).build(); + static RegexOptions parse(XContentParser parser) throws IOException { + return PARSER.parse(parser).build(); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java index 501f4d153d12..959a749a858f 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java @@ -96,12 +96,15 @@ public abstract class ContextMapping implements ToXConte */ protected abstract Set parseContext(ParseContext.Document document); + /** + * Prototype for the query context + */ protected abstract T prototype(); /** * Parses query contexts for this mapper */ - public List parseQueryContext(XContentParser parser) throws IOException, ElasticsearchParseException { + 
public final List parseQueryContext(XContentParser parser) throws IOException, ElasticsearchParseException { List queryContexts = new ArrayList<>(); Token token = parser.nextToken(); if (token == Token.START_OBJECT || token == Token.VALUE_STRING) { @@ -114,6 +117,9 @@ public abstract class ContextMapping implements ToXConte return toInternalQueryContexts(queryContexts); } + /** + * Convert query contexts to common representation + */ protected abstract List toInternalQueryContexts(List queryContexts); /** diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java index c92d2a1eaf3b..478c85fd5d90 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java @@ -113,6 +113,7 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe assertThat(newSuggestion, instanceOf(CompletionSuggestionContext.class)); CompletionSuggestionContext oldCompletionSuggestion = (CompletionSuggestionContext) oldSuggestion; CompletionSuggestionContext newCompletionSuggestion = (CompletionSuggestionContext) newSuggestion; + assertEquals(oldCompletionSuggestion.getFieldType(), newCompletionSuggestion.getFieldType()); assertEquals(oldCompletionSuggestion.getPayloadFields(), newCompletionSuggestion.getPayloadFields()); assertEquals(oldCompletionSuggestion.getFuzzyOptions(), newCompletionSuggestion.getFuzzyOptions()); assertEquals(oldCompletionSuggestion.getRegexOptions(), newCompletionSuggestion.getRegexOptions()); From 55c58c56a887dc53cabbfaf8fa984e388c847ccf Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Wed, 9 Mar 2016 11:14:01 -0500 Subject: [PATCH 131/320] incorporate feedback --- .../CompletionSuggestionBuilder.java | 39 ++++++++++++------- 
.../CompletionSuggesterBuilderTests.java | 3 +- 2 files changed, 25 insertions(+), 17 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 141e41e826b3..00592e6c9841 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -63,10 +63,10 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder TLP_PARSER = + private static ObjectParser TLP_PARSER = new ObjectParser<>(SUGGESTION_NAME, null); static { - TLP_PARSER.declareStringArray(CompletionSuggestionBuilder::payload, PAYLOAD_FIELD); + TLP_PARSER.declareStringArray(CompletionSuggestionBuilder.InnerBuilder::payload, PAYLOAD_FIELD); TLP_PARSER.declareField((parser, completionSuggestionContext, context) -> { if (parser.currentToken() == XContentParser.Token.VALUE_BOOLEAN) { if (parser.booleanValue()) { @@ -80,10 +80,10 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder completionSuggestionContext.regexOptions = RegexOptions.parse(parser), RegexOptions.REGEX_OPTIONS, ObjectParser.ValueType.OBJECT); - TLP_PARSER.declareString(CompletionSuggestionBuilder::field, SuggestUtils.Fields.FIELD); - TLP_PARSER.declareString(CompletionSuggestionBuilder::analyzer, SuggestUtils.Fields.ANALYZER); - TLP_PARSER.declareInt(CompletionSuggestionBuilder::size, SuggestUtils.Fields.SIZE); - TLP_PARSER.declareInt(CompletionSuggestionBuilder::shardSize, SuggestUtils.Fields.SHARD_SIZE); + TLP_PARSER.declareString(CompletionSuggestionBuilder.InnerBuilder::field, SuggestUtils.Fields.FIELD); + TLP_PARSER.declareString(CompletionSuggestionBuilder.InnerBuilder::analyzer, SuggestUtils.Fields.ANALYZER); + TLP_PARSER.declareInt(CompletionSuggestionBuilder.InnerBuilder::size, 
SuggestUtils.Fields.SIZE); + TLP_PARSER.declareInt(CompletionSuggestionBuilder.InnerBuilder::shardSize, SuggestUtils.Fields.SHARD_SIZE); TLP_PARSER.declareField((p, v, c) -> { // Copy the current structure. We will parse, once the mapping is provided XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); @@ -93,10 +93,10 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder payloadFields = Collections.emptyList(); + protected FuzzyOptions fuzzyOptions; + protected RegexOptions regexOptions; + protected BytesReference contextBytes = null; + protected List payloadFields = Collections.emptyList(); public CompletionSuggestionBuilder(String fieldname) { super(fieldname); @@ -167,6 +167,7 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder fields) { + Objects.requireNonNull(fields, "payload must not be null"); this.payloadFields = fields; return this; } @@ -178,6 +179,7 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder> queryContexts) { + Objects.requireNonNull(queryContexts, "contexts must not be null"); try { XContentBuilder contentBuilder = XContentFactory.jsonBuilder(); contentBuilder.startObject(); @@ -196,10 +198,17 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder(mapperService, builder); + return new Tuple<>(mapperService, builderAndInfo.builder); } @Override From e7cffa5e9fd3c2eb177eb57e86a55c13006c47ad Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Wed, 9 Mar 2016 11:37:15 -0500 Subject: [PATCH 132/320] simplify mocking field type in SuggestionBuilderTests --- .../AbstractSuggestionBuilderTestCase.java | 35 +++++++++---------- .../CompletionSuggesterBuilderTests.java | 28 ++++++--------- 2 files changed, 26 insertions(+), 37 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index 2c310b04870b..bfbac27a8ca1 
100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -213,20 +213,12 @@ public abstract class AbstractSuggestionBuilderTestCase mockMapperServiceAndSuggestionBuilder( - IndexSettings idxSettings, AnalysisService mockAnalysisService, SB suggestBuilder) { - final MapperService mapperService = new MapperService(idxSettings, mockAnalysisService, null, - new IndicesModule().getMapperRegistry(), null) { - @Override - public MappedFieldType fullName(String fullName) { - StringFieldType type = new StringFieldType(); - if (randomBoolean()) { - type.setSearchAnalyzer(new NamedAnalyzer("foo", new WhitespaceAnalyzer())); - } - return type; - } - }; - return new Tuple<>(mapperService, suggestBuilder); + protected Tuple randomFieldTypeAndSuggestionBuilder() { + StringFieldType type = new StringFieldType(); + if (randomBoolean()) { + type.setSearchAnalyzer(new NamedAnalyzer("foo", new WhitespaceAnalyzer())); + } + return new Tuple<>(type, randomTestBuilder()); } /** @@ -246,12 +238,17 @@ public abstract class AbstractSuggestionBuilderTestCase mapperServiceSBTuple = - mockMapperServiceAndSuggestionBuilder(idxSettings, mockAnalysisService, suggestionBuilder); - suggestionBuilder = mapperServiceSBTuple.v2(); + final Tuple mappedFieldTypeSBTuple = randomFieldTypeAndSuggestionBuilder(); + final MapperService mapperService = new MapperService(idxSettings, mockAnalysisService, null, + new IndicesModule().getMapperRegistry(), null) { + @Override + public MappedFieldType fullName(String fullName) { + return mappedFieldTypeSBTuple.v1(); + } + }; + SB suggestionBuilder = mappedFieldTypeSBTuple.v2(); QueryShardContext mockShardContext = new QueryShardContext(idxSettings, - null, null, mapperServiceSBTuple.v1(), null, scriptService, null) { + null, null, mapperService, null, scriptService, null) { @Override public MappedFieldType 
fieldMapper(String name) { StringFieldMapper.Builder builder = new StringFieldMapper.Builder(name); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java index 137030e4c15d..1a8efbb748de 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java @@ -122,25 +122,17 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe } @Override - protected Tuple mockMapperServiceAndSuggestionBuilder( - IndexSettings idxSettings, AnalysisService mockAnalysisService, CompletionSuggestionBuilder suggestBuilder) { + protected Tuple randomFieldTypeAndSuggestionBuilder() { final BuilderAndInfo builderAndInfo = randomSuggestionBuilderWithContextInfo(); - final MapperService mapperService = new MapperService(idxSettings, mockAnalysisService, null, - new IndicesModule().getMapperRegistry(), null) { - @Override - public MappedFieldType fullName(String fullName) { - CompletionFieldMapper.CompletionFieldType type = new CompletionFieldMapper.CompletionFieldType(); - List contextMappings = builderAndInfo.catContexts.stream() - .map(catContext -> new CategoryContextMapping.Builder(catContext).build()) - .collect(Collectors.toList()); - contextMappings.addAll(builderAndInfo.geoContexts.stream() - .map(geoContext -> new GeoContextMapping.Builder(geoContext).build()) - .collect(Collectors.toList())); - type.setContextMappings(new ContextMappings(contextMappings)); - return type; - } - }; - return new Tuple<>(mapperService, builderAndInfo.builder); + CompletionFieldMapper.CompletionFieldType type = new CompletionFieldMapper.CompletionFieldType(); + List contextMappings = builderAndInfo.catContexts.stream() + .map(catContext -> new 
CategoryContextMapping.Builder(catContext).build()) + .collect(Collectors.toList()); + contextMappings.addAll(builderAndInfo.geoContexts.stream() + .map(geoContext -> new GeoContextMapping.Builder(geoContext).build()) + .collect(Collectors.toList())); + type.setContextMappings(new ContextMappings(contextMappings)); + return new Tuple<>(type, builderAndInfo.builder); } @Override From db534347e5f543621458252d5bffc2229cb239e4 Mon Sep 17 00:00:00 2001 From: Dongjoon Hyun Date: Wed, 9 Mar 2016 10:01:15 -0800 Subject: [PATCH 133/320] Fix typos in comments/strings of `test` module. --- test/build.gradle | 2 +- test/framework/build.gradle | 2 +- .../org/elasticsearch/bootstrap/BootstrapForTesting.java | 2 +- .../main/java/org/elasticsearch/test/ESIntegTestCase.java | 8 ++++---- .../java/org/elasticsearch/test/ESSingleNodeTestCase.java | 4 ++-- .../src/main/java/org/elasticsearch/test/ESTestCase.java | 2 +- .../org/elasticsearch/test/InternalSettingsPlugin.java | 2 +- .../test/disruption/BlockClusterStateProcessing.java | 2 +- .../test/hamcrest/ElasticsearchAssertions.java | 6 +++--- 9 files changed, 15 insertions(+), 15 deletions(-) diff --git a/test/build.gradle b/test/build.gradle index 564f8673307e..7e1b5725147b 100644 --- a/test/build.gradle +++ b/test/build.gradle @@ -27,7 +27,7 @@ subprojects { apply plugin: 'elasticsearch.build' - // the main files are actually test files, so use the appopriate forbidden api sigs + // the main files are actually test files, so use the appropriate forbidden api sigs forbiddenApisMain { bundledSignatures = ['jdk-unsafe', 'jdk-deprecated'] signaturesURLs = [PrecommitTasks.getResource('/forbidden/all-signatures.txt'), diff --git a/test/framework/build.gradle b/test/framework/build.gradle index 355459f99f59..8ee5fbfe81a0 100644 --- a/test/framework/build.gradle +++ b/test/framework/build.gradle @@ -36,7 +36,7 @@ dependencies { compileJava.options.compilerArgs << '-Xlint:-cast,-rawtypes,-try,-unchecked' 
compileTestJava.options.compilerArgs << '-Xlint:-rawtypes' -// the main files are actually test files, so use the appopriate forbidden api sigs +// the main files are actually test files, so use the appropriate forbidden api sigs forbiddenApisMain { bundledSignatures = ['jdk-unsafe', 'jdk-deprecated'] signaturesURLs = [PrecommitTasks.getResource('/forbidden/all-signatures.txt'), diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index aa77c670a42c..68eb0420b395 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -166,7 +166,7 @@ public class BootstrapForTesting { } /** - * we dont know which codesources belong to which plugin, so just remove the permission from key codebases + * we don't know which codesources belong to which plugin, so just remove the permission from key codebases * like core, test-framework, etc. this way tests fail if accesscontroller blocks are missing. */ @SuppressForbidden(reason = "accesses fully qualified URLs to configure security") diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index a3161f4090f9..5d0379af2022 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -1696,7 +1696,7 @@ public abstract class ESIntegTestCase extends ESTestCase { /** Helper method to create list of plugins without specifying generic types. 
*/ @SafeVarargs - @SuppressWarnings("varargs") // due to type erasure, the varargs type is non-reifiable, which casues this warning + @SuppressWarnings("varargs") // due to type erasure, the varargs type is non-reifiable, which causes this warning protected final Collection> pluginList(Class... plugins) { return Arrays.asList(plugins); } @@ -1704,7 +1704,7 @@ public abstract class ESIntegTestCase extends ESTestCase { /** * This method is used to obtain additional settings for clients created by the internal cluster. * These settings will be applied on the client in addition to some randomized settings defined in - * the cluster. These setttings will also override any other settings the internal cluster might + * the cluster. These settings will also override any other settings the internal cluster might * add by default. */ protected Settings transportClientSettings() { @@ -1840,7 +1840,7 @@ public abstract class ESIntegTestCase extends ESTestCase { } @Override public String description() { - return "a test plugin that registeres index.tests.seed as an index setting"; + return "a test plugin that registers index.tests.seed as an index setting"; } public void onModule(SettingsModule module) { module.registerSetting(INDEX_TEST_SEED_SETTING); @@ -1981,7 +1981,7 @@ public abstract class ESIntegTestCase extends ESTestCase { @After public final void after() throws Exception { printTestMessage("finished"); - // Deleting indices is going to clear search contexts implicitely so we + // Deleting indices is going to clear search contexts implicitly so we // need to check that there are no more in-flight search contexts before // we remove indices super.ensureAllSearchContextsReleased(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index fc7134002623..2988bf169e97 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java 
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -108,7 +108,7 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { public void setUp() throws Exception { super.setUp(); // Create the node lazily, on the first test. This is ok because we do not randomize any settings, - // only the cluster name. This allows us to have overriden properties for plugins and the version to use. + // only the cluster name. This allows us to have overridden properties for plugins and the version to use. if (NODE == null) { startNode(); } @@ -153,7 +153,7 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { /** Helper method to create list of plugins without specifying generic types. */ @SafeVarargs - @SuppressWarnings("varargs") // due to type erasure, the varargs type is non-reifiable, which casues this warning + @SuppressWarnings("varargs") // due to type erasure, the varargs type is non-reifiable, which causes this warning protected final Collection> pluginList(Class... 
plugins) { return Arrays.asList(plugins); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 84d887338024..8afffeb5e8cb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -130,7 +130,7 @@ public abstract class ESTestCase extends LuceneTestCase { protected void afterIfFailed(List errors) { } - /** called after a test is finished, but only if succesfull */ + /** called after a test is finished, but only if successful */ protected void afterIfSuccessful() throws Exception { } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java index 64719f0f9de2..d8a8a9304a15 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java @@ -31,7 +31,7 @@ public final class InternalSettingsPlugin extends Plugin { @Override public String description() { - return "a plugin that allows to set values for internal settings which are can't be set via the ordinary API without this pluging installed"; + return "a plugin that allows to set values for internal settings which are can't be set via the ordinary API without this plugin installed"; } public static final Setting VERSION_CREATED = Setting.intSetting("index.version.created", 0, false, Setting.Scope.INDEX); diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java index e318843e84f7..881fa43ce47c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java @@ -56,7 +56,7 @@ public class BlockClusterStateProcessing extends SingleNodeDisruption { } logger.info("delaying cluster state updates on node [{}]", disruptionNodeCopy); boolean success = disruptionLatch.compareAndSet(null, new CountDownLatch(1)); - assert success : "startDisrupting called without waiting on stopDistrupting to complete"; + assert success : "startDisrupting called without waiting on stopDisrupting to complete"; final CountDownLatch started = new CountDownLatch(1); clusterService.submitStateUpdateTask("service_disruption_block", new ClusterStateUpdateTask(Priority.IMMEDIATE) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index cb3bbc7436b5..7adf6d2b4b95 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -144,7 +144,7 @@ public class ElasticsearchAssertions { } /** - * Checks that all shard requests of a replicated brodcast request failed due to a cluster block + * Checks that all shard requests of a replicated broadcast request failed due to a cluster block * * @param replicatedBroadcastResponse the response that should only contain failed shard responses * @@ -716,7 +716,7 @@ public class ElasticsearchAssertions { /** * Applies basic assertions on the SearchResponse. This method checks if all shards were successful, if - * any of the shards threw an exception and if the response is serializeable. + * any of the shards threw an exception and if the response is serializable. 
*/ public static SearchResponse assertSearchResponse(SearchRequestBuilder request) { return assertSearchResponse(request.get()); @@ -724,7 +724,7 @@ public class ElasticsearchAssertions { /** * Applies basic assertions on the SearchResponse. This method checks if all shards were successful, if - * any of the shards threw an exception and if the response is serializeable. + * any of the shards threw an exception and if the response is serializable. */ public static SearchResponse assertSearchResponse(SearchResponse response) { assertNoFailures(response); From e72dac91b3c63e92c53d2beb8d9d4d5c234d3235 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 8 Mar 2016 10:41:17 +0100 Subject: [PATCH 134/320] Use index UUID to lookup indices on IndicesService Today we use the index name to lookup index instances on the IndicesService which applied to search reqeusts but also to index deletion etc. This commit moves the interface to expcet and `Index` instance which is a tuple and looks up the index by uuid rather than by name. This prevents accidential modificaiton of the wrong index if and index is recreated or searching from the _wrong_ index in such a case. Accessing an index that has the same name but different UUID will now result in an IndexNotFoundException. 
Closes #17001 --- .../TransportClearIndicesCacheAction.java | 2 +- .../TransportIndicesSegmentsAction.java | 2 +- .../action/bulk/TransportShardBulkAction.java | 6 +- .../action/search/ShardSearchFailure.java | 6 +- .../suggest/TransportSuggestAction.java | 2 +- .../TransportReplicationAction.java | 7 +- .../InstanceShardOperationRequest.java | 13 ++- ...ransportInstanceSingleOperationAction.java | 2 +- .../TransportShardMultiTermsVectorAction.java | 6 +- .../action/update/TransportUpdateAction.java | 16 ++-- .../action/update/UpdateRequest.java | 5 +- .../cluster/ClusterChangedEvent.java | 16 ++-- .../metadata/MetaDataCreateIndexService.java | 12 +-- .../metadata/MetaDataIndexAliasesService.java | 7 +- .../metadata/MetaDataMappingService.java | 73 ++++++++------- .../elasticsearch/indices/IndicesService.java | 88 ++++++++----------- .../cluster/IndicesClusterStateService.java | 37 ++++---- .../indices/recovery/RecoverySource.java | 2 +- .../indices/store/IndicesStore.java | 2 +- .../TransportNodesListShardStoreMetaData.java | 4 +- .../elasticsearch/search/SearchException.java | 9 +- .../elasticsearch/search/SearchService.java | 7 +- .../search/SearchShardTarget.java | 52 +++++------ .../controller/SearchPhaseController.java | 2 +- .../fetch/ScrollQueryFetchSearchResult.java | 3 +- .../search/internal/InternalSearchHit.java | 3 +- .../search/internal/InternalSearchHits.java | 5 +- .../internal/ShardSearchLocalRequest.java | 18 ++-- .../search/internal/ShardSearchRequest.java | 5 +- .../internal/ShardSearchTransportRequest.java | 7 +- .../search/query/ScrollQuerySearchResult.java | 3 +- .../search/suggest/SuggestParseElement.java | 8 +- .../suggest/SuggestionSearchContext.java | 24 ++--- .../suggest/phrase/PhraseSuggester.java | 2 +- .../shards/IndicesShardStoreRequestIT.java | 5 +- ...ortInstanceSingleOperationActionTests.java | 14 +-- .../cluster/ClusterChangedEventTests.java | 4 +- .../index/IndexWithShadowReplicasIT.java | 10 ++- 
.../query/plugin/CustomQueryParserIT.java | 2 +- .../index/shard/IndexShardTests.java | 56 ++++++------ .../IndexingMemoryControllerTests.java | 10 +-- ...dicesLifecycleListenerSingleNodeTests.java | 10 ++- .../indices/IndicesServiceTests.java | 8 +- .../flush/SyncedFlushSingleNodeTests.java | 12 +-- .../indices/recovery/IndexRecoveryIT.java | 7 +- .../indices/settings/UpdateSettingsIT.java | 4 +- .../indices/state/RareClusterStateIT.java | 4 +- .../store/IndicesStoreIntegrationIT.java | 3 +- .../recovery/RecoveriesCollectionTests.java | 2 +- .../search/child/ParentFieldLoadingIT.java | 4 +- .../messy/tests/GeoShapeIntegrationTests.java | 4 +- .../elasticsearch/test/ESIntegTestCase.java | 13 ++- .../test/ESSingleNodeTestCase.java | 11 ++- .../test/InternalTestCluster.java | 5 +- 54 files changed, 334 insertions(+), 310 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java index bc229d72b1b4..7bc9f50252ae 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java @@ -77,7 +77,7 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastByNodeAc @Override protected EmptyResult shardOperation(ClearIndicesCacheRequest request, ShardRouting shardRouting) { - IndexService service = indicesService.indexService(shardRouting.getIndexName()); + IndexService service = indicesService.indexService(shardRouting.index()); if (service != null) { IndexShard shard = service.getShardOrNull(shardRouting.id()); boolean clearedAtLeastOne = false; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java 
b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java index fd45e22a171c..f700a198e2c4 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java @@ -93,7 +93,7 @@ public class TransportIndicesSegmentsAction extends TransportBroadcastByNodeActi @Override protected ShardSegments shardOperation(IndicesSegmentsRequest request, ShardRouting shardRouting) { - IndexService indexService = indicesService.indexServiceSafe(shardRouting.getIndexName()); + IndexService indexService = indicesService.indexServiceSafe(shardRouting.index()); IndexShard indexShard = indexService.getShard(shardRouting.id()); return new ShardSegments(indexShard.routingEntry(), indexShard.segments(request.verbose())); } diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 30f6b03a116b..f1eeae35e08a 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -47,6 +47,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.Engine; @@ -104,8 +105,9 @@ public class TransportShardBulkAction extends TransportReplicationAction shardOperationOnPrimary(MetaData metaData, BulkShardRequest request) { - final IndexService indexService = indicesService.indexServiceSafe(request.index()); - final IndexShard indexShard = 
indexService.getShard(request.shardId().id()); + ShardId shardId = request.shardId(); + final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); + final IndexShard indexShard = indexService.getShard(shardId.getId()); long[] preVersions = new long[request.items().length]; VersionType[] preVersionTypes = new VersionType[request.items().length]; diff --git a/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java b/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java index 0139186562cf..2a01eb4e1c6b 100644 --- a/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java +++ b/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java @@ -32,8 +32,6 @@ import org.elasticsearch.search.SearchShardTarget; import java.io.IOException; -import static org.elasticsearch.search.SearchShardTarget.readSearchShardTarget; - /** * Represents a failure to search on a specific shard. */ @@ -106,7 +104,7 @@ public class ShardSearchFailure implements ShardOperationFailedException { @Override public int shardId() { if (shardTarget != null) { - return shardTarget.shardId(); + return shardTarget.shardId().id(); } return -1; } @@ -133,7 +131,7 @@ public class ShardSearchFailure implements ShardOperationFailedException { @Override public void readFrom(StreamInput in) throws IOException { if (in.readBoolean()) { - shardTarget = readSearchShardTarget(in); + shardTarget = new SearchShardTarget(in); } reason = in.readString(); status = RestStatus.readFrom(in); diff --git a/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java b/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java index 0ed985785577..7a3540607749 100644 --- a/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java +++ b/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java @@ -143,7 +143,7 @@ public class TransportSuggestAction 
extends TransportBroadcastAction() { diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java b/core/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java index ccdf934958dc..94b0e745a8e8 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java @@ -75,12 +75,12 @@ public class TransportShardMultiTermsVectorAction extends TransportSingleShardAc @Override protected MultiTermVectorsShardResponse shardOperation(MultiTermVectorsShardRequest request, ShardId shardId) { - MultiTermVectorsShardResponse response = new MultiTermVectorsShardResponse(); + final MultiTermVectorsShardResponse response = new MultiTermVectorsShardResponse(); + final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); + final IndexShard indexShard = indexService.getShard(shardId.id()); for (int i = 0; i < request.locations.size(); i++) { TermVectorsRequest termVectorsRequest = request.requests.get(i); try { - IndexService indexService = indicesService.indexServiceSafe(request.index()); - IndexShard indexShard = indexService.getShard(shardId.id()); TermVectorsResponse termVectorsResponse = TermVectorsService.getTermVectors(indexShard, termVectorsRequest); termVectorsResponse.updateTookInMillis(termVectorsRequest.startTime()); response.add(request.locations.get(i), termVectorsResponse); diff --git a/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java index 0aefa825f2a3..75feeb8fbca4 100644 --- a/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java +++ b/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java @@ -51,6 +51,7 @@ import 
org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndexAlreadyExistsException; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; @@ -147,8 +148,8 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio @Override protected ShardIterator shards(ClusterState clusterState, UpdateRequest request) { - if (request.shardId() != -1) { - return clusterState.routingTable().index(request.concreteIndex()).shard(request.shardId()).primaryShardIt(); + if (request.getShardId() != null) { + return clusterState.routingTable().index(request.concreteIndex()).shard(request.getShardId().getId()).primaryShardIt(); } ShardIterator shardIterator = clusterService.operationRouting() .indexShards(clusterState, request.concreteIndex(), request.type(), request.id(), request.routing()); @@ -167,8 +168,9 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio } protected void shardOperation(final UpdateRequest request, final ActionListener listener, final int retryCount) { - final IndexService indexService = indicesService.indexServiceSafe(request.concreteIndex()); - final IndexShard indexShard = indexService.getShard(request.shardId()); + final ShardId shardId = request.getShardId(); + final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); + final IndexShard indexShard = indexService.getShard(shardId.getId()); final UpdateHelper.Result result = updateHelper.prepare(request, indexShard); switch (result.operation()) { case UPSERT: @@ -194,7 +196,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio if (e instanceof VersionConflictEngineException) { if (retryCount < request.retryOnConflict()) { 
logger.trace("Retry attempt [{}] of [{}] on version conflict on [{}][{}][{}]", - retryCount + 1, request.retryOnConflict(), request.index(), request.shardId(), request.id()); + retryCount + 1, request.retryOnConflict(), request.index(), request.getShardId(), request.id()); threadPool.executor(executor()).execute(new ActionRunnable(listener) { @Override protected void doRun() { @@ -267,9 +269,9 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio break; case NONE: UpdateResponse update = result.action(); - IndexService indexServiceOrNull = indicesService.indexService(request.concreteIndex()); + IndexService indexServiceOrNull = indicesService.indexService(shardId.getIndex()); if (indexServiceOrNull != null) { - IndexShard shard = indexService.getShardOrNull(request.shardId()); + IndexShard shard = indexService.getShardOrNull(shardId.getId()); if (shard != null) { shard.noopUpdate(request.type()); } diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java index 6bc69ed4d9c7..14c127c07039 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; @@ -88,7 +89,7 @@ public class UpdateRequest extends InstanceShardOperationRequest } public UpdateRequest(String index, String type, String id) { - this.index = index; + super(index); this.type = type; this.id = id; } @@ -195,7 +196,7 @@ public class 
UpdateRequest extends InstanceShardOperationRequest return parent; } - int shardId() { + public ShardId getShardId() { return this.shardId; } diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterChangedEvent.java b/core/src/main/java/org/elasticsearch/cluster/ClusterChangedEvent.java index e851b7814da5..c8a7924ba0fb 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterChangedEvent.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterChangedEvent.java @@ -23,6 +23,7 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.index.Index; import java.util.ArrayList; import java.util.Collections; @@ -120,7 +121,7 @@ public class ClusterChangedEvent { /** * Returns the indices deleted in this event */ - public List indicesDeleted() { + public List indicesDeleted() { // If the new cluster state has a new cluster UUID, the likely scenario is that a node was elected // master that has had its data directory wiped out, in which case we don't want to delete the indices and lose data; // rather we want to import them as dangling indices instead. 
So we check here if the cluster UUID differs from the previous @@ -131,17 +132,18 @@ public class ClusterChangedEvent { if (metaDataChanged() == false || isNewCluster()) { return Collections.emptyList(); } - List deleted = null; - for (ObjectCursor cursor : previousState.metaData().indices().keys()) { - String index = cursor.value; - if (!state.metaData().hasIndex(index)) { + List deleted = null; + for (ObjectCursor cursor : previousState.metaData().indices().values()) { + IndexMetaData index = cursor.value; + IndexMetaData current = state.metaData().index(index.getIndex().getName()); + if (current == null || index.getIndexUUID().equals(current.getIndexUUID()) == false) { if (deleted == null) { deleted = new ArrayList<>(); } - deleted.add(index); + deleted.add(index.getIndex()); } } - return deleted == null ? Collections.emptyList() : deleted; + return deleted == null ? Collections.emptyList() : deleted; } /** diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 62f3ad802a02..177c46e5537f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -53,6 +53,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.env.Environment; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.NodeServicesProvider; import org.elasticsearch.index.mapper.DocumentMapper; @@ -188,7 +189,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { @Override public ClusterState execute(ClusterState currentState) throws Exception { - boolean indexCreated = false; + Index createdIndex = null; String 
removalReason = null; try { validate(request, currentState); @@ -308,10 +309,9 @@ public class MetaDataCreateIndexService extends AbstractComponent { // Set up everything, now locally create the index to see that things are ok, and apply final IndexMetaData tmpImd = IndexMetaData.builder(request.index()).settings(actualIndexSettings).build(); // create the index here (on the master) to validate it can be created, as well as adding the mapping - indicesService.createIndex(nodeServicesProvider, tmpImd, Collections.emptyList()); - indexCreated = true; + final IndexService indexService = indicesService.createIndex(nodeServicesProvider, tmpImd, Collections.emptyList()); + createdIndex = indexService.index(); // now add the mappings - IndexService indexService = indicesService.indexServiceSafe(request.index()); MapperService mapperService = indexService.mapperService(); // first, add the default mapping if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) { @@ -415,9 +415,9 @@ public class MetaDataCreateIndexService extends AbstractComponent { removalReason = "cleaning up after validating index on master"; return updatedState; } finally { - if (indexCreated) { + if (createdIndex != null) { // Index was already partially created - need to clean up - indicesService.removeIndex(request.index(), removalReason != null ? removalReason : "failed to create index"); + indicesService.removeIndex(createdIndex, removalReason != null ? 
removalReason : "failed to create index"); } } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java index 52154bd2c042..1f0eaf0cda08 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.NodeServicesProvider; @@ -74,7 +75,7 @@ public class MetaDataIndexAliasesService extends AbstractComponent { @Override public ClusterState execute(final ClusterState currentState) { - List indicesToClose = new ArrayList<>(); + List indicesToClose = new ArrayList<>(); Map indices = new HashMap<>(); try { for (AliasAction aliasAction : request.actions()) { @@ -112,7 +113,7 @@ public class MetaDataIndexAliasesService extends AbstractComponent { logger.warn("[{}] failed to temporary create in order to apply alias action", e, indexMetaData.getIndex()); continue; } - indicesToClose.add(indexMetaData.getIndex().getName()); + indicesToClose.add(indexMetaData.getIndex()); } indices.put(indexMetaData.getIndex().getName(), indexService); } @@ -153,7 +154,7 @@ public class MetaDataIndexAliasesService extends AbstractComponent { } return currentState; } finally { - for (String index : indicesToClose) { + for (Index index : indicesToClose) { indicesService.removeIndex(index, "created for alias processing"); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java 
b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index c06a5cc7c1ca..51095a2d0de8 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.NodeServicesProvider; @@ -112,13 +113,13 @@ public class MetaDataMappingService extends AbstractComponent { MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData()); for (Map.Entry> entry : tasksPerIndex.entrySet()) { - String index = entry.getKey(); - IndexMetaData indexMetaData = mdBuilder.get(index); + IndexMetaData indexMetaData = mdBuilder.get(entry.getKey()); if (indexMetaData == null) { // index got deleted on us, ignore... 
- logger.debug("[{}] ignoring tasks - index meta data doesn't exist", index); + logger.debug("[{}] ignoring tasks - index meta data doesn't exist", entry.getKey()); continue; } + final Index index = indexMetaData.getIndex(); // the tasks lists to iterate over, filled with the list of mapping tasks, trying to keep // the latest (based on order) update mapping one per node List allIndexTasks = entry.getValue(); @@ -127,7 +128,7 @@ public class MetaDataMappingService extends AbstractComponent { if (indexMetaData.isSameUUID(task.indexUUID)) { hasTaskWithRightUUID = true; } else { - logger.debug("[{}] ignoring task [{}] - index meta data doesn't match task uuid", index, task); + logger.debug("{} ignoring task [{}] - index meta data doesn't match task uuid", index, task); } } if (hasTaskWithRightUUID == false) { @@ -136,7 +137,7 @@ public class MetaDataMappingService extends AbstractComponent { // construct the actual index if needed, and make sure the relevant mappings are there boolean removeIndex = false; - IndexService indexService = indicesService.indexService(index); + IndexService indexService = indicesService.indexService(indexMetaData.getIndex()); if (indexService == null) { // we need to create the index here, and add the current mapping to it, so we can merge indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.emptyList()); @@ -208,47 +209,57 @@ public class MetaDataMappingService extends AbstractComponent { class PutMappingExecutor implements ClusterStateTaskExecutor { @Override - public BatchResult execute(ClusterState currentState, List tasks) throws Exception { - Set indicesToClose = new HashSet<>(); + public BatchResult execute(ClusterState currentState, + List tasks) throws Exception { + Set indicesToClose = new HashSet<>(); BatchResult.Builder builder = BatchResult.builder(); try { // precreate incoming indices; for (PutMappingClusterStateUpdateRequest request : tasks) { - // failures here mean something is broken 
with our cluster state - fail all tasks by letting exceptions bubble up - for (String index : request.indices()) { - final IndexMetaData indexMetaData = currentState.metaData().index(index); - if (indexMetaData != null && indicesService.hasIndex(index) == false) { - // if we don't have the index, we will throw exceptions later; - indicesToClose.add(index); - IndexService indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.emptyList()); - // add mappings for all types, we need them for cross-type validation - for (ObjectCursor mapping : indexMetaData.getMappings().values()) { - indexService.mapperService().merge(mapping.value.type(), mapping.value.source(), MapperService.MergeReason.MAPPING_RECOVERY, request.updateAllTypes()); + final List indices = new ArrayList<>(request.indices().length); + try { + for (String index : request.indices()) { + final IndexMetaData indexMetaData = currentState.metaData().index(index); + if (indexMetaData != null) { + if (indicesService.hasIndex(indexMetaData.getIndex()) == false) { + // if the index does not exists we create it once, add all types to the mapper service and + // close it later once we are done with mapping update + indicesToClose.add(indexMetaData.getIndex()); + IndexService indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, + Collections.emptyList()); + // add mappings for all types, we need them for cross-type validation + for (ObjectCursor mapping : indexMetaData.getMappings().values()) { + indexService.mapperService().merge(mapping.value.type(), mapping.value.source(), + MapperService.MergeReason.MAPPING_RECOVERY, request.updateAllTypes()); + } + } + indices.add(indexMetaData.getIndex()); + } else { + // we didn't find the index in the clusterstate - maybe it was deleted + // NOTE: this doesn't fail the entire batch only the current PutMapping request we are processing + throw new IndexNotFoundException(index); } } - } - } - for 
(PutMappingClusterStateUpdateRequest request : tasks) { - try { - currentState = applyRequest(currentState, request); + currentState = applyRequest(currentState, request, indices); builder.success(request); } catch (Throwable t) { builder.failure(request, t); } } - return builder.build(currentState); } finally { - for (String index : indicesToClose) { + for (Index index : indicesToClose) { indicesService.removeIndex(index, "created for mapping processing"); } } } - private ClusterState applyRequest(ClusterState currentState, PutMappingClusterStateUpdateRequest request) throws IOException { + private ClusterState applyRequest(ClusterState currentState, PutMappingClusterStateUpdateRequest request, + List indices) throws IOException { String mappingType = request.type(); CompressedXContent mappingUpdateSource = new CompressedXContent(request.source()); - for (String index : request.indices()) { + final MetaData metaData = currentState.metaData(); + for (Index index : indices) { IndexService indexService = indicesService.indexServiceSafe(index); // try and parse it (no need to add it here) so we can bail early in case of parsing exception DocumentMapper newMapper; @@ -270,7 +281,7 @@ public class MetaDataMappingService extends AbstractComponent { // and a put mapping api call, so we don't which type did exist before. // Also the order of the mappings may be backwards. 
if (newMapper.parentFieldMapper().active()) { - IndexMetaData indexMetaData = currentState.metaData().index(index); + IndexMetaData indexMetaData = metaData.index(index); for (ObjectCursor mapping : indexMetaData.getMappings().values()) { if (newMapper.parentFieldMapper().type().equals(mapping.value.type())) { throw new IllegalArgumentException("can't add a _parent field that points to an already existing type"); @@ -290,11 +301,11 @@ public class MetaDataMappingService extends AbstractComponent { if (!MapperService.DEFAULT_MAPPING.equals(mappingType) && !PercolatorService.TYPE_NAME.equals(mappingType) && mappingType.charAt(0) == '_') { throw new InvalidTypeNameException("Document mapping type name can't start with '_'"); } - MetaData.Builder builder = MetaData.builder(currentState.metaData()); - for (String index : request.indices()) { + MetaData.Builder builder = MetaData.builder(metaData); + for (Index index : indices) { // do the actual merge here on the master, and update the mapping source IndexService indexService = indicesService.indexService(index); - if (indexService == null) { + if (indexService == null) { // TODO this seems impossible given we use indexServiceSafe above continue; } @@ -326,7 +337,7 @@ public class MetaDataMappingService extends AbstractComponent { } } - IndexMetaData indexMetaData = currentState.metaData().index(index); + IndexMetaData indexMetaData = metaData.index(index); if (indexMetaData == null) { throw new IndexNotFoundException(index); } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index 440a11a1904b..6fd833471eda 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -103,6 +103,7 @@ import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import 
java.util.function.Predicate; +import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; @@ -185,14 +186,14 @@ public class IndicesService extends AbstractLifecycleComponent i ExecutorService indicesStopExecutor = Executors.newFixedThreadPool(5, EsExecutors.daemonThreadFactory("indices_shutdown")); // Copy indices because we modify it asynchronously in the body of the loop - Set indices = new HashSet<>(this.indices.keySet()); + final Set indices = this.indices.values().stream().map(s -> s.index()).collect(Collectors.toSet()); final CountDownLatch latch = new CountDownLatch(indices.size()); - for (final String index : indices) { + for (final Index index : indices) { indicesStopExecutor.execute(() -> { try { removeIndex(index, "shutdown", false); } catch (Throwable e) { - logger.warn("failed to remove index on stop [" + index + "]", e); + logger.warn("failed to remove index on stop " + index + "", e); } finally { latch.countDown(); } @@ -256,7 +257,7 @@ public class IndicesService extends AbstractLifecycleComponent i } Map> statsByShard = new HashMap<>(); - for (IndexService indexService : indices.values()) { + for (IndexService indexService : this) { for (IndexShard indexShard : indexService) { try { if (indexShard.routingEntry() == null) { @@ -290,17 +291,8 @@ public class IndicesService extends AbstractLifecycleComponent i return indices.values().iterator(); } - public boolean hasIndex(String index) { - return indices.containsKey(index); - } - - /** - * Returns an IndexService for the specified index if exists otherwise returns null. 
- * - */ - @Nullable - public IndexService indexService(String index) { - return indices.get(index); + public boolean hasIndex(Index index) { + return indices.containsKey(index.getUUID()); } /** @@ -309,33 +301,21 @@ public class IndicesService extends AbstractLifecycleComponent i */ @Nullable public IndexService indexService(Index index) { - return indexService(index.getName()); - } - - /** - * Returns an IndexService for the specified index if exists otherwise a {@link IndexNotFoundException} is thrown. - */ - public IndexService indexServiceSafe(String index) { - IndexService indexService = indexService(index); - if (indexService == null) { - throw new IndexNotFoundException(index); - } - return indexService; + return indices.get(index.getUUID()); } /** * Returns an IndexService for the specified index if exists otherwise a {@link IndexNotFoundException} is thrown. */ public IndexService indexServiceSafe(Index index) { - IndexService indexService = indexServiceSafe(index.getName()); - if (indexService.indexUUID().equals(index.getUUID()) == false) { + IndexService indexService = indices.get(index.getUUID()); + if (indexService == null) { throw new IndexNotFoundException(index); } + assert indexService.indexUUID().equals(index.getUUID()) : "uuid mismatch local: " + indexService.indexUUID() + " incoming: " + index.getUUID(); return indexService; } - - /** * Creates a new {@link IndexService} for the given metadata. 
* @param indexMetaData the index metadata to create the index for @@ -346,10 +326,13 @@ public class IndicesService extends AbstractLifecycleComponent i if (!lifecycle.started()) { throw new IllegalStateException("Can't create an index [" + indexMetaData.getIndex() + "], node is closed"); } + if (indexMetaData.getIndexUUID().equals(IndexMetaData.INDEX_UUID_NA_VALUE)) { + throw new IllegalArgumentException("index must have a real UUID found value: [" + indexMetaData.getIndexUUID() + "]"); + } final Index index = indexMetaData.getIndex(); final Predicate indexNameMatcher = (indexExpression) -> indexNameExpressionResolver.matchesIndex(index.getName(), indexExpression, clusterService.state()); final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, indexNameMatcher, indexScopeSetting); - if (indices.containsKey(index.getName())) { + if (hasIndex(index)) { throw new IndexAlreadyExistsException(index); } logger.debug("creating Index [{}], shards [{}]/[{}{}]", @@ -378,7 +361,7 @@ public class IndicesService extends AbstractLifecycleComponent i try { assert indexService.getIndexEventListener() == listener; listener.afterIndexCreated(indexService); - indices = newMapBuilder(indices).put(index.getName(), indexService).immutableMap(); + indices = newMapBuilder(indices).put(index.getUUID(), indexService).immutableMap(); success = true; return indexService; } finally { @@ -395,22 +378,24 @@ public class IndicesService extends AbstractLifecycleComponent i * @param index the index to remove * @param reason the high level reason causing this removal */ - public void removeIndex(String index, String reason) { + public void removeIndex(Index index, String reason) { removeIndex(index, reason, false); } - private void removeIndex(String index, String reason, boolean delete) { + private void removeIndex(Index index, String reason, boolean delete) { + final String indexName = index.getName(); try { final IndexService indexService; final IndexEventListener 
listener; synchronized (this) { - if (indices.containsKey(index) == false) { + if (hasIndex(index) == false) { return; } - logger.debug("[{}] closing ... (reason [{}])", index, reason); + logger.debug("[{}] closing ... (reason [{}])", indexName, reason); Map newIndices = new HashMap<>(indices); - indexService = newIndices.remove(index); + indexService = newIndices.remove(index.getUUID()); + assert indexService != null : "IndexService is null for index: " + index; indices = unmodifiableMap(newIndices); listener = indexService.getIndexEventListener(); } @@ -419,9 +404,9 @@ public class IndicesService extends AbstractLifecycleComponent i if (delete) { listener.beforeIndexDeleted(indexService); } - logger.debug("[{}] closing index service (reason [{}])", index, reason); + logger.debug("{} closing index service (reason [{}])", index, reason); indexService.close(reason, delete); - logger.debug("[{}] closed... (reason [{}])", index, reason); + logger.debug("{} closed... (reason [{}])", index, reason); listener.afterIndexClosed(indexService.index(), indexService.getIndexSettings().getSettings()); if (delete) { final IndexSettings indexSettings = indexService.getIndexSettings(); @@ -474,12 +459,12 @@ public class IndicesService extends AbstractLifecycleComponent i * Deletes the given index. Persistent parts of the index * like the shards files, state and transaction logs are removed once all resources are released. * - * Equivalent to {@link #removeIndex(String, String)} but fires + * Equivalent to {@link #removeIndex(Index, String)} but fires * different lifecycle events to ensure pending resources of this index are immediately removed. 
* @param index the index to delete * @param reason the high level reason causing this delete */ - public void deleteIndex(String index, String reason) throws IOException { + public void deleteIndex(Index index, String reason) throws IOException { removeIndex(index, reason, true); } @@ -505,16 +490,17 @@ public class IndicesService extends AbstractLifecycleComponent i public void deleteIndexStore(String reason, IndexMetaData metaData, ClusterState clusterState, boolean closed) throws IOException { if (nodeEnv.hasNodeFile()) { synchronized (this) { - String indexName = metaData.getIndex().getName(); - if (indices.containsKey(indexName)) { - String localUUid = indices.get(indexName).indexUUID(); - throw new IllegalStateException("Can't delete index store for [" + indexName + "] - it's still part of the indices service [" + localUUid + "] [" + metaData.getIndexUUID() + "]"); + Index index = metaData.getIndex(); + if (hasIndex(index)) { + String localUUid = indexService(index).indexUUID(); + throw new IllegalStateException("Can't delete index store for [" + index.getName() + "] - it's still part of the indices service [" + localUUid + "] [" + metaData.getIndexUUID() + "]"); } - if (clusterState.metaData().hasIndex(indexName) && (clusterState.nodes().localNode().masterNode() == true)) { + + if (clusterState.metaData().hasIndex(index.getName()) && (clusterState.nodes().localNode().masterNode() == true)) { // we do not delete the store if it is a master eligible node and the index is still in the cluster state // because we want to keep the meta data for indices around even if no shards are left here - final IndexMetaData index = clusterState.metaData().index(indexName); - throw new IllegalStateException("Can't delete closed index store for [" + indexName + "] - it's still part of the cluster state [" + index.getIndexUUID() + "] [" + metaData.getIndexUUID() + "]"); + final IndexMetaData idxMeta = clusterState.metaData().index(index.getName()); + throw new 
IllegalStateException("Can't delete closed index store for [" + index.getName() + "] - it's still part of the cluster state [" + idxMeta.getIndexUUID() + "] [" + metaData.getIndexUUID() + "]"); } } final IndexSettings indexSettings = buildIndexSettings(metaData); @@ -607,7 +593,7 @@ public class IndicesService extends AbstractLifecycleComponent i * @return true if the index can be deleted on this node */ public boolean canDeleteIndexContents(Index index, IndexSettings indexSettings, boolean closed) { - final IndexService indexService = this.indices.get(index.getName()); + final IndexService indexService = indexService(index); // Closed indices may be deleted, even if they are on a shared // filesystem. Since it is closed we aren't deleting it for relocation if (indexSettings.isOnSharedFilesystem() == false || closed) { @@ -634,7 +620,7 @@ public class IndicesService extends AbstractLifecycleComponent i */ public boolean canDeleteShardContent(ShardId shardId, IndexSettings indexSettings) { assert shardId.getIndex().equals(indexSettings.getIndex()); - final IndexService indexService = this.indices.get(shardId.getIndexName()); + final IndexService indexService = indexService(shardId.getIndex()); if (indexSettings.isOnSharedFilesystem() == false) { if (indexService != null && nodeEnv.hasNodeFile()) { return indexService.hasShard(shardId.id()) == false; diff --git a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index 7998afb7656e..af667f356e8d 100644 --- a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -46,6 +46,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.Callback; import 
org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexShardAlreadyExistsException; @@ -157,13 +158,13 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent { @Override public void handle(final IndexShard.ShardFailure shardFailure) { - final IndexService indexService = indicesService.indexService(shardFailure.routing.shardId().getIndex().getName()); + final IndexService indexService = indicesService.indexService(shardFailure.routing.shardId().getIndex()); final ShardRouting shardRouting = shardFailure.routing; threadPool.generic().execute(() -> { synchronized (mutex) { diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java index 9a5c23fc2e18..934730c7c930 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java @@ -83,7 +83,7 @@ public class RecoverySource extends AbstractComponent implements IndexEventListe } private RecoveryResponse recover(final StartRecoveryRequest request) throws IOException { - final IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex().getName()); + final IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex()); final IndexShard shard = indexService.getShard(request.shardId().id()); // starting recovery from that our (the source) shard state is marking the shard to be in recovery mode as well, otherwise diff --git a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index d0aec817ee9a..6e9859efb2e3 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java 
+++ b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -348,7 +348,7 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe return null; } ShardId shardId = request.shardId; - IndexService indexService = indicesService.indexService(shardId.getIndexName()); + IndexService indexService = indicesService.indexService(shardId.getIndex()); if (indexService != null && indexService.indexUUID().equals(request.indexUUID)) { return indexService.getShardOrNull(shardId.id()); } diff --git a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index bcc2d7f74c45..e009cbf04d18 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -126,7 +126,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction imp } final SearchContext createContext(ShardSearchRequest request, @Nullable Engine.Searcher searcher) { - IndexService indexService = indicesService.indexServiceSafe(request.index()); - IndexShard indexShard = indexService.getShard(request.shardId()); - - SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(), indexShard.shardId().getIndex(), request.shardId()); + IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex()); + IndexShard indexShard = indexService.getShard(request.shardId().getId()); + SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(), indexShard.shardId()); Engine.Searcher engineSearcher = searcher == null ? 
indexShard.acquireSearcher("search") : searcher; diff --git a/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java b/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java index d3958505d708..d675a93b691a 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java +++ b/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java @@ -23,28 +23,38 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.Index; +import org.elasticsearch.index.shard.ShardId; import java.io.IOException; /** * The target that the search request was executed on. */ -public class SearchShardTarget implements Streamable, Comparable { +public class SearchShardTarget implements Writeable, Comparable { private Text nodeId; private Text index; - private int shardId; + private ShardId shardId; - private SearchShardTarget() { + public SearchShardTarget(StreamInput in) throws IOException { + if (in.readBoolean()) { + nodeId = in.readText(); + } + shardId = ShardId.readShardId(in); + index = new Text(shardId.getIndexName()); + } + public SearchShardTarget(String nodeId, ShardId shardId) { + this.nodeId = nodeId == null ? null : new Text(nodeId); + this.index = new Text(shardId.getIndexName()); + this.shardId = shardId; } public SearchShardTarget(String nodeId, Index index, int shardId) { - this.nodeId = nodeId == null ? 
null : new Text(nodeId); - this.index = new Text(index.getName()); - this.shardId = shardId; + this(nodeId, new ShardId(index, shardId)); } @Nullable @@ -73,36 +83,26 @@ public class SearchShardTarget implements Streamable, Comparable o1, AtomicArray.Entry o2) { int i = o1.value.shardTarget().index().compareTo(o2.value.shardTarget().index()); if (i == 0) { - i = o1.value.shardTarget().shardId() - o2.value.shardTarget().shardId(); + i = o1.value.shardTarget().shardId().id() - o2.value.shardTarget().shardId().id(); } return i; } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/ScrollQueryFetchSearchResult.java b/core/src/main/java/org/elasticsearch/search/fetch/ScrollQueryFetchSearchResult.java index fb0fc75299fb..dbaee5b64bb6 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/ScrollQueryFetchSearchResult.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/ScrollQueryFetchSearchResult.java @@ -26,7 +26,6 @@ import org.elasticsearch.transport.TransportResponse; import java.io.IOException; -import static org.elasticsearch.search.SearchShardTarget.readSearchShardTarget; import static org.elasticsearch.search.fetch.QueryFetchSearchResult.readQueryFetchSearchResult; /** @@ -56,7 +55,7 @@ public class ScrollQueryFetchSearchResult extends TransportResponse { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - shardTarget = readSearchShardTarget(in); + shardTarget = new SearchShardTarget(in); result = readQueryFetchSearchResult(in); result.shardTarget(shardTarget); } diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java index c6afe325bb32..dcbcce503a42 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java +++ b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java @@ -55,7 +55,6 @@ import static java.util.Collections.singletonMap; 
import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.lucene.Lucene.readExplanation; import static org.elasticsearch.common.lucene.Lucene.writeExplanation; -import static org.elasticsearch.search.SearchShardTarget.readSearchShardTarget; import static org.elasticsearch.search.highlight.HighlightField.readHighlightField; import static org.elasticsearch.search.internal.InternalSearchHitField.readSearchHitField; @@ -638,7 +637,7 @@ public class InternalSearchHit implements SearchHit { if (context.streamShardTarget() == ShardTargetType.STREAM) { if (in.readBoolean()) { - shard = readSearchShardTarget(in); + shard = new SearchShardTarget(in); } } else if (context.streamShardTarget() == ShardTargetType.LOOKUP) { int lookupId = in.readVInt(); diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHits.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHits.java index 9e787cf2aa94..09d11e1a1a3d 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHits.java +++ b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHits.java @@ -34,7 +34,6 @@ import java.util.IdentityHashMap; import java.util.Iterator; import java.util.Map; -import static org.elasticsearch.search.SearchShardTarget.readSearchShardTarget; import static org.elasticsearch.search.internal.InternalSearchHit.readSearchHit; /** @@ -216,7 +215,7 @@ public class InternalSearchHits implements SearchHits { // read the lookup table first int lookupSize = in.readVInt(); for (int i = 0; i < lookupSize; i++) { - context.handleShardLookup().put(in.readVInt(), readSearchShardTarget(in)); + context.handleShardLookup().put(in.readVInt(), new SearchShardTarget(in)); } } @@ -262,4 +261,4 @@ public class InternalSearchHits implements SearchHits { } } } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java 
b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java index 0f46461f4a29..56ad8ed9467c 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java @@ -58,8 +58,7 @@ import static org.elasticsearch.search.Scroll.readScroll; public class ShardSearchLocalRequest implements ShardSearchRequest { - private String index; - private int shardId; + private ShardId shardId; private int numberOfShards; private SearchType searchType; private Scroll scroll; @@ -97,8 +96,7 @@ public class ShardSearchLocalRequest implements ShardSearchRequest { public ShardSearchLocalRequest(ShardId shardId, int numberOfShards, SearchType searchType, SearchSourceBuilder source, String[] types, Boolean requestCache) { - this.index = shardId.getIndexName(); - this.shardId = shardId.id(); + this.shardId = shardId; this.numberOfShards = numberOfShards; this.searchType = searchType; this.source = source; @@ -106,13 +104,9 @@ public class ShardSearchLocalRequest implements ShardSearchRequest { this.requestCache = requestCache; } - @Override - public String index() { - return index; - } @Override - public int shardId() { + public ShardId shardId() { return shardId; } @@ -177,8 +171,7 @@ public class ShardSearchLocalRequest implements ShardSearchRequest { @SuppressWarnings("unchecked") protected void innerReadFrom(StreamInput in) throws IOException { - index = in.readString(); - shardId = in.readVInt(); + shardId = ShardId.readShardId(in); searchType = SearchType.fromId(in.readByte()); numberOfShards = in.readVInt(); if (in.readBoolean()) { @@ -195,8 +188,7 @@ public class ShardSearchLocalRequest implements ShardSearchRequest { } protected void innerWriteTo(StreamOutput out, boolean asKey) throws IOException { - out.writeString(index); - out.writeVInt(shardId); + shardId.writeTo(out); out.writeByte(searchType.id()); if (!asKey) { 
out.writeVInt(numberOfShards); diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java index 1f0b3d1f188d..82ff69078aa8 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.internal; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.script.Template; import org.elasticsearch.search.Scroll; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -34,9 +35,7 @@ import java.io.IOException; */ public interface ShardSearchRequest { - String index(); - - int shardId(); + ShardId shardId(); String[] types(); diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java index 48ea31c170a5..dc19f84c7a78 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.script.Template; import org.elasticsearch.search.Scroll; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -71,13 +72,9 @@ public class ShardSearchTransportRequest extends TransportRequest implements Sha return originalIndices.indicesOptions(); } - @Override - public String index() { - return shardSearchLocalRequest.index(); - } 
@Override - public int shardId() { + public ShardId shardId() { return shardSearchLocalRequest.shardId(); } diff --git a/core/src/main/java/org/elasticsearch/search/query/ScrollQuerySearchResult.java b/core/src/main/java/org/elasticsearch/search/query/ScrollQuerySearchResult.java index ebb7615da44b..bcdd94adf891 100644 --- a/core/src/main/java/org/elasticsearch/search/query/ScrollQuerySearchResult.java +++ b/core/src/main/java/org/elasticsearch/search/query/ScrollQuerySearchResult.java @@ -26,7 +26,6 @@ import org.elasticsearch.transport.TransportResponse; import java.io.IOException; -import static org.elasticsearch.search.SearchShardTarget.readSearchShardTarget; import static org.elasticsearch.search.query.QuerySearchResult.readQuerySearchResult; /** @@ -56,7 +55,7 @@ public class ScrollQuerySearchResult extends TransportResponse { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - shardTarget = readSearchShardTarget(in); + shardTarget = new SearchShardTarget(in); queryResult = readQuerySearchResult(in); queryResult.shardTarget(shardTarget); } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java index a8a4e9ec26bb..a6cf877a1f82 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; @@ -45,12 +46,12 @@ public final class 
SuggestParseElement implements SearchParseElement { @Override public void parse(XContentParser parser, SearchContext context) throws Exception { SuggestionSearchContext suggestionSearchContext = parseInternal(parser, context.mapperService(), context.fieldData(), - context.shardTarget().index(), context.shardTarget().shardId()); + context.shardTarget().shardId()); context.suggest(suggestionSearchContext); } - public SuggestionSearchContext parseInternal(XContentParser parser, MapperService mapperService, IndexFieldDataService fieldDataService, - String index, int shardId) throws IOException { + public SuggestionSearchContext parseInternal(XContentParser parser, MapperService mapperService, + IndexFieldDataService fieldDataService, ShardId shardId) throws IOException { SuggestionSearchContext suggestionSearchContext = new SuggestionSearchContext(); BytesRef globalText = null; @@ -119,7 +120,6 @@ public final class SuggestParseElement implements SearchParseElement { SuggestionContext suggestionContext = entry.getValue(); suggestionContext.setShard(shardId); - suggestionContext.setIndex(index); SuggestUtils.verifySuggestion(mapperService, globalText, suggestionContext); suggestionSearchContext.addSuggestion(suggestionName, suggestionContext); } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java index 1d3339e0578b..48e4fb5dc0b8 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.suggest; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.shard.ShardId; import java.util.LinkedHashMap; import java.util.Map; @@ -36,9 +37,9 @@ public class SuggestionSearchContext { public Map suggestions() { return 
suggestions; } - + public static class SuggestionContext { - + private BytesRef text; private BytesRef prefix; private BytesRef regex; @@ -47,9 +48,8 @@ public class SuggestionSearchContext { private Analyzer analyzer; private int size = 5; private int shardSize = -1; - private int shardId; - private String index; - + private ShardId shardId; + public BytesRef getText() { return text; } @@ -119,20 +119,12 @@ public class SuggestionSearchContext { } this.shardSize = shardSize; } - - public void setShard(int shardId) { + + public void setShard(ShardId shardId) { this.shardId = shardId; } - public void setIndex(String index) { - this.index = index; - } - - public String getIndex() { - return index; - } - - public int getShard() { + public ShardId getShard() { return shardId; } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java index 7838eacd960f..74e7f90600a9 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java @@ -117,7 +117,7 @@ public final class PhraseSuggester extends Suggester { vars.put(SUGGESTION_TEMPLATE_VAR_NAME, spare.toString()); final ExecutableScript executable = scriptService.executable(collateScript, vars); final BytesReference querySource = (BytesReference) executable.run(); - IndexService indexService = indicesService.indexService(suggestion.getIndex()); + IndexService indexService = indicesService.indexService(suggestion.getShard().getIndex()); final ParsedQuery parsedQuery = indexService.newQueryShardContext().parse(querySource); collateMatch = Lucene.exists(searcher, parsedQuery.query()); } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java 
index 2e39c39cfd2b..c31993ebb812 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java @@ -32,6 +32,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; @@ -157,6 +158,7 @@ public class IndicesShardStoreRequestIT extends ESIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "5") .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) )); + indexRandomData(index); ensureGreen(index); @@ -165,9 +167,10 @@ public class IndicesShardStoreRequestIT extends ESIntegTestCase { logger.info("--> corrupt random shard copies"); Map> corruptedShardIDMap = new HashMap<>(); + Index idx = resolveIndex(index); for (String node : internalCluster().nodesInclude(index)) { IndicesService indexServices = internalCluster().getInstance(IndicesService.class, node); - IndexService indexShards = indexServices.indexServiceSafe(index); + IndexService indexShards = indexServices.indexServiceSafe(idx); for (Integer shardId : indexShards.shardIds()) { IndexShard shard = indexShards.getShard(shardId); if (randomBoolean()) { diff --git a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java index cf7b6745c8ed..462a44e08b45 100644 --- a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java +++ 
b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java @@ -113,7 +113,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { @Override protected ShardIterator shards(ClusterState clusterState, Request request) { - return clusterState.routingTable().index(request.concreteIndex()).shard(request.shardId).primaryShardIt(); + return clusterState.routingTable().index(request.concreteIndex()).shard(request.shardId.getId()).primaryShardIt(); } } @@ -178,7 +178,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { public void testBasicRequestWorks() throws InterruptedException, ExecutionException, TimeoutException { Request request = new Request().index("test"); - request.shardId = 0; + request.shardId = new ShardId("test", "_na_", 0); PlainActionFuture listener = new PlainActionFuture<>(); clusterService.setState(ClusterStateCreationUtils.state("test", randomBoolean(), ShardRoutingState.STARTED)); action.new AsyncSingleAction(request, listener).start(); @@ -189,7 +189,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { public void testFailureWithoutRetry() throws Exception { Request request = new Request().index("test"); - request.shardId = 0; + request.shardId = new ShardId("test", "_na_", 0); PlainActionFuture listener = new PlainActionFuture<>(); clusterService.setState(ClusterStateCreationUtils.state("test", randomBoolean(), ShardRoutingState.STARTED)); @@ -215,7 +215,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { public void testSuccessAfterRetryWithClusterStateUpdate() throws Exception { Request request = new Request().index("test"); - request.shardId = 0; + request.shardId = new ShardId("test", "_na_", 0); PlainActionFuture listener = new PlainActionFuture<>(); boolean local = randomBoolean(); clusterService.setState(ClusterStateCreationUtils.state("test", local, 
ShardRoutingState.INITIALIZING)); @@ -231,7 +231,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { public void testSuccessAfterRetryWithExceptionFromTransport() throws Exception { Request request = new Request().index("test"); - request.shardId = 0; + request.shardId = new ShardId("test", "_na_", 0); PlainActionFuture listener = new PlainActionFuture<>(); boolean local = randomBoolean(); clusterService.setState(ClusterStateCreationUtils.state("test", local, ShardRoutingState.STARTED)); @@ -250,7 +250,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { public void testRetryOfAnAlreadyTimedOutRequest() throws Exception { Request request = new Request().index("test").timeout(new TimeValue(0, TimeUnit.MILLISECONDS)); - request.shardId = 0; + request.shardId = new ShardId("test", "_na_", 0); PlainActionFuture listener = new PlainActionFuture<>(); clusterService.setState(ClusterStateCreationUtils.state("test", randomBoolean(), ShardRoutingState.STARTED)); action.new AsyncSingleAction(request, listener).start(); @@ -299,7 +299,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { } }; Request request = new Request().index("test"); - request.shardId = 0; + request.shardId = new ShardId("test", "_na_", 0); PlainActionFuture listener = new PlainActionFuture<>(); clusterService.setState(ClusterStateCreationUtils.state("test", randomBoolean(), ShardRoutingState.STARTED)); action.new AsyncSingleAction(request, listener).start(); diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java index cefd3a6703a8..fc43f4154d1e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.index.Index; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; @@ -37,6 +38,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import static org.hamcrest.Matchers.equalTo; @@ -220,7 +222,7 @@ public class ClusterChangedEventTests extends ESTestCase { final ClusterState newState = nextState(previousState, changeClusterUUID, addedIndices, delIndices, 0); final ClusterChangedEvent event = new ClusterChangedEvent("_na_", newState, previousState); final List addsFromEvent = event.indicesCreated(); - final List delsFromEvent = event.indicesDeleted(); + final List delsFromEvent = event.indicesDeleted().stream().map((s) -> s.getName()).collect(Collectors.toList()); Collections.sort(addsFromEvent); Collections.sort(delsFromEvent); assertThat(addsFromEvent, equalTo(addedIndices)); diff --git a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java index a7d127a60c8b..aa3da8fc8405 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java +++ b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java @@ -156,10 +156,11 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0)); ensureGreen(); refresh(); - + Index index = resolveIndex("foo-copy"); for (IndicesService service : internalCluster().getDataNodeInstances(IndicesService.class)) { - if (service.hasIndex("foo-copy")) { - IndexShard shard = service.indexServiceSafe("foo-copy").getShardOrNull(0); + + if (service.hasIndex(index)) { + IndexShard shard = service.indexServiceSafe(index).getShardOrNull(0); 
if (shard.routingEntry().primary()) { assertFalse(shard instanceof ShadowIndexShard); } else { @@ -201,8 +202,9 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats(IDX).clear().setTranslog(true).get(); assertEquals(2, indicesStatsResponse.getIndex(IDX).getPrimaries().getTranslog().estimatedNumberOfOperations()); assertEquals(2, indicesStatsResponse.getIndex(IDX).getTotal().getTranslog().estimatedNumberOfOperations()); + Index index = resolveIndex(IDX); for (IndicesService service : internalCluster().getInstances(IndicesService.class)) { - IndexService indexService = service.indexService(IDX); + IndexService indexService = service.indexService(index); if (indexService != null) { IndexShard shard = indexService.getShard(0); TranslogStats translogStats = shard.translogStats(); diff --git a/core/src/test/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java b/core/src/test/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java index 1758d95a554d..ec405bd84070 100644 --- a/core/src/test/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java +++ b/core/src/test/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java @@ -68,7 +68,7 @@ public class CustomQueryParserIT extends ESIntegTestCase { private static QueryShardContext queryShardContext() { IndicesService indicesService = internalCluster().getDataNodeInstance(IndicesService.class); - return indicesService.indexServiceSafe("index").newQueryShardContext(); + return indicesService.indexServiceSafe(resolveIndex("index")).newQueryShardContext(); } //see #11120 diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index e70ca9ec6dea..1acf4e3fa1b4 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -193,7 +193,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); NodeEnvironment env = getInstanceFromNode(NodeEnvironment.class); - IndexService test = indicesService.indexService("test"); + IndexService test = indicesService.indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); ShardStateMetaData shardStateMetaData = load(logger, env.availableShardPaths(shard.shardId)); assertEquals(getShardStateMetadata(shard), shardStateMetaData); @@ -226,7 +226,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); NodeEnvironment env = getInstanceFromNode(NodeEnvironment.class); - IndexService test = indicesService.indexService("test"); + IndexService test = indicesService.indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); // fail shard shard.failShard("test shard fail", new CorruptIndexException("", "")); @@ -281,7 +281,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0)).get()); ensureGreen("test"); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService indexService = indicesService.indexServiceSafe("test"); + IndexService indexService = indicesService.indexServiceSafe(resolveIndex("test")); IndexShard indexShard = indexService.getShardOrNull(0); client().admin().indices().prepareDelete("test").get(); assertThat(indexShard.getActiveOperationsCount(), equalTo(0)); @@ -303,7 +303,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { 
assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0)).get()); ensureGreen("test"); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService indexService = indicesService.indexServiceSafe("test"); + IndexService indexService = indicesService.indexServiceSafe(resolveIndex("test")); IndexShard indexShard = indexService.getShardOrNull(0); assertEquals(0, indexShard.getActiveOperationsCount()); Releasable operation1 = indexShard.acquirePrimaryOperationLock(); @@ -320,11 +320,11 @@ public class IndexShardTests extends ESSingleNodeTestCase { client().prepareIndex("test", "test").setSource("{}").get(); ensureGreen("test"); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - indicesService.indexService("test").getShardOrNull(0).checkIdle(0); + indicesService.indexService(resolveIndex("test")).getShardOrNull(0).checkIdle(0); assertBusy(() -> { IndexStats indexStats = client().admin().indices().prepareStats("test").clear().get().getIndex("test"); assertNotNull(indexStats.getShards()[0].getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID)); - indicesService.indexService("test").getShardOrNull(0).checkIdle(0); + indicesService.indexService(resolveIndex("test")).getShardOrNull(0).checkIdle(0); } ); IndexStats indexStats = client().admin().indices().prepareStats("test").get().getIndex("test"); @@ -345,7 +345,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ensureGreen(); client().prepareIndex("test", "bar", "1").setSource("{}").get(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService test = indicesService.indexService("test"); + IndexService test = indicesService.indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); setDurability(shard, Translog.Durability.REQUEST); assertFalse(shard.getEngine().getTranslog().syncNeeded()); @@ -385,7 
+385,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { client().prepareIndex("test", "test").setSource("{}").get(); ensureGreen("test"); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexShard test = indicesService.indexService("test").getShardOrNull(0); + IndexShard test = indicesService.indexService(resolveIndex("test")).getShardOrNull(0); assertEquals(versionCreated.luceneVersion, test.minimumCompatibleVersion()); client().prepareIndex("test", "test").setSource("{}").get(); assertEquals(versionCreated.luceneVersion, test.minimumCompatibleVersion()); @@ -396,7 +396,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { public void testUpdatePriority() { assertAcked(client().admin().indices().prepareCreate("test") .setSettings(IndexMetaData.SETTING_PRIORITY, 200)); - IndexService indexService = getInstanceFromNode(IndicesService.class).indexService("test"); + IndexService indexService = getInstanceFromNode(IndicesService.class).indexService(resolveIndex("test")); assertEquals(200, indexService.getIndexSettings().getSettings().getAsInt(IndexMetaData.SETTING_PRIORITY, 0).intValue()); client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_PRIORITY, 400).build()).get(); assertEquals(400, indexService.getIndexSettings().getSettings().getAsInt(IndexMetaData.SETTING_PRIORITY, 0).intValue()); @@ -410,7 +410,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { SearchResponse response = client().prepareSearch("test").get(); assertHitCount(response, 1L); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService test = indicesService.indexService("test"); + IndexService test = indicesService.indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); ShardPath shardPath = shard.shardPath(); Path dataPath = shardPath.getDataPath(); @@ -530,7 +530,7 @@ public class IndexShardTests extends 
ESSingleNodeTestCase { createIndex("test"); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService test = indicesService.indexService("test"); + IndexService test = indicesService.indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); ShardStats stats = new ShardStats(shard.routingEntry(), shard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), shard, new CommonStatsFlags()), shard.commitStats()); assertEquals(shard.shardPath().getRootDataPath().toString(), stats.getDataPath()); @@ -570,7 +570,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ensureGreen(); client().prepareIndex("test_iol", "test", "0").setSource("{\"foo\" : \"bar\"}").setRefresh(true).get(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService test = indicesService.indexService("test_iol"); + IndexService test = indicesService.indexService(resolveIndex("test_iol")); IndexShard shard = test.getShardOrNull(0); AtomicInteger preIndex = new AtomicInteger(); AtomicInteger postIndex = new AtomicInteger(); @@ -669,7 +669,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { createIndex("test", settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.REQUEST).build()); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService test = indicesService.indexService("test"); + IndexService test = indicesService.indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); assertFalse(shard.shouldFlush()); client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(133 /* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); @@ -703,7 +703,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { 
createIndex("test"); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService test = indicesService.indexService("test"); + IndexService test = indicesService.indexService(resolveIndex("test")); final IndexShard shard = test.getShardOrNull(0); assertFalse(shard.shouldFlush()); client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(133/* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); @@ -749,7 +749,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ).get()); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService test = indicesService.indexService("test"); + IndexService test = indicesService.indexService(resolveIndex("test")); final IndexShard shard = test.getShardOrNull(0); CountDownLatch latch = new CountDownLatch(1); Thread recoveryThread = new Thread(() -> { @@ -779,7 +779,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ).get()); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService test = indicesService.indexService("test"); + IndexService test = indicesService.indexService(resolveIndex("test")); final IndexShard shard = test.getShardOrNull(0); final int numThreads = randomIntBetween(2, 4); Thread[] indexThreads = new Thread[numThreads]; @@ -830,7 +830,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { createIndex("test"); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService test = indicesService.indexService("test"); + IndexService test = indicesService.indexService(resolveIndex("test")); final IndexShard shard = test.getShardOrNull(0); int translogOps = 1; client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); @@ -861,7 +861,7 @@ 
public class IndexShardTests extends ESSingleNodeTestCase { createIndex("test"); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService test = indicesService.indexService("test"); + IndexService test = indicesService.indexService(resolveIndex("test")); final IndexShard shard = test.getShardOrNull(0); client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); if (randomBoolean()) { @@ -892,7 +892,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, Version.CURRENT); - IndexService test = indicesService.indexService("test"); + IndexService test = indicesService.indexService(resolveIndex("test")); final IndexShard shard = test.getShardOrNull(0); client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); @@ -945,7 +945,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { createIndex("test"); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService test = indicesService.indexService("test"); + IndexService test = indicesService.indexService(resolveIndex("test")); final IndexShard shard = test.getShardOrNull(0); ShardRouting origRouting = shard.routingEntry(); assertThat(shard.state(), equalTo(IndexShardState.STARTED)); @@ -967,8 +967,8 @@ public class IndexShardTests extends ESSingleNodeTestCase { createIndex("test_target"); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService test = indicesService.indexService("test"); - IndexService test_target = indicesService.indexService("test_target"); + IndexService test = indicesService.indexService(resolveIndex("test")); + IndexService test_target = indicesService.indexService(resolveIndex("test_target")); final 
IndexShard test_shard = test.getShardOrNull(0); client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); @@ -1029,7 +1029,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { createIndex("test"); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService indexService = indicesService.indexService("test"); + IndexService indexService = indicesService.indexService(resolveIndex("test")); IndexShard shard = indexService.getShardOrNull(0); client().prepareIndex("test", "test", "0").setSource("{\"foo\" : \"bar\"}").setRefresh(true).get(); client().prepareIndex("test", "test", "1").setSource("{\"foobar\" : \"bar\"}").setRefresh(true).get(); @@ -1078,7 +1078,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { createIndex("test"); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService indexService = indicesService.indexService("test"); + IndexService indexService = indicesService.indexService(resolveIndex("test")); IndexShard shard = indexService.getShardOrNull(0); client().prepareIndex("test", "test", "0").setSource("{\"foo\" : \"bar\"}").setRefresh(true).get(); client().prepareIndex("test", "test", "1").setSource("{\"foobar\" : \"bar\"}").setRefresh(true).get(); @@ -1126,7 +1126,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { createIndex("test"); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService indexService = indicesService.indexService("test"); + IndexService indexService = indicesService.indexService(resolveIndex("test")); IndexShard shard = indexService.getShardOrNull(0); client().prepareIndex("test", "test", "0").setSource("{\"foo\" : \"bar\"}").setRefresh(true).get(); IndexSearcherWrapper wrapper = new IndexSearcherWrapper() { @@ -1179,7 +1179,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { 
.endObject().endObject().endObject()).get(); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService test = indicesService.indexService("testindexfortranslogsync"); + IndexService test = indicesService.indexService(resolveIndex("testindexfortranslogsync")); IndexShard shard = test.getShardOrNull(0); ShardRouting routing = new ShardRouting(shard.routingEntry()); test.removeShard(0, "b/c britta says so"); @@ -1206,7 +1206,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { .endObject().endObject().endObject()).get(); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService test = indicesService.indexService("index"); + IndexService test = indicesService.indexService(resolveIndex("index")); IndexShard shard = test.getShardOrNull(0); ShardRouting routing = new ShardRouting(shard.routingEntry()); test.removeShard(0, "b/c britta says so"); @@ -1235,7 +1235,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { .endObject().endObject().endObject()).get(); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService test = indicesService.indexService("index"); + IndexService test = indicesService.indexService(resolveIndex("index")); IndexShard shard = test.getShardOrNull(0); ShardRouting routing = new ShardRouting(shard.routingEntry()); test.removeShard(0, "b/c britta says so"); diff --git a/core/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java b/core/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java index afb9673508ad..4f08c4974439 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java @@ -161,7 +161,7 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { public void testShardAdditionAndRemoval() { 
createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 3).put(SETTING_NUMBER_OF_REPLICAS, 0).build()); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService test = indicesService.indexService("test"); + IndexService test = indicesService.indexService(resolveIndex("test")); MockController controller = new MockController(Settings.builder() .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "4mb").build()); @@ -194,7 +194,7 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0).build()); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService test = indicesService.indexService("test"); + IndexService test = indicesService.indexService(resolveIndex("test")); MockController controller = new MockController(Settings.builder() .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "5mb") @@ -248,7 +248,7 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { public void testThrottling() throws Exception { createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 3).put(SETTING_NUMBER_OF_REPLICAS, 0).build()); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService test = indicesService.indexService("test"); + IndexService test = indicesService.indexService(resolveIndex("test")); MockController controller = new MockController(Settings.builder() .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "4mb").build()); @@ -316,7 +316,7 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService indexService = indicesService.indexService("index"); + IndexService indexService = indicesService.indexService(resolveIndex("index")); IndexShard shard = 
indexService.getShardOrNull(0); assertNotNull(shard); @@ -342,7 +342,7 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { @Override protected long getIndexBufferRAMBytesUsed(IndexShard shard) { return shard.getIndexBufferRAMBytesUsed(); - } + } @Override protected void writeIndexingBufferAsync(IndexShard shard) { diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java index e34e1d6bd6bd..367f4cd46ce8 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java @@ -49,8 +49,9 @@ public class IndicesLifecycleListenerSingleNodeTests extends ESSingleNodeTestCas assertAcked(client().admin().indices().prepareCreate("test") .setSettings(SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0)); ensureGreen(); - IndexMetaData metaData = indicesService.indexService("test").getMetaData(); - ShardRouting shardRouting = indicesService.indexService("test").getShard(0).routingEntry(); + Index idx = resolveIndex("test"); + IndexMetaData metaData = indicesService.indexService(idx).getMetaData(); + ShardRouting shardRouting = indicesService.indexService(idx).getShard(0).routingEntry(); final AtomicInteger counter = new AtomicInteger(1); IndexEventListener countingListener = new IndexEventListener() { @Override @@ -89,10 +90,11 @@ public class IndicesLifecycleListenerSingleNodeTests extends ESSingleNodeTestCas counter.incrementAndGet(); } }; - indicesService.deleteIndex("test", "simon says"); + indicesService.deleteIndex(idx, "simon says"); try { NodeServicesProvider nodeServicesProvider = getInstanceFromNode(NodeServicesProvider.class); IndexService index = indicesService.createIndex(nodeServicesProvider, metaData, Arrays.asList(countingListener)); + idx = index.index(); 
ShardRouting newRouting = new ShardRouting(shardRouting); String nodeId = newRouting.currentNodeId(); ShardRoutingHelper.moveToUnassigned(newRouting, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "boom")); @@ -106,7 +108,7 @@ public class IndicesLifecycleListenerSingleNodeTests extends ESSingleNodeTestCas ShardRoutingHelper.moveToStarted(newRouting); shard.updateRoutingEntry(newRouting, true); } finally { - indicesService.deleteIndex("test", "simon says"); + indicesService.deleteIndex(idx, "simon says"); } assertEquals(7, counter.get()); } diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java index e9f1f6be5189..57a7f34e4b73 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java @@ -73,12 +73,14 @@ public class IndicesServiceTests extends ESSingleNodeTestCase { IndexMetaData meta = IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas( 1).build(); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", meta.getSettings()); - assertFalse("no shard location", indicesService.canDeleteShardContent(new ShardId("test", "_na_", 0), indexSettings)); + ShardId shardId = new ShardId(meta.getIndex(), 0); + assertFalse("no shard location", indicesService.canDeleteShardContent(shardId, indexSettings)); IndexService test = createIndex("test"); + shardId = new ShardId(test.index(), 0); assertTrue(test.hasShard(0)); - assertFalse("shard is allocated", indicesService.canDeleteShardContent(new ShardId("test", "_na_", 0), indexSettings)); + assertFalse("shard is allocated", indicesService.canDeleteShardContent(shardId, test.getIndexSettings())); test.removeShard(0, "boom"); - assertTrue("shard is removed", indicesService.canDeleteShardContent(new ShardId("test", "_na_", 0), indexSettings)); + 
assertTrue("shard is removed", indicesService.canDeleteShardContent(shardId, test.getIndexSettings())); } public void testDeleteIndexStore() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java index 239cb7a9096e..936e8ac600a5 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java +++ b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java @@ -42,7 +42,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { public void testModificationPreventsFlushing() throws InterruptedException { createIndex("test"); client().prepareIndex("test", "test", "1").setSource("{}").get(); - IndexService test = getInstanceFromNode(IndicesService.class).indexService("test"); + IndexService test = getInstanceFromNode(IndicesService.class).indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); @@ -86,7 +86,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { public void testSingleShardSuccess() throws InterruptedException { createIndex("test"); client().prepareIndex("test", "test", "1").setSource("{}").get(); - IndexService test = getInstanceFromNode(IndicesService.class).indexService("test"); + IndexService test = getInstanceFromNode(IndicesService.class).indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); @@ -106,7 +106,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { public void testSyncFailsIfOperationIsInFlight() throws InterruptedException { createIndex("test"); client().prepareIndex("test", "test", "1").setSource("{}").get(); - IndexService test = 
getInstanceFromNode(IndicesService.class).indexService("test"); + IndexService test = getInstanceFromNode(IndicesService.class).indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); @@ -126,7 +126,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { public void testSyncFailsOnIndexClosedOrMissing() throws InterruptedException { createIndex("test"); - IndexService test = getInstanceFromNode(IndicesService.class).indexService("test"); + IndexService test = getInstanceFromNode(IndicesService.class).indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); @@ -159,7 +159,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { public void testFailAfterIntermediateCommit() throws InterruptedException { createIndex("test"); client().prepareIndex("test", "test", "1").setSource("{}").get(); - IndexService test = getInstanceFromNode(IndicesService.class).indexService("test"); + IndexService test = getInstanceFromNode(IndicesService.class).indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); @@ -192,7 +192,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { public void testFailWhenCommitIsMissing() throws InterruptedException { createIndex("test"); client().prepareIndex("test", "test", "1").setSource("{}").get(); - IndexService test = getInstanceFromNode(IndicesService.class).indexService("test"); + IndexService test = getInstanceFromNode(IndicesService.class).indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); diff --git 
a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 155032f1d8c7..98d4f84c6ef6 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -37,6 +37,7 @@ import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationComman import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.index.Index; import org.elasticsearch.index.recovery.RecoveryStats; import org.elasticsearch.index.store.Store; import org.elasticsearch.indices.IndicesService; @@ -261,14 +262,16 @@ public class IndexRecoveryIT extends ESIntegTestCase { .execute().actionGet().getState(); logger.info("--> waiting for recovery to start both on source and target"); + final Index index = resolveIndex(INDEX_NAME); assertBusy(new Runnable() { @Override public void run() { + IndicesService indicesService = internalCluster().getInstance(IndicesService.class, nodeA); - assertThat(indicesService.indexServiceSafe(INDEX_NAME).getShard(0).recoveryStats().currentAsSource(), + assertThat(indicesService.indexServiceSafe(index).getShard(0).recoveryStats().currentAsSource(), equalTo(1)); indicesService = internalCluster().getInstance(IndicesService.class, nodeB); - assertThat(indicesService.indexServiceSafe(INDEX_NAME).getShard(0).recoveryStats().currentAsTarget(), + assertThat(indicesService.indexServiceSafe(index).getShard(0).recoveryStats().currentAsTarget(), equalTo(1)); } }); diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java index 78d5e2203f5a..d85849570cf1 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java +++ 
b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java @@ -65,7 +65,7 @@ public class UpdateSettingsIT extends ESIntegTestCase { IndexMetaData indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); assertEquals(indexMetaData.getSettings().get("index.refresh_interval"), "-1"); for (IndicesService service : internalCluster().getInstances(IndicesService.class)) { - IndexService indexService = service.indexService("test"); + IndexService indexService = service.indexService(resolveIndex("test")); if (indexService != null) { assertEquals(indexService.getIndexSettings().getRefreshInterval().millis(), -1); assertEquals(indexService.getIndexSettings().getFlushThresholdSize().bytes(), 1024); @@ -79,7 +79,7 @@ public class UpdateSettingsIT extends ESIntegTestCase { indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); assertNull(indexMetaData.getSettings().get("index.refresh_interval")); for (IndicesService service : internalCluster().getInstances(IndicesService.class)) { - IndexService indexService = service.indexService("test"); + IndexService indexService = service.indexService(resolveIndex("test")); if (indexService != null) { assertEquals(indexService.getIndexSettings().getRefreshInterval().millis(), 1000); assertEquals(indexService.getIndexSettings().getFlushThresholdSize().bytes(), 1024); diff --git a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java index 8a9fa1918549..35624085c940 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java @@ -47,6 +47,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.DiscoverySettings; import 
org.elasticsearch.gateway.GatewayAllocator; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; @@ -376,12 +377,13 @@ public class RareClusterStateIT extends ESIntegTestCase { putMappingResponse.set(e); } }); + final Index index = resolveIndex("index"); // Wait for mappings to be available on master assertBusy(new Runnable() { @Override public void run() { final IndicesService indicesService = internalCluster().getInstance(IndicesService.class, master); - final IndexService indexService = indicesService.indexServiceSafe("index"); + final IndexService indexService = indicesService.indexServiceSafe(index); assertNotNull(indexService); final MapperService mapperService = indexService.mapperService(); DocumentMapper mapper = mapperService.documentMapper("type"); diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index 948c005bf330..b1f94f203e48 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -336,10 +336,11 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { // we have to do this in two steps as we now do async shard fetching before assigning, so the change to the // allocation filtering may not have immediate effect // TODO: we should add an easier to do this. It's too much of a song and dance.. 
+ Index index = resolveIndex("test"); assertBusy(new Runnable() { @Override public void run() { - assertTrue(internalCluster().getInstance(IndicesService.class, node4).hasIndex("test")); + assertTrue(internalCluster().getInstance(IndicesService.class, node4).hasIndex(index)); } }); diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java b/core/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java index a47217e30483..4b514763f725 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java +++ b/core/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java @@ -165,7 +165,7 @@ public class RecoveriesCollectionTests extends ESSingleNodeTestCase { long startRecovery(RecoveriesCollection collection, RecoveryTargetService.RecoveryListener listener, TimeValue timeValue) { IndicesService indexServices = getInstanceFromNode(IndicesService.class); - IndexShard indexShard = indexServices.indexServiceSafe("test").getShardOrNull(0); + IndexShard indexShard = indexServices.indexServiceSafe(resolveIndex("test")).getShardOrNull(0); final DiscoveryNode sourceNode = new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT); return collection.startRecovery(indexShard, sourceNode, listener, timeValue); } diff --git a/core/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingIT.java b/core/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingIT.java index 0c7c069ec342..8afbdca8c2e2 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.Index; import 
org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DocumentMapper; @@ -143,6 +144,7 @@ public class ParentFieldLoadingIT extends ESIntegTestCase { .setUpdateAllTypes(true) .get(); assertAcked(putMappingResponse); + Index test = resolveIndex("test"); assertBusy(new Runnable() { @Override public void run() { @@ -152,7 +154,7 @@ public class ParentFieldLoadingIT extends ESIntegTestCase { boolean verified = false; IndicesService indicesService = internalCluster().getInstance(IndicesService.class, nodeName); - IndexService indexService = indicesService.indexService("test"); + IndexService indexService = indicesService.indexService(test); if (indexService != null) { MapperService mapperService = indexService.mapperService(); DocumentMapper documentMapper = mapperService.documentMapper("child"); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java index 98a23b3e1fd0..8a86a0a1fb4b 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java @@ -76,7 +76,7 @@ public class GeoShapeIntegrationTests extends ESIntegTestCase { // left orientation test IndicesService indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName)); - IndexService indexService = indicesService.indexService(idxName); + IndexService indexService = indicesService.indexService(resolveIndex(idxName)); MappedFieldType fieldType = indexService.mapperService().fullName("location"); assertThat(fieldType, instanceOf(GeoShapeFieldMapper.GeoShapeFieldType.class)); @@ -88,7 +88,7 @@ public class GeoShapeIntegrationTests extends ESIntegTestCase { // right orientation test indicesService = 
internalCluster().getInstance(IndicesService.class, findNodeName(idxName+"2")); - indexService = indicesService.indexService(idxName+"2"); + indexService = indicesService.indexService(resolveIndex((idxName+"2"))); fieldType = indexService.mapperService().fullName("location"); assertThat(fieldType, instanceOf(GeoShapeFieldMapper.GeoShapeFieldType.class)); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index a3161f4090f9..aea35a3acd0f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -41,6 +41,7 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; @@ -95,6 +96,7 @@ import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.env.Environment; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.MockEngineFactoryPlugin; import org.elasticsearch.index.IndexSettings; @@ -836,7 +838,7 @@ public abstract class ESIntegTestCase extends ESTestCase { assertThat(nodes, Matchers.not(Matchers.emptyIterable())); for (String node : nodes) { IndicesService indicesService = internalCluster().getInstance(IndicesService.class, 
node); - IndexService indexService = indicesService.indexService(index); + IndexService indexService = indicesService.indexService(resolveIndex(index)); assertThat("index service doesn't exists on " + node, indexService, notNullValue()); DocumentMapper documentMapper = indexService.mapperService().documentMapper(type); assertThat("document mapper doesn't exists on " + node, documentMapper, notNullValue()); @@ -2041,7 +2043,7 @@ public abstract class ESIntegTestCase extends ESTestCase { * of the provided index. */ protected String routingKeyForShard(String index, String type, int shard) { - return internalCluster().routingKeyForShard(index, type, shard, getRandom()); + return internalCluster().routingKeyForShard(resolveIndex(index), type, shard, getRandom()); } /** @@ -2144,4 +2146,11 @@ public abstract class ESIntegTestCase extends ESTestCase { public @interface SuppressNetworkMode { } + public static Index resolveIndex(String index) { + GetIndexResponse getIndexResponse = client().admin().indices().prepareGetIndex().setIndices(index).get(); + assertTrue("index " + index + " not found", getIndexResponse.getSettings().containsKey(index)); + String uuid = getIndexResponse.getSettings().get(index).get(IndexMetaData.SETTING_INDEX_UUID); + return new Index(index, uuid); + } + } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index fc7134002623..6e16d60eafc0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -22,6 +22,7 @@ import org.apache.lucene.util.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import 
org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; @@ -38,6 +39,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.env.Environment; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.node.MockNode; @@ -255,7 +257,14 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { assertThat(health.getStatus(), lessThanOrEqualTo(ClusterHealthStatus.YELLOW)); assertThat("Cluster must be a single node cluster", health.getNumberOfDataNodes(), equalTo(1)); IndicesService instanceFromNode = getInstanceFromNode(IndicesService.class); - return instanceFromNode.indexServiceSafe(index); + return instanceFromNode.indexServiceSafe(resolveIndex(index)); + } + + public Index resolveIndex(String index) { + GetIndexResponse getIndexResponse = client().admin().indices().prepareGetIndex().setIndices(index).get(); + assertTrue("index " + index + " not found", getIndexResponse.getSettings().containsKey(index)); + String uuid = getIndexResponse.getSettings().get(index).get(IndexMetaData.SETTING_INDEX_UUID); + return new Index(index, uuid); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 04548eb85c96..82c7db11d69c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -66,6 +66,7 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.http.HttpServerTransport; +import 
org.elasticsearch.index.Index; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.CommitStats; @@ -1697,7 +1698,7 @@ public final class InternalTestCluster extends TestCluster { } } - synchronized String routingKeyForShard(String index, String type, int shard, Random random) { + synchronized String routingKeyForShard(Index index, String type, int shard, Random random) { assertThat(shard, greaterThanOrEqualTo(0)); assertThat(shard, greaterThanOrEqualTo(0)); for (NodeAndClient n : nodes.values()) { @@ -1710,7 +1711,7 @@ public final class InternalTestCluster extends TestCluster { OperationRouting operationRouting = getInstanceFromNode(OperationRouting.class, node); while (true) { String routing = RandomStrings.randomAsciiOfLength(random, 10); - final int targetShard = operationRouting.indexShards(clusterService.state(), index, type, null, routing).shardId().getId(); + final int targetShard = operationRouting.indexShards(clusterService.state(), index.getName(), type, null, routing).shardId().getId(); if (shard == targetShard) { return routing; } From 12a6f36a341209c2e7e3958ce6f3dbca381c6ff8 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 9 Mar 2016 15:13:23 -0500 Subject: [PATCH 135/320] Log shard after translog snapshot during recovery --- .../elasticsearch/indices/recovery/RecoverySourceHandler.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index b92e2066af29..15b9b59dd28f 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -137,7 +137,7 @@ public class RecoverySourceHandler { } } - logger.trace("snapshot translog for recovery. 
current size is [{}]", translogView.totalOperations()); + logger.trace("{} snapshot translog for recovery. current size is [{}]", shard.shardId(), translogView.totalOperations()); try { phase2(translogView.snapshot()); } catch (Throwable e) { From c48e6b86f6df52bf73be724596ece6e95e86f807 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 9 Mar 2016 13:24:46 -0800 Subject: [PATCH 136/320] Limit update to fields and objects which were actually modified, and simplify root update creation. --- .../index/mapper/DocumentParser.java | 73 +++++++++++++------ .../index/mapper/DocumentParserTests.java | 35 ++------- .../index/mapper/MockFieldMapper.java | 4 +- 3 files changed, 58 insertions(+), 54 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 59a382fbc0c4..ef4fc5dd4a0d 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -24,6 +24,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Set; @@ -223,13 +224,13 @@ final class DocumentParser implements Closeable { // will be processed in a contiguous block. When the prefix is no longer seen, we pop the extra elements // off the stack, merging them upwards into the existing mappers. 
Collections.sort(dynamicMappers, (Mapper o1, Mapper o2) -> o1.name().compareTo(o2.name())); + Iterator dynamicMapperItr = dynamicMappers.iterator(); List parentMappers = new ArrayList<>(); - // create an empty root object which updates will be propagated into - RootObjectMapper.Builder rootBuilder = new RootObjectMapper.Builder(docMapper.type()); - RootObjectMapper.BuilderContext context = new RootObjectMapper.BuilderContext(Settings.EMPTY, new ContentPath()); - parentMappers.add(rootBuilder.build(context)); + Mapper firstUpdate = dynamicMapperItr.next(); + parentMappers.add(createUpdate(mapping.root(), firstUpdate.name().split("\\."), 0, firstUpdate)); Mapper previousMapper = null; - for (Mapper newMapper : dynamicMappers) { + while (dynamicMapperItr.hasNext()) { + Mapper newMapper = dynamicMapperItr.next(); if (previousMapper != null && newMapper.name().equals(previousMapper.name())) { // We can see the same mapper more than once, for example, if we had foo.bar and foo.baz, where // foo did not yet exist. This will create 2 copies in dynamic mappings, which should be identical. 
@@ -245,52 +246,76 @@ final class DocumentParser implements Closeable { parentMappers.get(keepBefore).simpleName().equals(nameParts[keepBefore - 1])) { ++keepBefore; } - popMappers(parentMappers, keepBefore); + popMappers(parentMappers, keepBefore, true); - // Add parent mappers that don't exist in dynamic mappers - while (keepBefore < nameParts.length) { - ObjectMapper parent = parentMappers.get(parentMappers.size() - 1); - Mapper newLast = parent.getMapper(nameParts[keepBefore - 1]); - if (newLast == null) { - String objectName = nameParts[keepBefore - 1]; + if (keepBefore < nameParts.length) { + String updateParentName = nameParts[keepBefore - 1]; + final ObjectMapper lastParent = parentMappers.get(parentMappers.size() - 1); + Mapper updateParent = lastParent.getMapper(updateParentName); + if (updateParent == null) { + // the parent we need is not on the stack, so look it up in the full mappings if (keepBefore > 1) { // only prefix with parent mapper if the parent mapper isn't the root (which has a fake name) - objectName = parent.name() + '.' + objectName; + updateParentName = lastParent.name() + '.' 
+ updateParentName; } - newLast = docMapper.objectMappers().get(objectName); + updateParent = docMapper.objectMappers().get(updateParentName); } - assert newLast instanceof ObjectMapper; - parentMappers.add((ObjectMapper)newLast); - ++keepBefore; + assert updateParent instanceof ObjectMapper; + newMapper = createUpdate((ObjectMapper)updateParent, nameParts, keepBefore, newMapper); } if (newMapper instanceof ObjectMapper) { parentMappers.add((ObjectMapper)newMapper); } else { - addToLastMapper(parentMappers, newMapper); + addToLastMapper(parentMappers, newMapper, true); } } - popMappers(parentMappers, 1); + popMappers(parentMappers, 1, true); assert parentMappers.size() == 1; return mapping.mappingUpdate(parentMappers.get(0)); } - private static void popMappers(List parentMappers, int keepBefore) { + private static void popMappers(List parentMappers, int keepBefore, boolean merge) { assert keepBefore >= 1; // never remove the root mapper // pop off parent mappers not needed by the current mapper, // merging them backwards since they are immutable for (int i = parentMappers.size() - 1; i >= keepBefore; --i) { - addToLastMapper(parentMappers, parentMappers.remove(i)); + addToLastMapper(parentMappers, parentMappers.remove(i), merge); } } - private static void addToLastMapper(List parentMappers, Mapper mapper) { + /** + * Adds a mapper as an update into the last mapper. If merge is true, the new mapper + * will be merged in with other child mappers of the last parent, otherwise it will be a new update. 
+ */ + private static void addToLastMapper(List parentMappers, Mapper mapper, boolean merge) { assert parentMappers.size() >= 1; int lastIndex = parentMappers.size() - 1; ObjectMapper withNewMapper = parentMappers.get(lastIndex).mappingUpdate(mapper); - ObjectMapper merged = parentMappers.get(lastIndex).merge(withNewMapper, false); - parentMappers.set(lastIndex, merged); + if (merge) { + withNewMapper = parentMappers.get(lastIndex).merge(withNewMapper, false); + } + parentMappers.set(lastIndex, withNewMapper); + } + + /** Build an update for the parent which will contain the given mapper and any intermediate fields. */ + private static ObjectMapper createUpdate(ObjectMapper parent, String[] nameParts, int i, Mapper mapper) { + List parentMappers = new ArrayList<>(); + ObjectMapper previousIntermediate = parent; + for (; i < nameParts.length - 1; ++i) { + Mapper intermediate = previousIntermediate.getMapper(nameParts[i]); + assert intermediate instanceof ObjectMapper; + parentMappers.add((ObjectMapper)intermediate); + previousIntermediate = (ObjectMapper)intermediate; + } + if (parentMappers.isEmpty() == false) { + // add the new mapper to the stack, and pop down to the original parent level + addToLastMapper(parentMappers, mapper, false); + popMappers(parentMappers, 1, false); + mapper = parentMappers.get(0); + } + return parent.mappingUpdate(mapper); } static void parseObjectOrNested(ParseContext context, ObjectMapper mapper, boolean atRoot) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java index 48684d50399a..cbc858b642db 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -70,38 +70,12 @@ public class DocumentParserTests extends ESSingleNodeTestCase { assertNotNull(doc.rootDoc().getField(UidFieldMapper.NAME)); 
} - public void testDotsAsObject() throws Exception { - DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("foo").startObject("properties") - .startObject("bar").startObject("properties") - .startObject("baz").field("type", "integer") - .endObject().endObject().endObject().endObject().endObject().endObject().endObject().endObject().string(); - DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - - BytesReference bytes = XContentFactory.jsonBuilder() - .startObject() - .field("foo.bar.baz", 123) - .startObject("foo") - .field("bar.baz", 456) - .endObject() - .startObject("foo.bar") - .field("baz", 789) - .endObject() - .endObject().bytes(); - ParsedDocument doc = mapper.parse("test", "type", "1", bytes); - String[] values = doc.rootDoc().getValues("foo.bar.baz"); - assertEquals(3, values.length); - assertEquals("123", values[0]); - assertEquals("456", values[1]); - assertEquals("789", values[2]); - } - DocumentMapper createDummyMapping(MapperService mapperService) throws Exception { String mapping = jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("a").startObject("properties") - .startObject("b").field("type", "object") - .endObject().endObject().endObject().endObject().endObject().endObject().string(); + .startObject("b").field("type", "object").startObject("properties") + .startObject("c").field("type", "object") + .endObject().endObject().endObject().endObject().endObject().endObject().endObject().endObject().string(); DocumentMapper defaultMapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping)); return defaultMapper; @@ -141,6 +115,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase { assertNotNull(aMapper); assertTrue(aMapper instanceof ObjectMapper); 
assertNotNull(((ObjectMapper)aMapper).getMapper("foo")); + assertNull(((ObjectMapper)aMapper).getMapper("b")); } public void testMultipleSubfieldMappingUpdate() throws Exception { @@ -154,6 +129,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase { assertTrue(aMapper instanceof ObjectMapper); assertNotNull(((ObjectMapper)aMapper).getMapper("foo")); assertNotNull(((ObjectMapper)aMapper).getMapper("bar")); + assertNull(((ObjectMapper)aMapper).getMapper("b")); } public void testDeepSubfieldMappingUpdate() throws Exception { @@ -166,6 +142,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase { Mapper bMapper = ((ObjectMapper)aMapper).getMapper("b"); assertTrue(bMapper instanceof ObjectMapper); assertNotNull(((ObjectMapper)bMapper).getMapper("foo")); + assertNull(((ObjectMapper)bMapper).getMapper("c")); } public void testObjectMappingUpdate() throws Exception { diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java index fec3f312e5c5..8a28a16220cd 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java @@ -24,6 +24,7 @@ import java.util.List; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; // this sucks how much must be overridden just do get a dummy field mapper... 
@@ -35,7 +36,8 @@ public class MockFieldMapper extends FieldMapper { } public MockFieldMapper(String fullName, MappedFieldType fieldType) { - super(findSimpleName(fullName), setName(fullName, fieldType), setName(fullName, fieldType), dummySettings, null, null); + super(findSimpleName(fullName), setName(fullName, fieldType), setName(fullName, fieldType), dummySettings, + MultiFields.empty(), new CopyTo.Builder().build()); } static MappedFieldType setName(String fullName, MappedFieldType fieldType) { From 0344af07507c1fe46be12fbfa1987545456630f8 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 9 Mar 2016 13:26:33 -0800 Subject: [PATCH 137/320] Remove unnecessary comment --- .../main/java/org/elasticsearch/index/mapper/DocumentParser.java | 1 - 1 file changed, 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index ef4fc5dd4a0d..36c1cf106f4a 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -502,7 +502,6 @@ final class DocumentParser implements Closeable { } else if (dynamic == ObjectMapper.Dynamic.TRUE) { Mapper.Builder builder = context.root().findTemplateBuilder(context, arrayFieldName, "object"); if (builder == null) { - // TODO: shouldn't this create a default object mapper builder? parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName); return; } From 38241a5d8baf887e3098c27fc0f425fc06774afe Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 9 Mar 2016 10:08:58 -0500 Subject: [PATCH 138/320] [reindex] Implement CompositeIndicesRequest Implements CompositeIndicesRequest on UpdateByQueryRequest and ReindexRequest so that plugins can reason about the request. 
In both cases this implementation is imperfect but useful because instead of listing all requests that make up the request it instead attempts to make dummy requests that represent the requests that it will later make. --- .../index/reindex/ReindexRequest.java | 32 +++++++++++- .../index/reindex/UpdateByQueryRequest.java | 38 ++++++++++++-- .../reindex/UpdateByQueryRequestTests.java | 49 +++++++++++++++++++ 3 files changed, 114 insertions(+), 5 deletions(-) create mode 100644 modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryRequestTests.java diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java index 1ac6117d02bc..d51fb7e8bc18 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java @@ -19,19 +19,31 @@ package org.elasticsearch.index.reindex; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.CompositeIndicesRequest; +import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.uid.Versions; -import java.io.IOException; +import static java.util.Collections.unmodifiableList; import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.index.VersionType.INTERNAL; -public class ReindexRequest extends AbstractBulkIndexByScrollRequest { +/** + * Request to reindex some documents from one index to another. This implements CompositeIndicesRequest but in a misleading way. 
Rather than + * returning all the subrequests that it will make it tries to return a representative set of subrequests. This is best-effort for a bunch + * of reasons, not least of which that scripts are allowed to change the destination request in drastic ways, including changing the index + * to which documents are written. + */ +public class ReindexRequest extends AbstractBulkIndexByScrollRequest implements CompositeIndicesRequest { /** * Prototype for index requests. */ @@ -123,4 +135,20 @@ public class ReindexRequest extends AbstractBulkIndexByScrollRequestnot + * accurate since it returns a prototype {@link IndexRequest} and not the actual requests that will be issued as part of the + * execution of this request. Additionally, scripts can modify the underlying {@link IndexRequest} and change values such as the index, + * type, {@link org.elasticsearch.action.support.IndicesOptions}. In short - only use this for very course reasoning about the request. + * + * @return a list comprising of the {@link SearchRequest} and the prototype {@link IndexRequest} + */ + @Override + public List subRequests() { + assert getSearchRequest() != null; + assert getDestination() != null; + return unmodifiableList(Arrays.asList(getSearchRequest(), getDestination())); + } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequest.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequest.java index b27753938773..915921d60772 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequest.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequest.java @@ -19,13 +19,23 @@ package org.elasticsearch.index.reindex; +import java.util.ArrayList; +import java.util.List; + +import org.elasticsearch.action.CompositeIndicesRequest; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.index.IndexRequest; import 
org.elasticsearch.action.search.SearchRequest; +import static java.util.Collections.unmodifiableList; + /** - * Request to reindex a set of documents where they are without changing their - * locations or IDs. + * Request to update some documents. That means you can't change their type, id, index, or anything like that. This implements + * CompositeIndicesRequest but in a misleading way. Rather than returning all the subrequests that it will make it tries to return a + * representative set of subrequests. This is best-effort but better than {@linkplain ReindexRequest} because scripts can't change the + * destination index and things. */ -public class UpdateByQueryRequest extends AbstractBulkIndexByScrollRequest { +public class UpdateByQueryRequest extends AbstractBulkIndexByScrollRequest implements CompositeIndicesRequest { /** * Ingest pipeline to set on index requests made by this action. */ @@ -64,4 +74,26 @@ public class UpdateByQueryRequest extends AbstractBulkIndexByScrollRequestnot + * accurate since it returns dummy {@link IndexRequest}s and not the actual requests that will be issued as part of the + * execution of this request. + * + * @return a list comprising of the {@link SearchRequest} and dummy {@link IndexRequest}s + */ + @Override + public List subRequests() { + assert getSearchRequest() != null; + List subRequests = new ArrayList<>(); + // One dummy IndexRequest per destination index. 
+ for (String index : getSearchRequest().indices()) { + IndexRequest request = new IndexRequest(); + request.index(index); + subRequests.add(request); + } + subRequests.add(getSearchRequest()); + return unmodifiableList(subRequests); + }; } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryRequestTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryRequestTests.java new file mode 100644 index 000000000000..f67807291431 --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryRequestTests.java @@ -0,0 +1,49 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex; + +import java.util.List; + +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.test.ESTestCase; + +import static org.apache.lucene.util.TestUtil.randomSimpleString; +import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.sameInstance; + +public class UpdateByQueryRequestTests extends ESTestCase { + public void testUpdateByQueryRequestImplementsCompositeIndicesRequestWithDummies() { + int numIndices = between(1, 100); + String[] indices = new String[numIndices]; + for (int i = 0; i < numIndices; i++) { + indices[i] = randomSimpleString(random(), 1, 30); + } + UpdateByQueryRequest request = new UpdateByQueryRequest(new SearchRequest(indices)); + List subRequests = request.subRequests(); + assertThat(subRequests, hasSize(numIndices + 1)); + for (int i = 0; i < numIndices; i++) { + assertThat(subRequests.get(i).indices(), arrayWithSize(1)); + assertEquals(indices[i], subRequests.get(i).indices()[0]); + } + assertThat(subRequests.get(numIndices), sameInstance(request.getSearchRequest())); + } +} From 61f39e6c92ddbfb7dcf117f790e92abed93c91a4 Mon Sep 17 00:00:00 2001 From: Nicholas Knize Date: Thu, 19 Nov 2015 11:14:12 -0600 Subject: [PATCH 139/320] GeoPointV2 update docs and query builders This commit updates the documentation for GeoPointField by removing all references to the coerce and doc_values parameters. DocValues are enabled in lucene GeoPointField by default (required for boundary filtering). The QueryBuilders are updated to automatically normalize points (ignoring the coerce parameter) for any index created onOrAfter version 2.2. 
--- .../index/query/GeoBoundingBoxQueryBuilder.java | 4 ++-- .../index/query/GeoDistanceQueryBuilder.java | 4 ++-- .../index/query/GeoDistanceRangeQueryBuilder.java | 2 +- .../index/query/GeoPolygonQueryBuilder.java | 4 ++-- docs/reference/mapping/types/geo-point.asciidoc | 11 ----------- .../query-dsl/geo-bounding-box-query.asciidoc | 3 --- docs/reference/query-dsl/geo-distance-query.asciidoc | 5 ----- docs/reference/query-dsl/geo-polygon-query.asciidoc | 3 --- 8 files changed, 7 insertions(+), 29 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java index 05c2a74bb9f4..2c906dc7cb1f 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java @@ -250,7 +250,8 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder use prefix encoded postings format diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java index 784c924efcf9..b11b57df1758 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java @@ -219,18 +219,18 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder>:: - - Normalize longitude and latitude values to a standard -180:180 / -90:90 - coordinate system. Accepts `true` and `false` (default). - -<>:: - - Should the field be stored on disk in a column-stride fashion, so that it - can later be used for sorting, aggregations, or scripting? Accepts `true` - (default) or `false`. 
- <>:: Should the geo-point also be indexed as a geohash in the `.geohash` diff --git a/docs/reference/query-dsl/geo-bounding-box-query.asciidoc b/docs/reference/query-dsl/geo-bounding-box-query.asciidoc index c52bcb93e7da..90ae7367197f 100644 --- a/docs/reference/query-dsl/geo-bounding-box-query.asciidoc +++ b/docs/reference/query-dsl/geo-bounding-box-query.asciidoc @@ -52,9 +52,6 @@ Then the following simple query can be executed with a |Option |Description |`_name` |Optional name field to identify the filter -|`coerce` |Set to `true` to normalize longitude and latitude values to a -standard -180:180 / -90:90 coordinate system. (default is `false`). - |`ignore_malformed` |Set to `true` to accept geo points with invalid latitude or longitude (default is `false`). diff --git a/docs/reference/query-dsl/geo-distance-query.asciidoc b/docs/reference/query-dsl/geo-distance-query.asciidoc index c5b6029dc2f2..7ea380bdad29 100644 --- a/docs/reference/query-dsl/geo-distance-query.asciidoc +++ b/docs/reference/query-dsl/geo-distance-query.asciidoc @@ -162,11 +162,6 @@ The following are options allowed on the filter: Optional name field to identify the query -`coerce`:: - - Set to `true` to normalize longitude and latitude values to a standard -180:180 / -90:90 - coordinate system. (default is `false`). - `ignore_malformed`:: Set to `true` to accept geo points with invalid latitude or diff --git a/docs/reference/query-dsl/geo-polygon-query.asciidoc b/docs/reference/query-dsl/geo-polygon-query.asciidoc index 306b2dd2d849..269aeed09cae 100644 --- a/docs/reference/query-dsl/geo-polygon-query.asciidoc +++ b/docs/reference/query-dsl/geo-polygon-query.asciidoc @@ -34,9 +34,6 @@ points. Here is an example: |Option |Description |`_name` |Optional name field to identify the filter -|`coerce` |Set to `true` to normalize longitude and latitude values to a -standard -180:180 / -90:90 coordinate system. (default is `false`). 
- |`ignore_malformed` |Set to `true` to accept geo points with invalid latitude or longitude (default is `false`). |======================================================================= From 55635d5de126e27d68f83e28ac43fba95a172324 Mon Sep 17 00:00:00 2001 From: Nicholas Knize Date: Thu, 3 Dec 2015 09:53:19 -0600 Subject: [PATCH 140/320] update coerce and breaking changes documentation --- docs/reference/mapping/params/coerce.asciidoc | 1 - docs/reference/migration/migrate_2_2.asciidoc | 10 ++++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/docs/reference/mapping/params/coerce.asciidoc b/docs/reference/mapping/params/coerce.asciidoc index c9491607a6b5..0121c307230e 100644 --- a/docs/reference/mapping/params/coerce.asciidoc +++ b/docs/reference/mapping/params/coerce.asciidoc @@ -12,7 +12,6 @@ For instance: * Strings will be coerced to numbers. * Floating points will be truncated for integer values. -* Lon/lat geo-points will be normalized to a standard -180:180 / -90:90 coordinate system. For instance: diff --git a/docs/reference/migration/migrate_2_2.asciidoc b/docs/reference/migration/migrate_2_2.asciidoc index 39c059e7f479..9611d86a2ac4 100644 --- a/docs/reference/migration/migrate_2_2.asciidoc +++ b/docs/reference/migration/migrate_2_2.asciidoc @@ -4,6 +4,16 @@ This section discusses the changes that you need to be aware of when migrating your application to Elasticsearch 2.2. +[[float]] +=== Mapping APIs + +==== Geo Point Type + +The `geo_point` format has been changed to reduce index size and the time required to both index and query +geo point data. To make these performance improvements possible both `doc_values` and `coerce` are required +and therefore cannot be changed. For this reason the `doc_values` and `coerce` parameters have been removed +from the <> field mapping. 
+ [float] === Scripting and security From 06487b0ac51633c4f192e6982d3682be1d2babd3 Mon Sep 17 00:00:00 2001 From: Ali Beyad Date: Mon, 29 Feb 2016 10:02:04 -0500 Subject: [PATCH 141/320] Change internal representation of suggesters to instances of SuggestBuilder instead of raw bytes. --- .../resources/checkstyle_suppressions.xml | 4 - .../action/suggest/ShardSuggestRequest.java | 14 +- .../action/suggest/SuggestRequest.java | 58 ++---- .../action/suggest/SuggestRequestBuilder.java | 15 +- .../suggest/TransportSuggestAction.java | 33 ++-- .../org/elasticsearch/client/Requests.java | 6 +- .../action/search/RestMultiSearchAction.java | 13 +- .../rest/action/search/RestSearchAction.java | 13 +- .../action/suggest/RestSuggestAction.java | 27 ++- .../rest/action/support/RestActions.java | 8 +- .../elasticsearch/search/SearchModule.java | 12 +- .../elasticsearch/search/SearchService.java | 28 +-- .../search/builder/SearchSourceBuilder.java | 45 ++--- .../search/suggest/SuggestBuilder.java | 7 +- .../search/suggest/Suggester.java | 2 +- .../search/suggest/Suggesters.java | 2 +- .../suggest/term/TermSuggestionBuilder.java | 5 +- .../search/MultiSearchRequestTests.java | 10 +- .../search/SearchModuleTests.java | 6 +- .../builder/SearchSourceBuilderTests.java | 37 ++-- .../AbstractSuggestionBuilderTestCase.java | 7 +- .../suggest/CompletionSuggestSearchIT.java | 31 +-- .../ContextCompletionSuggestSearchIT.java | 3 + .../search/suggest/CustomSuggester.java | 1 + .../search/suggest/CustomSuggesterPlugin.java | 2 +- .../suggest/CustomSuggesterSearchIT.java | 80 ++++++-- .../search/suggest/SuggestBuilderTests.java | 182 ++++++++++++++---- .../CompletionSuggesterBuilderTests.java | 6 +- .../phrase/PhraseSuggestionBuilderTests.java | 1 + .../term/TermSuggestionBuilderTests.java | 14 +- .../search/MockSearchService.java | 5 +- 31 files changed, 411 insertions(+), 266 deletions(-) diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml 
b/buildSrc/src/main/resources/checkstyle_suppressions.xml index e06a653d09c0..c7e6eec0a339 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -208,7 +208,6 @@ - @@ -269,7 +268,6 @@ - @@ -1399,7 +1397,6 @@ - @@ -1431,7 +1428,6 @@ - diff --git a/core/src/main/java/org/elasticsearch/action/suggest/ShardSuggestRequest.java b/core/src/main/java/org/elasticsearch/action/suggest/ShardSuggestRequest.java index 80facf74878a..ad15bd27440a 100644 --- a/core/src/main/java/org/elasticsearch/action/suggest/ShardSuggestRequest.java +++ b/core/src/main/java/org/elasticsearch/action/suggest/ShardSuggestRequest.java @@ -20,10 +20,10 @@ package org.elasticsearch.action.suggest; import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.suggest.SuggestBuilder; import java.io.IOException; @@ -32,29 +32,29 @@ import java.io.IOException; */ public final class ShardSuggestRequest extends BroadcastShardRequest { - private BytesReference suggestSource; + private SuggestBuilder suggest; public ShardSuggestRequest() { } ShardSuggestRequest(ShardId shardId, SuggestRequest request) { super(shardId, request); - this.suggestSource = request.suggest(); + this.suggest = request.suggest(); } - public BytesReference suggest() { - return suggestSource; + public SuggestBuilder suggest() { + return suggest; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - suggestSource = in.readBytesReference(); + suggest = SuggestBuilder.PROTOTYPE.readFrom(in); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeBytesReference(suggestSource); + suggest.writeTo(out); } } diff --git 
a/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java b/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java index 5dcb39fa14bb..7f3f701f3e86 100644 --- a/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java +++ b/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java @@ -21,28 +21,25 @@ package org.elasticsearch.action.suggest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.broadcast.BroadcastRequest; -import org.elasticsearch.client.Requests; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.search.suggest.SuggestBuilder; -import org.elasticsearch.search.suggest.SuggestionBuilder; import java.io.IOException; import java.util.Arrays; +import java.util.Objects; /** * A request to get suggestions for corrections of phrases. Best created with * {@link org.elasticsearch.client.Requests#suggestRequest(String...)}. *

      - * The request requires the suggest query source to be set either using - * {@link #suggest(org.elasticsearch.common.bytes.BytesReference)} / {@link #suggest(org.elasticsearch.common.bytes.BytesReference)} - * or by using {@link #suggest(org.elasticsearch.search.suggest.SuggestBuilder)} - * (Best created using the {link @org.elasticsearch.search.suggest.SuggestBuilders)}). + * The request requires the suggest query source to be set using + * {@link #suggest(org.elasticsearch.search.suggest.SuggestBuilder)} * * @see SuggestResponse * @see org.elasticsearch.client.Client#suggest(SuggestRequest) @@ -57,7 +54,7 @@ public final class SuggestRequest extends BroadcastRequest { @Nullable private String preference; - private BytesReference suggestSource; + private SuggestBuilder suggestSource; public SuggestRequest() { } @@ -77,40 +74,21 @@ public final class SuggestRequest extends BroadcastRequest { } /** - * The Phrase to get correction suggestions for + * The Phrase to get correction suggestions for */ - public BytesReference suggest() { + public SuggestBuilder suggest() { return suggestSource; } - + /** - * set a new source for the suggest query + * set a new source for the suggest query */ - public SuggestRequest suggest(BytesReference suggestSource) { + public SuggestRequest suggest(SuggestBuilder suggestSource) { + Objects.requireNonNull(suggestSource, "suggestSource must not be null"); this.suggestSource = suggestSource; return this; } - /** - * set a new source using a {@link org.elasticsearch.search.suggest.SuggestBuilder} - * for phrase and term suggestion lookup - */ - public SuggestRequest suggest(SuggestBuilder suggestBuilder) { - return suggest(suggestBuilder.buildAsBytes(Requests.CONTENT_TYPE)); - } - - /** - * set a new source using a {@link org.elasticsearch.search.suggest.SuggestionBuilder} - * for completion suggestion lookup - */ - public SuggestRequest suggest(SuggestionBuilder suggestionBuilder) { - return 
suggest(suggestionBuilder.buildAsBytes(Requests.CONTENT_TYPE)); - } - - public SuggestRequest suggest(String source) { - return suggest(new BytesArray(source)); - } - /** * A comma separated list of routing values to control the shards the search will be executed on. */ @@ -148,22 +126,26 @@ public final class SuggestRequest extends BroadcastRequest { super.readFrom(in); routing = in.readOptionalString(); preference = in.readOptionalString(); - suggest(in.readBytesReference()); + suggest(SuggestBuilder.PROTOTYPE.readFrom(in)); } @Override public void writeTo(StreamOutput out) throws IOException { + Objects.requireNonNull(suggestSource, "suggestSource must not be null"); super.writeTo(out); out.writeOptionalString(routing); out.writeOptionalString(preference); - out.writeBytesReference(suggestSource); + suggestSource.writeTo(out); } @Override public String toString() { + Objects.requireNonNull(suggestSource, "suggestSource must not be null"); String sSource = "_na_"; try { - sSource = XContentHelper.convertToJson(suggestSource, false); + XContentBuilder builder = JsonXContent.contentBuilder(); + builder = suggestSource.toXContent(builder, ToXContent.EMPTY_PARAMS); + sSource = builder.string(); } catch (Exception e) { // ignore } diff --git a/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequestBuilder.java index cd2163fe91e1..b64745bf4003 100644 --- a/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequestBuilder.java @@ -19,18 +19,11 @@ package org.elasticsearch.action.suggest; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.client.Requests; -import org.elasticsearch.common.xcontent.ToXContent; -import 
org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.search.suggest.SuggestionBuilder; -import java.io.IOException; - /** * A suggest action request builder. */ @@ -86,13 +79,7 @@ public class SuggestRequestBuilder extends BroadcastOperationRequestBuilder { +public class TransportSuggestAction + extends TransportBroadcastAction { private final IndicesService indicesService; private final SuggestPhase suggestPhase; @Inject - public TransportSuggestAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, - IndicesService indicesService, SuggestPhase suggestPhase, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver) { + public TransportSuggestAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + TransportService transportService, IndicesService indicesService, SuggestPhase suggestPhase, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { super(settings, SuggestAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, SuggestRequest::new, ShardSuggestRequest::new, ThreadPool.Names.SUGGEST); this.indicesService = indicesService; @@ -85,7 +84,8 @@ public class TransportSuggestAction extends TransportBroadcastAction> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, request.routing(), request.indices()); + Map> routingMap = + indexNameExpressionResolver.resolveSearchRouting(clusterState, request.routing(), request.indices()); return clusterService.operationRouting().searchShards(clusterState, concreteIndices, routingMap, request.preference()); } @@ -124,7 +124,8 @@ public class TransportSuggestAction extends TransportBroadcastAction 0) { - parser = XContentFactory.xContent(suggest).createParser(suggest); - if 
(parser.nextToken() != XContentParser.Token.START_OBJECT) { - throw new IllegalArgumentException("suggest content missing"); - } - final SuggestionSearchContext context = suggestPhase.parseElement().parseInternal(parser, indexService.newQueryShardContext()); + SuggestBuilder suggest = request.suggest(); + if (suggest != null) { + final SuggestionSearchContext context = suggest.build(indexService.newQueryShardContext()); final Suggest result = suggestPhase.execute(context, searcher.searcher()); return new ShardSuggestResponse(request.shardId(), result); } @@ -150,9 +146,6 @@ public class TransportSuggestAction extends TransportBroadcastActionindices. - * The suggest query has to be set using the JSON source using {@link org.elasticsearch.action.suggest.SuggestRequest#suggest(org.elasticsearch.common.bytes.BytesReference)}. + * The suggest query has to be set using {@link org.elasticsearch.action.suggest.SuggestRequest#suggest(SuggestBuilder)}. * @param indices The indices to suggest from. Use null or _all to execute against all indices * @see org.elasticsearch.client.Client#suggest(org.elasticsearch.action.suggest.SuggestRequest) */ @@ -342,7 +343,8 @@ public class Requests { /** * Creates a cluster health request. * - * @param indices The indices to provide additional cluster health information for. Use null or _all to execute against all indices + * @param indices The indices to provide additional cluster health information for. 
+ * Use null or _all to execute against all indices * @return The cluster health request * @see org.elasticsearch.client.ClusterAdminClient#health(org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest) */ diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index 5d9ac118831e..10258aaaee48 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -44,6 +44,7 @@ import org.elasticsearch.rest.action.support.RestToXContentListener; import org.elasticsearch.script.Template; import org.elasticsearch.search.aggregations.AggregatorParsers; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.suggest.Suggesters; import java.util.Map; @@ -60,13 +61,14 @@ public class RestMultiSearchAction extends BaseRestHandler { private final boolean allowExplicitIndex; private final IndicesQueriesRegistry indicesQueriesRegistry; private final AggregatorParsers aggParsers; - + private final Suggesters suggesters; @Inject public RestMultiSearchAction(Settings settings, RestController controller, Client client, IndicesQueriesRegistry indicesQueriesRegistry, - AggregatorParsers aggParsers) { + AggregatorParsers aggParsers, Suggesters suggesters) { super(settings, client); this.aggParsers = aggParsers; + this.suggesters = suggesters; controller.registerHandler(GET, "/_msearch", this); controller.registerHandler(POST, "/_msearch", this); @@ -97,7 +99,7 @@ public class RestMultiSearchAction extends BaseRestHandler { IndicesOptions indicesOptions = IndicesOptions.fromRequest(request, multiSearchRequest.indicesOptions()); parseRequest(multiSearchRequest, RestActions.getRestContent(request), isTemplateRequest, indices, types, request.param("search_type"), request.param("routing"), 
indicesOptions, allowExplicitIndex, indicesQueriesRegistry, - parseFieldMatcher, aggParsers); + parseFieldMatcher, aggParsers, suggesters); client.multiSearch(multiSearchRequest, new RestToXContentListener<>(channel)); } @@ -112,7 +114,8 @@ public class RestMultiSearchAction extends BaseRestHandler { @Nullable String routing, IndicesOptions indicesOptions, boolean allowExplicitIndex, IndicesQueriesRegistry indicesQueriesRegistry, - ParseFieldMatcher parseFieldMatcher, AggregatorParsers aggParsers) throws Exception { + ParseFieldMatcher parseFieldMatcher, AggregatorParsers aggParsers, + Suggesters suggesters) throws Exception { XContent xContent = XContentFactory.xContent(data); int from = 0; int length = data.length(); @@ -193,7 +196,7 @@ public class RestMultiSearchAction extends BaseRestHandler { } else { try (XContentParser requestParser = XContentFactory.xContent(slice).createParser(slice)) { queryParseContext.reset(requestParser); - searchRequest.source(SearchSourceBuilder.parseSearchSource(requestParser, queryParseContext, aggParsers)); + searchRequest.source(SearchSourceBuilder.parseSearchSource(requestParser, queryParseContext, aggParsers, suggesters)); } } // move pointers diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 4f431ebb81a1..6eedb0aea044 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -49,6 +49,7 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; +import org.elasticsearch.search.suggest.Suggesters; import java.io.IOException; import java.util.Arrays; @@ -65,13 +66,15 @@ public class RestSearchAction 
extends BaseRestHandler { private final IndicesQueriesRegistry queryRegistry; private final AggregatorParsers aggParsers; + private final Suggesters suggesters; @Inject public RestSearchAction(Settings settings, RestController controller, Client client, IndicesQueriesRegistry queryRegistry, - AggregatorParsers aggParsers) { + AggregatorParsers aggParsers, Suggesters suggesters) { super(settings, client); this.queryRegistry = queryRegistry; this.aggParsers = aggParsers; + this.suggesters = suggesters; controller.registerHandler(GET, "/_search", this); controller.registerHandler(POST, "/_search", this); controller.registerHandler(GET, "/{index}/_search", this); @@ -89,7 +92,7 @@ public class RestSearchAction extends BaseRestHandler { @Override public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) throws IOException { SearchRequest searchRequest = new SearchRequest(); - RestSearchAction.parseSearchRequest(searchRequest, queryRegistry, request, parseFieldMatcher, aggParsers, null); + RestSearchAction.parseSearchRequest(searchRequest, queryRegistry, request, parseFieldMatcher, aggParsers, suggesters, null); client.search(searchRequest, new RestStatusToXContentListener<>(channel)); } @@ -103,7 +106,9 @@ public class RestSearchAction extends BaseRestHandler { * RestAction.hasBodyContent. 
*/ public static void parseSearchRequest(SearchRequest searchRequest, IndicesQueriesRegistry indicesQueriesRegistry, RestRequest request, - ParseFieldMatcher parseFieldMatcher, AggregatorParsers aggParsers, BytesReference restContent) throws IOException { + ParseFieldMatcher parseFieldMatcher, AggregatorParsers aggParsers, Suggesters suggesters, BytesReference restContent) + throws IOException { + if (searchRequest.source() == null) { searchRequest.source(new SearchSourceBuilder()); } @@ -127,7 +132,7 @@ public class RestSearchAction extends BaseRestHandler { } } else { RestActions.parseRestSearchSource(searchRequest.source(), restContent, indicesQueriesRegistry, parseFieldMatcher, - aggParsers); + aggParsers, suggesters); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java b/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java index 4e6b88b68b8d..df947b492ad3 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java @@ -24,9 +24,14 @@ import org.elasticsearch.action.suggest.SuggestResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; @@ -37,6 +42,10 @@ import org.elasticsearch.rest.RestStatus; import 
org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.search.suggest.SuggestBuilder; +import org.elasticsearch.search.suggest.Suggesters; + +import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; @@ -47,9 +56,15 @@ import static org.elasticsearch.rest.action.support.RestActions.buildBroadcastSh */ public class RestSuggestAction extends BaseRestHandler { + private final IndicesQueriesRegistry queryRegistry; + private final Suggesters suggesters; + @Inject - public RestSuggestAction(Settings settings, RestController controller, Client client) { + public RestSuggestAction(Settings settings, RestController controller, Client client, + IndicesQueriesRegistry queryRegistry, Suggesters suggesters) { super(settings, client); + this.queryRegistry = queryRegistry; + this.suggesters = suggesters; controller.registerHandler(POST, "/_suggest", this); controller.registerHandler(GET, "/_suggest", this); controller.registerHandler(POST, "/{index}/_suggest", this); @@ -57,11 +72,17 @@ public class RestSuggestAction extends BaseRestHandler { } @Override - public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) { + public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) throws IOException { SuggestRequest suggestRequest = new SuggestRequest(Strings.splitStringByCommaToArray(request.param("index"))); suggestRequest.indicesOptions(IndicesOptions.fromRequest(request, suggestRequest.indicesOptions())); if (RestActions.hasBodyContent(request)) { - suggestRequest.suggest(RestActions.getRestContent(request)); + final BytesReference sourceBytes = RestActions.getRestContent(request); + try (XContentParser parser = 
XContentFactory.xContent(sourceBytes).createParser(sourceBytes)) { + final QueryParseContext context = new QueryParseContext(queryRegistry); + context.reset(parser); + context.parseFieldMatcher(parseFieldMatcher); + suggestRequest.suggest(SuggestBuilder.fromXContent(context, suggesters, true)); + } } else { throw new IllegalArgumentException("no content or source provided to execute suggestion"); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java b/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java index 692a9dc34025..950828639f7b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java @@ -42,6 +42,7 @@ import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.search.aggregations.AggregatorParsers; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.suggest.Suggesters; import java.io.IOException; @@ -114,14 +115,15 @@ public class RestActions { return queryBuilder; } - public static void parseRestSearchSource(SearchSourceBuilder source, BytesReference sourceBytes, IndicesQueriesRegistry queryRegistry, - ParseFieldMatcher parseFieldMatcher, AggregatorParsers aggParsers) + public static void parseRestSearchSource(SearchSourceBuilder source, BytesReference sourceBytes, + IndicesQueriesRegistry queryRegistry, ParseFieldMatcher parseFieldMatcher, + AggregatorParsers aggParsers, Suggesters suggesters) throws IOException { XContentParser parser = XContentFactory.xContent(sourceBytes).createParser(sourceBytes); QueryParseContext queryParseContext = new QueryParseContext(queryRegistry); queryParseContext.reset(parser); queryParseContext.parseFieldMatcher(parseFieldMatcher); - source.parseXContent(parser, queryParseContext, aggParsers); + source.parseXContent(parser, queryParseContext, 
aggParsers, suggesters); } /** diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index 2b39a2174b33..55c343ba874a 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -228,7 +228,11 @@ import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.Suggesters; import org.elasticsearch.search.suggest.SuggestionBuilder; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; +import org.elasticsearch.search.suggest.phrase.Laplace; +import org.elasticsearch.search.suggest.phrase.LinearInterpolation; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; +import org.elasticsearch.search.suggest.phrase.SmoothingModel; +import org.elasticsearch.search.suggest.phrase.StupidBackoff; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import java.util.ArrayList; @@ -281,8 +285,9 @@ public class SearchModule extends AbstractModule { highlighters.registerExtension(key, clazz); } - public void registerSuggester(String key, Class suggester) { - suggesters.registerExtension(key, suggester); + public void registerSuggester(String key, Suggester suggester) { + suggesters.registerExtension(key, suggester.getClass()); + namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, suggester.getBuilderPrototype()); } /** @@ -378,6 +383,9 @@ public class SearchModule extends AbstractModule { namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, TermSuggestionBuilder.PROTOTYPE); namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, PhraseSuggestionBuilder.PROTOTYPE); namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, CompletionSuggestionBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(SmoothingModel.class, Laplace.PROTOTYPE); + 
namedWriteableRegistry.registerPrototype(SmoothingModel.class, LinearInterpolation.PROTOTYPE); + namedWriteableRegistry.registerPrototype(SmoothingModel.class, StupidBackoff.PROTOTYPE); } protected void configureHighlighters() { diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 8dbc521876d8..e5cd7653a153 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -55,7 +55,6 @@ import org.elasticsearch.index.search.stats.StatsGroupsParseElement; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.indices.IndicesRequestCache; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; @@ -92,6 +91,7 @@ import org.elasticsearch.search.query.QuerySearchResultProvider; import org.elasticsearch.search.query.ScrollQuerySearchResult; import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.searchafter.SearchAfterBuilder; +import org.elasticsearch.search.suggest.Suggesters; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; @@ -150,14 +150,16 @@ public class SearchService extends AbstractLifecycleComponent imp private final Map elementParsers; private final ParseFieldMatcher parseFieldMatcher; - private AggregatorParsers aggParsers; + private final AggregatorParsers aggParsers; + private final Suggesters suggesters; @Inject public SearchService(Settings settings, ClusterSettings clusterSettings, ClusterService clusterService, IndicesService indicesService, - ThreadPool threadPool, ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, DfsPhase dfsPhase, - QueryPhase queryPhase, FetchPhase 
fetchPhase, AggregatorParsers aggParsers) { + ThreadPool threadPool, ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, DfsPhase dfsPhase, + QueryPhase queryPhase, FetchPhase fetchPhase, AggregatorParsers aggParsers, Suggesters suggesters) { super(settings); this.aggParsers = aggParsers; + this.suggesters = suggesters; this.parseFieldMatcher = new ParseFieldMatcher(settings); this.threadPool = threadPool; this.clusterService = clusterService; @@ -556,7 +558,7 @@ public class SearchService extends AbstractLifecycleComponent imp QueryParseContext queryParseContext = new QueryParseContext(indicesService.getIndicesQueryRegistry()); queryParseContext.reset(parser); queryParseContext.parseFieldMatcher(parseFieldMatcher); - parseSource(context, SearchSourceBuilder.parseSearchSource(parser, queryParseContext, aggParsers)); + parseSource(context, SearchSourceBuilder.parseSearchSource(parser, queryParseContext, aggParsers, suggesters)); } } parseSource(context, request.source()); @@ -719,20 +721,10 @@ public class SearchService extends AbstractLifecycleComponent imp } } if (source.suggest() != null) { - XContentParser suggestParser = null; try { - suggestParser = XContentFactory.xContent(source.suggest()).createParser(source.suggest()); - suggestParser.nextToken(); - this.elementParsers.get("suggest").parse(suggestParser, context); - } catch (Exception e) { - String sSource = "_na_"; - try { - sSource = source.toString(); - } catch (Throwable e1) { - // ignore - } - XContentLocation location = suggestParser != null ? 
suggestParser.getTokenLocation() : null; - throw new SearchParseException(context, "failed to parse suggest source [" + sSource + "]", location, e); + context.suggest(source.suggest().build(queryShardContext)); + } catch (IOException e) { + throw new SearchContextException(context, "failed to create SuggestionSearchContext", e); } } if (source.rescores() != null) { diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 0cb83dbd2f96..bb66216e6abe 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -55,6 +55,7 @@ import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.suggest.SuggestBuilder; +import org.elasticsearch.search.suggest.Suggesters; import java.io.IOException; import java.util.ArrayList; @@ -105,9 +106,10 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ return PROTOTYPE.readFrom(in); } - public static SearchSourceBuilder parseSearchSource(XContentParser parser, QueryParseContext context, AggregatorParsers aggParsers) + public static SearchSourceBuilder parseSearchSource(XContentParser parser, QueryParseContext context, + AggregatorParsers aggParsers, Suggesters suggesters) throws IOException { - return PROTOTYPE.fromXContent(parser, context, aggParsers); + return PROTOTYPE.fromXContent(parser, context, aggParsers, suggesters); } /** @@ -156,7 +158,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ private HighlightBuilder highlightBuilder; - private BytesReference suggestBuilder; + private SuggestBuilder suggestBuilder; private BytesReference innerHitsBuilder; @@ -475,20 +477,14 @@ public final class SearchSourceBuilder 
extends ToXContentToBytes implements Writ } public SearchSourceBuilder suggest(SuggestBuilder suggestBuilder) { - try { - XContentBuilder builder = XContentFactory.jsonBuilder(); - suggestBuilder.toXContent(builder, EMPTY_PARAMS); - this.suggestBuilder = builder.bytes(); - return this; - } catch (IOException e) { - throw new RuntimeException(e); - } + this.suggestBuilder = suggestBuilder; + return this; } /** - * Gets the bytes representing the suggester builder for this request. + * Gets the suggester builder for this request. */ - public BytesReference suggest() { + public SuggestBuilder suggest() { return suggestBuilder; } @@ -736,19 +732,22 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ /** * Create a new SearchSourceBuilder with attributes set by an xContent. */ - public SearchSourceBuilder fromXContent(XContentParser parser, QueryParseContext context, AggregatorParsers aggParsers) + public SearchSourceBuilder fromXContent(XContentParser parser, QueryParseContext context, + AggregatorParsers aggParsers, Suggesters suggesters) throws IOException { SearchSourceBuilder builder = new SearchSourceBuilder(); - builder.parseXContent(parser, context, aggParsers); + builder.parseXContent(parser, context, aggParsers, suggesters); return builder; } /** * Parse some xContent into this SearchSourceBuilder, overwriting any values specified in the xContent. Use this if you need to set up * different defaults than a regular SearchSourceBuilder would have and use - * {@link #fromXContent(XContentParser, QueryParseContext, AggregatorParsers)} if you have normal defaults. + * {@link #fromXContent(XContentParser, QueryParseContext, AggregatorParsers, Suggesters)} if you have normal defaults. 
*/ - public void parseXContent(XContentParser parser, QueryParseContext context, AggregatorParsers aggParsers) throws IOException { + public void parseXContent(XContentParser parser, QueryParseContext context, AggregatorParsers aggParsers, Suggesters suggesters) + throws IOException { + XContentParser.Token token = parser.currentToken(); String currentFieldName = null; if (token != XContentParser.Token.START_OBJECT && (token = parser.nextToken()) != XContentParser.Token.START_OBJECT) { @@ -852,8 +851,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser); innerHitsBuilder = xContentBuilder.bytes(); } else if (context.parseFieldMatcher().match(currentFieldName, SUGGEST_FIELD)) { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser); - suggestBuilder = xContentBuilder.bytes(); + suggestBuilder = SuggestBuilder.fromXContent(context, suggesters, false); } else if (context.parseFieldMatcher().match(currentFieldName, SORT_FIELD)) { sorts = new ArrayList<>(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser); @@ -1050,10 +1048,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ } if (suggestBuilder != null) { - builder.field(SUGGEST_FIELD.getPreferredName()); - XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(suggestBuilder); - parser.nextToken(); - builder.copyCurrentStructure(parser); + builder.field(SUGGEST_FIELD.getPreferredName(), suggestBuilder); } if (rescoreBuilders != null) { @@ -1232,7 +1227,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ builder.stats = stats; } if (in.readBoolean()) { - builder.suggestBuilder = in.readBytesReference(); + builder.suggestBuilder = SuggestBuilder.PROTOTYPE.readFrom(in); } builder.terminateAfter = in.readVInt(); 
builder.timeoutInMillis = in.readLong(); @@ -1348,7 +1343,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ boolean hasSuggestBuilder = suggestBuilder != null; out.writeBoolean(hasSuggestBuilder); if (hasSuggestBuilder) { - out.writeBytesReference(suggestBuilder); + suggestBuilder.writeTo(out); } out.writeVInt(terminateAfter); out.writeLong(timeoutInMillis); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java index f8a8ccdf685a..bff33982da14 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java @@ -112,13 +112,18 @@ public class SuggestBuilder extends ToXContentToBytes implements Writeable getBuilderPrototype(); + public abstract SuggestionBuilder getBuilderPrototype(); public Suggest.Suggestion> execute(String name, T suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java b/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java index 9857a06da687..0bce98c72079 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java @@ -59,7 +59,7 @@ public final class Suggesters extends ExtensionPoint.ClassMap { return parsers.get(type); } - public SuggestionBuilder getSuggestionPrototype(String suggesterName) { + public SuggestionBuilder getSuggestionPrototype(String suggesterName) { Suggester suggester = parsers.get(suggesterName); if (suggester == null) { throw new IllegalArgumentException("suggester with name [" + suggesterName + "] not supported"); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java 
b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java index a06b834527bf..35e530d62189 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java @@ -398,9 +398,12 @@ public class TermSuggestionBuilder extends SuggestionBuilder randomSuggestion) { + randomSuggestion.text(randomAsciiOfLengthBetween(2, 20)); // have to set the text because we don't know if the global text was set maybeSet(randomSuggestion::prefix, randomAsciiOfLengthBetween(2, 20)); maybeSet(randomSuggestion::regex, randomAsciiOfLengthBetween(2, 20)); maybeSet(randomSuggestion::analyzer, randomAsciiOfLengthBetween(2, 20)); maybeSet(randomSuggestion::size, randomIntBetween(1, 20)); maybeSet(randomSuggestion::shardSize, randomIntBetween(1, 20)); - return randomSuggestion; } /** diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index bfb31de216c9..3aa0d11ea309 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -23,8 +23,8 @@ import com.carrotsearch.randomizedtesting.generators.RandomStrings; import org.apache.lucene.analysis.TokenStreamToAutomaton; import org.apache.lucene.search.suggest.document.ContextSuggestField; +import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; -import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; @@ -33,7 +33,6 @@ import 
org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.suggest.SuggestRequest; import org.elasticsearch.action.suggest.SuggestResponse; import org.elasticsearch.client.Requests; import org.elasticsearch.common.settings.Settings; @@ -81,6 +80,8 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; +// nocommit +@LuceneTestCase.AwaitsFix(bugUrl = "waiting on completion suggestion builder refactoring") @SuppressCodecs("*") // requires custom completion format public class CompletionSuggestSearchIT extends ESIntegTestCase { private final String INDEX = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT); @@ -211,32 +212,6 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { } } - public void testMalformedRequestPayload() throws Exception { - final CompletionMappingBuilder mapping = new CompletionMappingBuilder(); - createIndexAndMapping(mapping); - SuggestRequest request = new SuggestRequest(INDEX); - XContentBuilder suggest = jsonBuilder().startObject() - .startObject("bad-payload") - .field("prefix", "sug") - .startObject("completion") - .field("field", FIELD) - .startArray("payload") - .startObject() - .field("payload", "field") - .endObject() - .endArray() - .endObject() - .endObject().endObject(); - request.suggest(suggest.bytes()); - ensureGreen(); - - SuggestResponse suggestResponse = client().suggest(request).get(); - assertThat(suggestResponse.getSuccessfulShards(), equalTo(0)); - for (ShardOperationFailedException exception : suggestResponse.getShardFailures()) { - assertThat(exception.reason(), containsString("ParsingException[[completion] failed to parse field [payload]]; nested: 
IllegalStateException[Can't get text on a START_OBJECT")); - } - } - public void testMissingPayloadField() throws Exception { final CompletionMappingBuilder mapping = new CompletionMappingBuilder(); createIndexAndMapping(mapping); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java index 58458c9d2449..c137c031ad97 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.suggest; import com.carrotsearch.randomizedtesting.generators.RandomStrings; import org.apache.lucene.spatial.util.GeoHashUtils; +import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.suggest.SuggestResponse; @@ -52,6 +53,8 @@ import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +// nocommit +@LuceneTestCase.AwaitsFix(bugUrl = "waiting on completion suggestion builder refactoring") @SuppressCodecs("*") // requires custom completion format public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java index 68e62983b54a..42eb9fc182f4 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java @@ -32,6 +32,7 @@ import java.util.Map; */ public class CustomSuggester extends Suggester { + public static CustomSuggester PROTOTYPE = new 
CustomSuggester(); // This is a pretty dumb implementation which returns the original text + fieldName + custom config option + 12 or 123 @Override diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterPlugin.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterPlugin.java index c5e36da2ea7a..19d6ed4e098b 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterPlugin.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterPlugin.java @@ -37,7 +37,7 @@ public class CustomSuggesterPlugin extends Plugin { } public void onModule(SearchModule searchModule) { - searchModule.registerSuggester("custom", CustomSuggester.class); + searchModule.registerSuggester("custom", CustomSuggester.PROTOTYPE); } } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java index 178e353b7a9e..9f79f6a2ac90 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java @@ -20,10 +20,15 @@ package org.elasticsearch.search.suggest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.plugins.Plugin; @@ 
-75,16 +80,20 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase { // TODO: infer type once JI-9019884 is fixed // TODO: see also JDK-8039214 - List> suggestions - = CollectionUtils.>iterableAsArrayList(searchResponse.getSuggest().getSuggestion("someName")); + List> suggestions = + CollectionUtils.>iterableAsArrayList( + searchResponse.getSuggest().getSuggestion("someName")); assertThat(suggestions, hasSize(2)); - assertThat(suggestions.get(0).getText().string(), is(String.format(Locale.ROOT, "%s-%s-%s-12", randomText, randomField, randomSuffix))); - assertThat(suggestions.get(1).getText().string(), is(String.format(Locale.ROOT, "%s-%s-%s-123", randomText, randomField, randomSuffix))); + assertThat(suggestions.get(0).getText().string(), + is(String.format(Locale.ROOT, "%s-%s-%s-12", randomText, randomField, randomSuffix))); + assertThat(suggestions.get(1).getText().string(), + is(String.format(Locale.ROOT, "%s-%s-%s-123", randomText, randomField, randomSuffix))); } - static class CustomSuggestionBuilder extends SuggestionBuilder { + public static class CustomSuggestionBuilder extends SuggestionBuilder { public final static CustomSuggestionBuilder PROTOTYPE = new CustomSuggestionBuilder("_na_", "_na_"); + protected static final ParseField RANDOM_SUFFIX_FIELD = new ParseField("suffix"); private String randomSuffix; @@ -95,7 +104,7 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase { @Override protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { - builder.field("suffix", randomSuffix); + builder.field(RANDOM_SUFFIX_FIELD.getPreferredName(), randomSuffix); return builder; } @@ -125,19 +134,64 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase { } @Override - protected CustomSuggestionBuilder innerFromXContent(QueryParseContext parseContext) - throws IOException { - // TODO some parsing - return new CustomSuggestionBuilder(field(), randomSuffix); + protected CustomSuggestionBuilder 
innerFromXContent(QueryParseContext parseContext) throws IOException { + XContentParser parser = parseContext.parser(); + ParseFieldMatcher parseFieldMatcher = parseContext.parseFieldMatcher(); + XContentParser.Token token; + String currentFieldName = null; + String fieldname = null; + String suffix = null; + String analyzer = null; + int sizeField = -1; + int shardSize = -1; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.ANALYZER_FIELD)) { + analyzer = parser.text(); + } else if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.FIELDNAME_FIELD)) { + fieldname = parser.text(); + } else if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.SIZE_FIELD)) { + sizeField = parser.intValue(); + } else if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.SHARDSIZE_FIELD)) { + shardSize = parser.intValue(); + } else if (parseFieldMatcher.match(currentFieldName, RANDOM_SUFFIX_FIELD)) { + suffix = parser.text(); + } + } else { + throw new ParsingException(parser.getTokenLocation(), + "suggester[custom] doesn't support field [" + currentFieldName + "]"); + } + } + + // now we should have field name, check and copy fields over to the suggestion builder we return + if (fieldname == null) { + throw new ParsingException(parser.getTokenLocation(), "the required field option is missing"); + } + CustomSuggestionBuilder builder = new CustomSuggestionBuilder(fieldname, suffix); + if (analyzer != null) { + builder.analyzer(analyzer); + } + if (sizeField != -1) { + builder.size(sizeField); + } + if (shardSize != -1) { + builder.shardSize(shardSize); + } + return builder; } @Override protected SuggestionContext innerBuild(QueryShardContext context) throws IOException { Map options = new HashMap<>(); - options.put("field", field()); - 
options.put("suffix", randomSuffix); - CustomSuggester.CustomSuggestionsContext customSuggestionsContext = new CustomSuggester.CustomSuggestionsContext(context, options); + options.put(FIELDNAME_FIELD.getPreferredName(), field()); + options.put(RANDOM_SUFFIX_FIELD.getPreferredName(), randomSuffix); + CustomSuggester.CustomSuggestionsContext customSuggestionsContext = + new CustomSuggester.CustomSuggestionsContext(context, options); customSuggestionsContext.setField(field()); + assert text != null; + customSuggestionsContext.setText(BytesRefs.toBytesRef(text)); return customSuggestionsContext; } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java index 33745ae942da..f1e52dba2aa1 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.suggest; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -30,6 +31,8 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; import org.elasticsearch.search.suggest.completion.WritableTestCase; import org.elasticsearch.search.suggest.phrase.Laplace; @@ -39,26 +42,116 @@ import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilderTests; import org.elasticsearch.search.suggest.phrase.SmoothingModel; 
import org.elasticsearch.search.suggest.phrase.StupidBackoff; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; +import org.elasticsearch.search.suggest.term.TermSuggestionBuilderTests; +import org.junit.AfterClass; +import org.junit.BeforeClass; import java.io.IOException; import java.util.Collections; +import java.util.HashMap; +import java.util.Locale; import java.util.Map.Entry; +import static org.hamcrest.Matchers.containsString; + public class SuggestBuilderTests extends WritableTestCase { + private static NamedWriteableRegistry namedWriteableRegistry; + private static IndicesQueriesRegistry queriesRegistry; + private static ParseFieldMatcher parseFieldMatcher; + private static Suggesters suggesters; + + /** + * Setup for the whole base test class. + */ + @BeforeClass + public static void init() { + NamedWriteableRegistry nwRegistry = new NamedWriteableRegistry(); + nwRegistry.registerPrototype(SuggestionBuilder.class, TermSuggestionBuilder.PROTOTYPE); + nwRegistry.registerPrototype(SuggestionBuilder.class, PhraseSuggestionBuilder.PROTOTYPE); + nwRegistry.registerPrototype(SuggestionBuilder.class, CompletionSuggestionBuilder.PROTOTYPE); + nwRegistry.registerPrototype(SmoothingModel.class, Laplace.PROTOTYPE); + nwRegistry.registerPrototype(SmoothingModel.class, LinearInterpolation.PROTOTYPE); + nwRegistry.registerPrototype(SmoothingModel.class, StupidBackoff.PROTOTYPE); + namedWriteableRegistry = nwRegistry; + queriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).buildQueryParserRegistry(); + suggesters = new Suggesters(new HashMap<>()); + parseFieldMatcher = ParseFieldMatcher.STRICT; + } + + @AfterClass + public static void afterClass() { + namedWriteableRegistry = null; + queriesRegistry = null; + suggesters = null; + parseFieldMatcher = null; + } @Override protected NamedWriteableRegistry provideNamedWritableRegistry() { - NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); - 
namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, TermSuggestionBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, PhraseSuggestionBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, CompletionSuggestionBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(SmoothingModel.class, Laplace.PROTOTYPE); - namedWriteableRegistry.registerPrototype(SmoothingModel.class, LinearInterpolation.PROTOTYPE); - namedWriteableRegistry.registerPrototype(SmoothingModel.class, StupidBackoff.PROTOTYPE); return namedWriteableRegistry; } + /** + * Test that valid JSON suggestion request passes. + */ + public void testValidJsonRequestPayload() throws Exception { + final String field = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT); + String payload = "{\n" + + " \"valid-suggestion\" : {\n" + + " \"text\" : \"the amsterdma meetpu\",\n" + + " \"term\" : {\n" + + " \"field\" : \"" + field + "\"\n" + + " }\n" + + " }\n" + + "}"; + try { + final SuggestBuilder suggestBuilder = SuggestBuilder.fromXContent(newParseContext(payload), suggesters, true); + assertNotNull(suggestBuilder); + } catch (Exception e) { + fail("Parsing valid json should not have thrown exception: " + e.getMessage()); + } + } + + /** + * Test that a malformed JSON suggestion request fails. 
+ */ + public void testMalformedJsonRequestPayload() throws Exception { + final String field = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT); + // {"bad-payload":{"prefix":"sug","completion":{"field":"ytnahgylcc","payload":[{"payload":"field"}]}}} + String payload = "{\n" + + " \"bad-payload\" : {\n" + + " \"text\" : \"the amsterdma meetpu\",\n" + + " \"term\" : {\n" + + " \"field\" : { \"" + field + "\" : \"bad-object\" }\n" + + " }\n" + + " }\n" + + "}"; + try { + final SuggestBuilder suggestBuilder = SuggestBuilder.fromXContent(newParseContext(payload), suggesters, true); + fail("Should not have been able to create SuggestBuilder from malformed JSON: " + suggestBuilder); + } catch (Exception e) { + assertThat(e.getMessage(), containsString("parsing failed")); + } + + // nocommit TODO: awaits completion suggester + /*payload = "{\n" + + " \"bad-payload\" : { \n" + + " \"prefix\" : \"sug\",\n" + + " \"completion\" : { \n" + + " \"field\" : \"" + field + "\",\n " + + " \"payload\" : [ {\"payload\":\"field\"} ]\n" + + " }\n" + + " }\n" + + "}\n"; + try { + final SuggestBuilder suggestBuilder = SuggestBuilder.fromXContent(newParseContext(payload), suggesters); + fail("Should not have been able to create SuggestBuilder from malformed JSON: " + suggestBuilder); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("encountered invalid token")); + }*/ + } + /** * creates random suggestion builder, renders it to xContent and back to new instance that should be equal to original */ @@ -75,26 +168,34 @@ public class SuggestBuilderTests extends WritableTestCase { suggestBuilder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); XContentParser parser = XContentHelper.createParser(xContentBuilder.bytes()); context.reset(parser); - parser.nextToken(); - SuggestBuilder secondSuggestBuilder = SuggestBuilder.fromXContent(context, suggesters); + SuggestBuilder secondSuggestBuilder = 
SuggestBuilder.fromXContent(context, suggesters, true); assertNotSame(suggestBuilder, secondSuggestBuilder); assertEquals(suggestBuilder, secondSuggestBuilder); assertEquals(suggestBuilder.hashCode(), secondSuggestBuilder.hashCode()); } } + public void testIllegalSuggestionName() { + try { + new SuggestBuilder().addSuggestion(null, PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder()); + fail("exception expected"); + } catch (NullPointerException e) { + assertEquals("every suggestion needs a name", e.getMessage()); + } + + try { + new SuggestBuilder().addSuggestion("my-suggest", PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder()) + .addSuggestion("my-suggest", PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder()); + fail("exception expected"); + } catch (IllegalArgumentException e) { + assertEquals("already added another suggestion with name [my-suggest]", e.getMessage()); + } + } + @Override protected SuggestBuilder createTestModel() { - SuggestBuilder suggestBuilder = new SuggestBuilder(); - if (randomBoolean()) { - suggestBuilder.setGlobalText(randomAsciiOfLengthBetween(5, 50)); - } - int numberOfSuggestions = randomIntBetween(0, 5); - for (int i = 0; i < numberOfSuggestions; i++) { - suggestBuilder.addSuggestion(randomAsciiOfLength(10), PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder()); - } - return suggestBuilder; + return randomSuggestBuilder(); } @Override @@ -111,26 +212,37 @@ public class SuggestBuilderTests extends WritableTestCase { return mutation; } - public void testIllegalSuggestionName() { - try { - new SuggestBuilder().addSuggestion(null, PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder()); - fail("exception expected"); - } catch (NullPointerException e) { - assertEquals("every suggestion needs a name", e.getMessage()); - } - - try { - new SuggestBuilder().addSuggestion("my-suggest", PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder()) - .addSuggestion("my-suggest", 
PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder()); - fail("exception expected"); - } catch (IllegalArgumentException e) { - assertEquals("already added another suggestion with name [my-suggest]", e.getMessage()); - } - } - @Override protected SuggestBuilder readFrom(StreamInput in) throws IOException { return SuggestBuilder.PROTOTYPE.readFrom(in); } + public static SuggestBuilder randomSuggestBuilder() { + SuggestBuilder builder = new SuggestBuilder(); + if (randomBoolean()) { + builder.setGlobalText(randomAsciiOfLengthBetween(1, 20)); + } + final int numSuggestions = randomIntBetween(1, 5); + for (int i = 0; i < numSuggestions; i++) { + builder.addSuggestion(randomAsciiOfLengthBetween(5, 10), randomSuggestionBuilder()); + } + return builder; + } + + private static SuggestionBuilder randomSuggestionBuilder() { + switch (randomIntBetween(0, 2)) { + case 0: return TermSuggestionBuilderTests.randomTermSuggestionBuilder(); + case 1: return PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder(); + //norelease TODO: uncomment case 2: return CompletionSuggesterBuilderTests.randomCompletionSuggestionBuilder(); + default: return TermSuggestionBuilderTests.randomTermSuggestionBuilder(); + } + } + + private static QueryParseContext newParseContext(final String xcontent) throws IOException { + final QueryParseContext parseContext = new QueryParseContext(queriesRegistry); + parseContext.reset(XContentFactory.xContent(xcontent).createParser(xcontent)); + parseContext.parseFieldMatcher(parseFieldMatcher); + return parseContext; + } + } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java index 1a8efbb748de..69871fa3fed2 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java +++ 
b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java @@ -21,12 +21,8 @@ package org.elasticsearch.search.suggest.completion; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.CompletionFieldMapper; -import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.completion.context.CategoryContextMapping; @@ -36,7 +32,6 @@ import org.elasticsearch.search.suggest.completion.context.ContextMappings; import org.elasticsearch.search.suggest.completion.context.GeoContextMapping; import org.elasticsearch.search.suggest.completion.context.GeoQueryContext; import org.elasticsearch.search.suggest.completion.context.QueryContext; -import org.junit.BeforeClass; import java.io.IOException; import java.util.ArrayList; @@ -64,6 +59,7 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe private BuilderAndInfo randomSuggestionBuilderWithContextInfo() { final BuilderAndInfo builderAndInfo = new BuilderAndInfo(); CompletionSuggestionBuilder testBuilder = new CompletionSuggestionBuilder(randomAsciiOfLengthBetween(2, 20)); + setCommonPropertiesOnRandomBuilder(testBuilder); switch (randomIntBetween(0, 3)) { case 0: testBuilder.prefix(randomAsciiOfLength(10)); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java index 3011acee57ae..a34eeb298939 100644 --- 
a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java @@ -48,6 +48,7 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC public static PhraseSuggestionBuilder randomPhraseSuggestionBuilder() { PhraseSuggestionBuilder testBuilder = new PhraseSuggestionBuilder(randomAsciiOfLengthBetween(2, 20)); + setCommonPropertiesOnRandomBuilder(testBuilder); maybeSet(testBuilder::maxErrors, randomFloat()); maybeSet(testBuilder::separator, randomAsciiOfLengthBetween(1, 10)); maybeSet(testBuilder::realWordErrorLikelihood, randomFloat()); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java index e0c2a33664af..d120577c4557 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java @@ -46,7 +46,15 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas */ @Override protected TermSuggestionBuilder randomSuggestionBuilder() { + return randomTermSuggestionBuilder(); + } + + /** + * Creates a random TermSuggestionBuilder + */ + public static TermSuggestionBuilder randomTermSuggestionBuilder() { TermSuggestionBuilder testBuilder = new TermSuggestionBuilder(randomAsciiOfLengthBetween(2, 20)); + setCommonPropertiesOnRandomBuilder(testBuilder); maybeSet(testBuilder::suggestMode, randomSuggestMode()); maybeSet(testBuilder::accuracy, randomFloat()); maybeSet(testBuilder::sort, randomSort()); @@ -60,7 +68,7 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas return testBuilder; } - private SuggestMode randomSuggestMode() { + private static SuggestMode randomSuggestMode() { final int randomVal = 
randomIntBetween(0, 2); switch (randomVal) { case 0: return SuggestMode.MISSING; @@ -70,7 +78,7 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas } } - private SortBy randomSort() { + private static SortBy randomSort() { int randomVal = randomIntBetween(0, 1); switch (randomVal) { case 0: return SortBy.SCORE; @@ -79,7 +87,7 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas } } - private StringDistanceImpl randomStringDistance() { + private static StringDistanceImpl randomStringDistance() { int randomVal = randomIntBetween(0, 4); switch (randomVal) { case 0: return StringDistanceImpl.INTERNAL; diff --git a/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java index 7496bfb8263c..41750b5c25d9 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java +++ b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java @@ -33,6 +33,7 @@ import org.elasticsearch.search.dfs.DfsPhase; import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.query.QueryPhase; +import org.elasticsearch.search.suggest.Suggesters; import org.elasticsearch.threadpool.ThreadPool; import java.util.HashMap; @@ -69,9 +70,9 @@ public class MockSearchService extends SearchService { public MockSearchService(Settings settings, ClusterSettings clusterSettings, ClusterService clusterService, IndicesService indicesService, ThreadPool threadPool, ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, DfsPhase dfsPhase, QueryPhase queryPhase, FetchPhase fetchPhase, - AggregatorParsers aggParsers) { + AggregatorParsers aggParsers, Suggesters suggesters) { super(settings, clusterSettings, clusterService, indicesService, threadPool, scriptService, pageCacheRecycler, bigArrays, dfsPhase, 
- queryPhase, fetchPhase, aggParsers); + queryPhase, fetchPhase, aggParsers, suggesters); } @Override From a6662d78dff818c3a73e35099c3289d026003cfb Mon Sep 17 00:00:00 2001 From: Ali Beyad Date: Tue, 8 Mar 2016 11:37:11 -0500 Subject: [PATCH 142/320] Improves the organization of the suggestion builder tests --- .../action/suggest/SuggestRequest.java | 24 ++--- .../action/suggest/RestSuggestAction.java | 2 +- .../search/builder/SearchSourceBuilder.java | 2 +- .../search/suggest/SuggestBuilder.java | 9 +- .../AbstractSuggestionBuilderTestCase.java | 17 +++- .../suggest/CompletionSuggestSearchIT.java | 3 - .../ContextCompletionSuggestSearchIT.java | 3 - .../search/suggest/SuggestBuilderTests.java | 88 +------------------ .../CompletionSuggesterBuilderTests.java | 26 ++++++ .../term/TermSuggestionBuilderTests.java | 22 +++++ 10 files changed, 85 insertions(+), 111 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java b/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java index 7f3f701f3e86..1398dd1dcf1b 100644 --- a/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java +++ b/core/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java @@ -54,7 +54,7 @@ public final class SuggestRequest extends BroadcastRequest { @Nullable private String preference; - private SuggestBuilder suggestSource; + private SuggestBuilder suggest; public SuggestRequest() { } @@ -74,18 +74,18 @@ public final class SuggestRequest extends BroadcastRequest { } /** - * The Phrase to get correction suggestions for + * The suggestion query to get correction suggestions for */ public SuggestBuilder suggest() { - return suggestSource; + return suggest; } /** * set a new source for the suggest query */ - public SuggestRequest suggest(SuggestBuilder suggestSource) { - Objects.requireNonNull(suggestSource, "suggestSource must not be null"); - this.suggestSource = suggestSource; + public SuggestRequest 
suggest(SuggestBuilder suggest) { + Objects.requireNonNull(suggest, "suggest must not be null"); + this.suggest = suggest; return this; } @@ -126,29 +126,29 @@ public final class SuggestRequest extends BroadcastRequest { super.readFrom(in); routing = in.readOptionalString(); preference = in.readOptionalString(); - suggest(SuggestBuilder.PROTOTYPE.readFrom(in)); + suggest = SuggestBuilder.PROTOTYPE.readFrom(in); } @Override public void writeTo(StreamOutput out) throws IOException { - Objects.requireNonNull(suggestSource, "suggestSource must not be null"); + Objects.requireNonNull(suggest, "suggest must not be null"); super.writeTo(out); out.writeOptionalString(routing); out.writeOptionalString(preference); - suggestSource.writeTo(out); + suggest.writeTo(out); } @Override public String toString() { - Objects.requireNonNull(suggestSource, "suggestSource must not be null"); + Objects.requireNonNull(suggest, "suggest must not be null"); String sSource = "_na_"; try { XContentBuilder builder = JsonXContent.contentBuilder(); - builder = suggestSource.toXContent(builder, ToXContent.EMPTY_PARAMS); + builder = suggest.toXContent(builder, ToXContent.EMPTY_PARAMS); sSource = builder.string(); } catch (Exception e) { // ignore } - return "[" + Arrays.toString(indices) + "]" + ", suggestSource[" + sSource + "]"; + return "[" + Arrays.toString(indices) + "]" + ", suggest[" + sSource + "]"; } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java b/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java index df947b492ad3..291eb69254b6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java @@ -81,7 +81,7 @@ public class RestSuggestAction extends BaseRestHandler { final QueryParseContext context = new QueryParseContext(queryRegistry); context.reset(parser); 
context.parseFieldMatcher(parseFieldMatcher); - suggestRequest.suggest(SuggestBuilder.fromXContent(context, suggesters, true)); + suggestRequest.suggest(SuggestBuilder.fromXContent(context, suggesters)); } } else { throw new IllegalArgumentException("no content or source provided to execute suggestion"); diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index bb66216e6abe..e256838b756c 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -851,7 +851,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser); innerHitsBuilder = xContentBuilder.bytes(); } else if (context.parseFieldMatcher().match(currentFieldName, SUGGEST_FIELD)) { - suggestBuilder = SuggestBuilder.fromXContent(context, suggesters, false); + suggestBuilder = SuggestBuilder.fromXContent(context, suggesters); } else if (context.parseFieldMatcher().match(currentFieldName, SORT_FIELD)) { sorts = new ArrayList<>(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java index bff33982da14..9306cb4cbdee 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java @@ -112,18 +112,17 @@ public class SuggestBuilder extends ToXContentToBytes implements Writeable { private static NamedWriteableRegistry namedWriteableRegistry; - private static IndicesQueriesRegistry queriesRegistry; - private static ParseFieldMatcher parseFieldMatcher; - private static Suggesters 
suggesters; /** * Setup for the whole base test class. @@ -74,17 +65,11 @@ public class SuggestBuilderTests extends WritableTestCase { nwRegistry.registerPrototype(SmoothingModel.class, LinearInterpolation.PROTOTYPE); nwRegistry.registerPrototype(SmoothingModel.class, StupidBackoff.PROTOTYPE); namedWriteableRegistry = nwRegistry; - queriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).buildQueryParserRegistry(); - suggesters = new Suggesters(new HashMap<>()); - parseFieldMatcher = ParseFieldMatcher.STRICT; } @AfterClass public static void afterClass() { namedWriteableRegistry = null; - queriesRegistry = null; - suggesters = null; - parseFieldMatcher = null; } @Override @@ -92,66 +77,6 @@ public class SuggestBuilderTests extends WritableTestCase { return namedWriteableRegistry; } - /** - * Test that valid JSON suggestion request passes. - */ - public void testValidJsonRequestPayload() throws Exception { - final String field = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT); - String payload = "{\n" + - " \"valid-suggestion\" : {\n" + - " \"text\" : \"the amsterdma meetpu\",\n" + - " \"term\" : {\n" + - " \"field\" : \"" + field + "\"\n" + - " }\n" + - " }\n" + - "}"; - try { - final SuggestBuilder suggestBuilder = SuggestBuilder.fromXContent(newParseContext(payload), suggesters, true); - assertNotNull(suggestBuilder); - } catch (Exception e) { - fail("Parsing valid json should not have thrown exception: " + e.getMessage()); - } - } - - /** - * Test that a malformed JSON suggestion request fails. 
- */ - public void testMalformedJsonRequestPayload() throws Exception { - final String field = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT); - // {"bad-payload":{"prefix":"sug","completion":{"field":"ytnahgylcc","payload":[{"payload":"field"}]}}} - String payload = "{\n" + - " \"bad-payload\" : {\n" + - " \"text\" : \"the amsterdma meetpu\",\n" + - " \"term\" : {\n" + - " \"field\" : { \"" + field + "\" : \"bad-object\" }\n" + - " }\n" + - " }\n" + - "}"; - try { - final SuggestBuilder suggestBuilder = SuggestBuilder.fromXContent(newParseContext(payload), suggesters, true); - fail("Should not have been able to create SuggestBuilder from malformed JSON: " + suggestBuilder); - } catch (Exception e) { - assertThat(e.getMessage(), containsString("parsing failed")); - } - - // nocommit TODO: awaits completion suggester - /*payload = "{\n" + - " \"bad-payload\" : { \n" + - " \"prefix\" : \"sug\",\n" + - " \"completion\" : { \n" + - " \"field\" : \"" + field + "\",\n " + - " \"payload\" : [ {\"payload\":\"field\"} ]\n" + - " }\n" + - " }\n" + - "}\n"; - try { - final SuggestBuilder suggestBuilder = SuggestBuilder.fromXContent(newParseContext(payload), suggesters); - fail("Should not have been able to create SuggestBuilder from malformed JSON: " + suggestBuilder); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("encountered invalid token")); - }*/ - } - /** * creates random suggestion builder, renders it to xContent and back to new instance that should be equal to original */ @@ -169,7 +94,7 @@ public class SuggestBuilderTests extends WritableTestCase { XContentParser parser = XContentHelper.createParser(xContentBuilder.bytes()); context.reset(parser); - SuggestBuilder secondSuggestBuilder = SuggestBuilder.fromXContent(context, suggesters, true); + SuggestBuilder secondSuggestBuilder = SuggestBuilder.fromXContent(context, suggesters); assertNotSame(suggestBuilder, secondSuggestBuilder); 
assertEquals(suggestBuilder, secondSuggestBuilder); assertEquals(suggestBuilder.hashCode(), secondSuggestBuilder.hashCode()); @@ -233,16 +158,9 @@ public class SuggestBuilderTests extends WritableTestCase { switch (randomIntBetween(0, 2)) { case 0: return TermSuggestionBuilderTests.randomTermSuggestionBuilder(); case 1: return PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder(); - //norelease TODO: uncomment case 2: return CompletionSuggesterBuilderTests.randomCompletionSuggestionBuilder(); + case 2: return CompletionSuggesterBuilderTests.randomCompletionSuggestionBuilder(); default: return TermSuggestionBuilderTests.randomTermSuggestionBuilder(); } } - private static QueryParseContext newParseContext(final String xcontent) throws IOException { - final QueryParseContext parseContext = new QueryParseContext(queriesRegistry); - parseContext.reset(XContentFactory.xContent(xcontent).createParser(xcontent)); - parseContext.parseFieldMatcher(parseFieldMatcher); - return parseContext; - } - } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java index 69871fa3fed2..2126e6d1c81d 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java @@ -19,11 +19,13 @@ package org.elasticsearch.search.suggest.completion; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.core.CompletionFieldMapper; import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase; +import org.elasticsearch.search.suggest.SuggestBuilder; import 
org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.completion.context.CategoryContextMapping; import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext; @@ -38,10 +40,12 @@ import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.stream.Collectors; import static org.hamcrest.core.IsInstanceOf.instanceOf; +import static org.hamcrest.Matchers.containsString; public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTestCase { @@ -168,4 +172,26 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe throw new IllegalStateException("should not through"); } } + + /** + * Test that a malformed JSON suggestion request fails. + */ + public void testMalformedJsonRequestPayload() throws Exception { + final String field = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT); + final String payload = "{\n" + + " \"bad-payload\" : { \n" + + " \"prefix\" : \"sug\",\n" + + " \"completion\" : { \n" + + " \"field\" : \"" + field + "\",\n " + + " \"payload\" : [ {\"payload\":\"field\"} ]\n" + + " }\n" + + " }\n" + + "}\n"; + try { + final SuggestBuilder suggestBuilder = SuggestBuilder.fromXContent(newParseContext(payload), suggesters); + fail("Should not have been able to create SuggestBuilder from malformed JSON: " + suggestBuilder); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("parsing failed")); + } + } } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java index d120577c4557..419253e79e84 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java +++ 
b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java @@ -19,14 +19,17 @@ package org.elasticsearch.search.suggest.term; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase; import org.elasticsearch.search.suggest.DirectSpellcheckerSettings; import org.elasticsearch.search.suggest.SortBy; +import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.StringDistanceImpl; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; import java.io.IOException; +import java.util.Locale; import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_ACCURACY; import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MAX_EDITS; @@ -35,6 +38,7 @@ import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAUL import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MIN_DOC_FREQ; import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MIN_WORD_LENGTH; import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_PREFIX_LENGTH; +import static org.hamcrest.Matchers.containsString; /** * Test the {@link TermSuggestionBuilder} class. 
@@ -280,6 +284,24 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas assertEquals(SuggestMode.MISSING, builder.suggestMode()); } + public void testMalformedJson() { + final String field = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT); + String suggest = "{\n" + + " \"bad-payload\" : {\n" + + " \"text\" : \"the amsterdma meetpu\",\n" + + " \"term\" : {\n" + + " \"field\" : { \"" + field + "\" : \"bad-object\" }\n" + + " }\n" + + " }\n" + + "}"; + try { + final SuggestBuilder suggestBuilder = SuggestBuilder.fromXContent(newParseContext(suggest), suggesters); + fail("Should not have been able to create SuggestBuilder from malformed JSON: " + suggestBuilder); + } catch (Exception e) { + assertThat(e.getMessage(), containsString("parsing failed")); + } + } + @Override protected void assertSuggestionContext(SuggestionContext oldSuggestion, SuggestionContext newSuggestion) { @SuppressWarnings("unchecked") From b4db26eaf9a07948e4da8d1197aadfb414699562 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 8 Mar 2016 18:37:20 +0100 Subject: [PATCH 143/320] Sort: Move up `order` field to SortBuilder Currently all SortBuilder implementations have their separate order field. This PR moves this up to SortBuilder, together with setter and getter and makes sure the default is set to SortOrder.ASC except for `_score` sorting where the default is SortOrder.DESC. 
--- .../search/sort/FieldSortBuilder.java | 17 +--- .../search/sort/GeoDistanceSortBuilder.java | 77 +++++++------------ .../search/sort/ScoreSortBuilder.java | 27 ++----- .../search/sort/ScriptSortBuilder.java | 19 +---- .../search/sort/SortBuilder.java | 24 +++++- .../builder/SearchSourceBuilderTests.java | 8 +- 6 files changed, 61 insertions(+), 111 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index 67ceb75a29cb..e805e21eff53 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -27,12 +27,10 @@ import java.io.IOException; /** * A sort builder to sort based on a document field. */ -public class FieldSortBuilder extends SortBuilder { +public class FieldSortBuilder extends SortBuilder { private final String fieldName; - private SortOrder order; - private Object missing; private String unmappedType; @@ -55,15 +53,6 @@ public class FieldSortBuilder extends SortBuilder { this.fieldName = fieldName; } - /** - * The order of sorting. Defaults to {@link SortOrder#ASC}. - */ - @Override - public FieldSortBuilder order(SortOrder order) { - this.order = order; - return this; - } - /** * Sets the value when a field is missing in a doc. Can also be set to _last or * _first to sort missing last or first respectively. 
@@ -118,9 +107,7 @@ public class FieldSortBuilder extends SortBuilder { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(fieldName); - if (order != null) { - builder.field("order", order.toString()); - } + builder.field(ORDER_FIELD.getPreferredName(), order); if (missing != null) { builder.field("missing", missing); } diff --git a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 708152af1f04..b5a10e238b71 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -44,7 +44,7 @@ import java.util.Objects; /** * A geo distance based sorting on a geo point like field. */ -public class GeoDistanceSortBuilder extends SortBuilder +public class GeoDistanceSortBuilder extends SortBuilder implements ToXContent, NamedWriteable, SortElementParserTemp { public static final String NAME = "_geo_distance"; public static final boolean DEFAULT_COERCE = false; @@ -57,14 +57,13 @@ public class GeoDistanceSortBuilder extends SortBuilder private GeoDistance geoDistance = GeoDistance.DEFAULT; private DistanceUnit unit = DistanceUnit.DEFAULT; - private SortOrder order = SortOrder.ASC; - + // TODO there is an enum that covers that parameter which we should be using here private String sortMode = null; @SuppressWarnings("rawtypes") private QueryBuilder nestedFilter; private String nestedPath; - + // TODO switch to GeoValidationMethod enum private boolean coerce = DEFAULT_COERCE; private boolean ignoreMalformed = DEFAULT_IGNORE_MALFORMED; @@ -109,7 +108,7 @@ public class GeoDistanceSortBuilder extends SortBuilder } this.fieldName = fieldName; } - + /** * Copy constructor. 
* */ @@ -125,7 +124,7 @@ public class GeoDistanceSortBuilder extends SortBuilder this.coerce = original.coerce; this.ignoreMalformed = original.ignoreMalformed; } - + /** * Returns the geo point like field the distance based sort operates on. * */ @@ -153,7 +152,7 @@ public class GeoDistanceSortBuilder extends SortBuilder this.points.addAll(Arrays.asList(points)); return this; } - + /** * Returns the points to create the range distance facets from. */ @@ -163,7 +162,7 @@ public class GeoDistanceSortBuilder extends SortBuilder /** * The geohash of the geo point to create the range distance facets from. - * + * * Deprecated - please use points(GeoPoint... points) instead. */ @Deprecated @@ -173,7 +172,7 @@ public class GeoDistanceSortBuilder extends SortBuilder } return this; } - + /** * The geo distance type used to compute the distance. */ @@ -181,7 +180,7 @@ public class GeoDistanceSortBuilder extends SortBuilder this.geoDistance = geoDistance; return this; } - + /** * Returns the geo distance type used to compute the distance. */ @@ -204,20 +203,6 @@ public class GeoDistanceSortBuilder extends SortBuilder return this.unit; } - /** - * The order of sorting. Defaults to {@link SortOrder#ASC}. - */ - @Override - public GeoDistanceSortBuilder order(SortOrder order) { - this.order = order; - return this; - } - - /** Returns the order of sorting. */ - public SortOrder order() { - return this.order; - } - /** * Defines which distance to use for sorting in the case a document contains multiple geo points. * Possible values: min and max @@ -240,16 +225,16 @@ public class GeoDistanceSortBuilder extends SortBuilder * Sets the nested filter that the nested objects should match with in order to be taken into account * for sorting. 
*/ - public GeoDistanceSortBuilder setNestedFilter(QueryBuilder nestedFilter) { + public GeoDistanceSortBuilder setNestedFilter(QueryBuilder nestedFilter) { this.nestedFilter = nestedFilter; return this; } - /** + /** * Returns the nested filter that the nested objects should match with in order to be taken into account - * for sorting. + * for sorting. **/ - public QueryBuilder getNestedFilter() { + public QueryBuilder getNestedFilter() { return this.nestedFilter; } @@ -261,7 +246,7 @@ public class GeoDistanceSortBuilder extends SortBuilder this.nestedPath = nestedPath; return this; } - + /** * Returns the nested path if sorting occurs on a field that is inside a nested object. By default when sorting on a * field inside a nested object, the nearest upper nested object is selected as nested path. @@ -285,7 +270,7 @@ public class GeoDistanceSortBuilder extends SortBuilder } return this; } - + public boolean ignoreMalformed() { return this.ignoreMalformed; } @@ -302,11 +287,7 @@ public class GeoDistanceSortBuilder extends SortBuilder builder.field("unit", unit); builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT)); - if (order == SortOrder.DESC) { - builder.field("reverse", true); - } else { - builder.field("reverse", false); - } + builder.field(ORDER_FIELD.getPreferredName(), order); if (sortMode != null) { builder.field("mode", sortMode); @@ -363,7 +344,7 @@ public class GeoDistanceSortBuilder extends SortBuilder public void writeTo(StreamOutput out) throws IOException { out.writeString(fieldName); out.writeGenericValue(points); - + geoDistance.writeTo(out); unit.writeTo(out); order.writeTo(out); @@ -382,10 +363,10 @@ public class GeoDistanceSortBuilder extends SortBuilder @Override public GeoDistanceSortBuilder readFrom(StreamInput in) throws IOException { String fieldName = in.readString(); - - ArrayList points = (ArrayList) in.readGenericValue(); + + ArrayList points = (ArrayList) in.readGenericValue(); GeoDistanceSortBuilder result = new 
GeoDistanceSortBuilder(fieldName, points.toArray(new GeoPoint[points.size()])); - + result.geoDistance(GeoDistance.readGeoDistanceFrom(in)); result.unit(DistanceUnit.readDistanceUnit(in)); result.order(SortOrder.readOrderFrom(in)); @@ -409,9 +390,9 @@ public class GeoDistanceSortBuilder extends SortBuilder List geoPoints = new ArrayList<>(); DistanceUnit unit = DistanceUnit.DEFAULT; GeoDistance geoDistance = GeoDistance.DEFAULT; - boolean reverse = false; + SortOrder order = SortOrder.ASC; MultiValueMode sortMode = null; - QueryBuilder nestedFilter = null; + QueryBuilder nestedFilter = null; String nestedPath = null; boolean coerce = GeoDistanceSortBuilder.DEFAULT_COERCE; @@ -429,8 +410,8 @@ public class GeoDistanceSortBuilder extends SortBuilder } else if (token == XContentParser.Token.START_OBJECT) { // the json in the format of -> field : { lat : 30, lon : 12 } if ("nested_filter".equals(currentName) || "nestedFilter".equals(currentName)) { - // TODO Note to remember: while this is kept as a QueryBuilder internally, - // we need to make sure to call toFilter() on it once on the shard + // TODO Note to remember: while this is kept as a QueryBuilder internally, + // we need to make sure to call toFilter() on it once on the shard // (e.g. in the new build() method) nestedFilter = context.parseInnerQueryBuilder(); } else { @@ -441,9 +422,9 @@ public class GeoDistanceSortBuilder extends SortBuilder } } else if (token.isValue()) { if ("reverse".equals(currentName)) { - reverse = parser.booleanValue(); + order = parser.booleanValue() ? 
SortOrder.DESC : SortOrder.ASC; } else if ("order".equals(currentName)) { - reverse = "desc".equals(parser.text()); + order = SortOrder.fromString(parser.text()); } else if ("unit".equals(currentName)) { unit = DistanceUnit.fromString(parser.text()); } else if ("distance_type".equals(currentName) || "distanceType".equals(currentName)) { @@ -474,11 +455,7 @@ public class GeoDistanceSortBuilder extends SortBuilder GeoDistanceSortBuilder result = new GeoDistanceSortBuilder(fieldName, geoPoints.toArray(new GeoPoint[geoPoints.size()])); result.geoDistance(geoDistance); result.unit(unit); - if (reverse) { - result.order(SortOrder.DESC); - } else { - result.order(SortOrder.ASC); - } + result.order(order); if (sortMode != null) { result.sortMode(sortMode.name()); } diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java index 5d1a0d82987c..6b1bc054ee77 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java @@ -35,38 +35,24 @@ import java.util.Objects; /** * A sort builder allowing to sort by score. */ -public class ScoreSortBuilder extends SortBuilder implements NamedWriteable, +public class ScoreSortBuilder extends SortBuilder implements NamedWriteable, SortElementParserTemp { private static final String NAME = "_score"; static final ScoreSortBuilder PROTOTYPE = new ScoreSortBuilder(); public static final ParseField REVERSE_FIELD = new ParseField("reverse"); public static final ParseField ORDER_FIELD = new ParseField("order"); - private SortOrder order = SortOrder.DESC; - /** - * The order of sort scoring. By default, its {@link SortOrder#DESC}. 
- */ - @Override - public ScoreSortBuilder order(SortOrder order) { - Objects.requireNonNull(order, "sort order cannot be null."); - this.order = order; - return this; + public ScoreSortBuilder() { + // order defaults to desc when sorting on the _score + order(SortOrder.DESC); } - /** - * Get the order of sort scoring. By default, its {@link SortOrder#DESC}. - */ - public SortOrder order() { - return this.order; - } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); - if (order == SortOrder.ASC) { - builder.field(REVERSE_FIELD.getPreferredName(), true); - } + builder.field(ORDER_FIELD.getPreferredName(), order); builder.endObject(); return builder; } @@ -124,7 +110,8 @@ public class ScoreSortBuilder extends SortBuilder implements NamedWriteable { private Script script; private final String type; - private SortOrder order; - private String sortMode; private QueryBuilder nestedFilter; @@ -53,15 +51,6 @@ public class ScriptSortBuilder extends SortBuilder { this.type = type; } - /** - * Sets the sort order. - */ - @Override - public ScriptSortBuilder order(SortOrder order) { - this.order = order; - return this; - } - /** * Defines which distance to use for sorting in the case a document contains multiple geo points. * Possible values: min and max @@ -75,7 +64,7 @@ public class ScriptSortBuilder extends SortBuilder { * Sets the nested filter that the nested objects should match with in order to be taken into account * for sorting. 
*/ - public ScriptSortBuilder setNestedFilter(QueryBuilder nestedFilter) { + public ScriptSortBuilder setNestedFilter(QueryBuilder nestedFilter) { this.nestedFilter = nestedFilter; return this; } @@ -94,9 +83,7 @@ public class ScriptSortBuilder extends SortBuilder { builder.startObject("_script"); builder.field("script", script); builder.field("type", type); - if (order == SortOrder.DESC) { - builder.field("reverse", true); - } + builder.field(ORDER_FIELD.getPreferredName(), order); if (sortMode != null) { builder.field("mode", sortMode); } diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/SortBuilder.java index 0935b76ece95..7852af4e97ed 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortBuilder.java @@ -20,14 +20,20 @@ package org.elasticsearch.search.sort; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import java.util.Objects; + /** * */ -public abstract class SortBuilder implements ToXContent { +public abstract class SortBuilder> implements ToXContent { + + protected SortOrder order = SortOrder.ASC; + public static final ParseField ORDER_FIELD = new ParseField("order"); @Override public String toString() { @@ -42,7 +48,19 @@ public abstract class SortBuilder implements ToXContent { } /** - * The order of sorting. Defaults to {@link SortOrder#ASC}. + * Set the order of sorting. */ - public abstract SortBuilder order(SortOrder order); + @SuppressWarnings("unchecked") + public T order(SortOrder order) { + Objects.requireNonNull(order, "sort order cannot be null."); + this.order = order; + return (T) this; + } + + /** + * Return the {@link SortOrder} used for this {@link SortBuilder}. 
+ */ + public SortOrder order() { + return this.order; + } } diff --git a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index 6b29cabe3f64..2cb4554ed085 100644 --- a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -49,15 +49,9 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.EnvironmentModule; import org.elasticsearch.index.Index; import org.elasticsearch.index.query.AbstractQueryTestCase; -import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.EmptyQueryBuilder; -import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryRewriteContext; -import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.index.query.WrapperQueryBuilder; -import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -554,7 +548,7 @@ public class SearchSourceBuilderTests extends ESTestCase { SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.parseSearchSource(parser, createParseContext(parser), aggParsers); assertEquals(1, searchSourceBuilder.sorts().size()); - assertEquals("{\"foo\":{}}", searchSourceBuilder.sorts().get(0).toUtf8()); + assertEquals("{\"foo\":{\"order\":\"asc\"}}", searchSourceBuilder.sorts().get(0).toUtf8()); } } From 6deabac8e8549cc7f01584144f40001492baa7cf Mon Sep 17 00:00:00 2001 From: David Pilato Date: Thu, 10 Mar 2016 10:57:59 +0100 Subject: [PATCH 144/320] Can not extract text from Office 
documents (`.docx` extension) Add REST test for: * `.doc` * `.docx` The later fails with: ``` ==> Test Info: seed=DB93397128B876D4; jvm=1; suite=1 Suite: org.elasticsearch.ingest.attachment.IngestAttachmentRestIT 2> REPRODUCE WITH: gradle :plugins:ingest-attachment:integTest -Dtests.seed=DB93397128B876D4 -Dtests.class=org.elasticsearch.ingest.attachment.IngestAttachmentRestIT -Dtests.method="test {yaml=ingest_attachment/30_files_supported/Test ingest attachment processor with .docx file}" -Des.logger.level=WARN -Dtests.security.manager=true -Dtests.locale=bg -Dtests.timezone=Europe/Athens FAILURE 4.53s | IngestAttachmentRestIT.test {yaml=ingest_attachment/30_files_supported/Test ingest attachment processor with .docx file} <<< FAILURES! > Throwable #1: java.lang.AssertionError: expected [2xx] status code but api [index] returned [400 Bad Request] [{"error":{"root_cause":[{"type":"parse_exception","reason":"Error parsing document in field [field1]"}],"type":"parse_exception","reason":"Error parsing document in field [field1]","caused_by":{"type":"tika_exception","reason":"Unexpected RuntimeException from org.apache.tika.parser.microsoft.ooxml.OOXMLParser@7f85baa5","caused_by":{"type":"illegal_state_exception","reason":"access denied (\"java.lang.RuntimePermission\" \"getClassLoader\")","caused_by":{"type":"access_control_exception","reason":"access denied (\"java.lang.RuntimePermission\" \"getClassLoader\")"}}}},"status":400}] > at __randomizedtesting.SeedInfo.seed([DB93397128B876D4:53C706AB86441B2C]:0) > at org.elasticsearch.test.rest.section.DoSection.execute(DoSection.java:107) > at org.elasticsearch.test.rest.ESRestTestCase.test(ESRestTestCase.java:395) > at java.lang.Thread.run(Thread.java:745) ``` Related to #16864 --- .../ingest_attachment/30_files_supported.yaml | 79 +++++++++++++++++++ 1 file changed, 79 insertions(+) create mode 100644 plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/30_files_supported.yaml diff --git 
a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/30_files_supported.yaml b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/30_files_supported.yaml new file mode 100644 index 000000000000..48fded555b6b --- /dev/null +++ b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/30_files_supported.yaml @@ -0,0 +1,79 @@ +--- +"Test ingest attachment processor with .doc file": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "attachment" : { + "source_field" : "field1" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: { field1: "0M8R4KGxGuEAAAAAAAAAAAAAAAAAAAAAPgADAP7/CQAGAAAAAAAAAAAAAAAEAAAAjAEAAAAAAAAAEAAAjgEAAAEAAAD+////AAAAAIgBAACJAQAAigEAAIsBAAD////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////spcEAg+kMBAAA8BK/AAAAAAABEQABAAEACAAAEwgAAA4AYmpiaoI4gjgAAAAAAAAAAAAAAAAAAAAAAAAMBBYANA4AAOBSAADgUgAAEwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//w8AAAAAAAAAAAD//w8AAAAAAAAAAAD//w8AAAAAAAAAAAAAAAAAAAAAALcAAAAAAFAHAAAAAAAAUAcAAMcUAAAAAAAAxxQAAAAAAADHFAAAAAAAAMcUAAAAAAAAxxQAABQAAAAAAAAAAAAAAP////8AAAAA2xQAAAAAAADbFAAAAAAAANsUAAAAAAAA2xQAAAwAAADnFAAADAAAANsUAAAAAAAA3hUAADABAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAAVRUAAAIAAABXFQAAAAAAAFcVAAAAAAAAVxUAAAAAAABXFQAAAAAAAFcVAAAAAAAAVxUAAC
wAAAAOFwAAtgIAAMQZAABaAAAAgxUAABUAAAAAAAAAAAAAAAAAAAAAAAAAxxQAAAAAAADzFAAAAAAAAAAAAAAAAAAAAAAAAAAAAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAADzFAAAAAAAAIMVAAAAAAAAGRUAAAAAAADHFAAAAAAAAMcUAAAAAAAA8xQAAAAAAAAAAAAAAAAAAPMUAAAAAAAAmBUAABYAAAAZFQAAAAAAABkVAAAAAAAAGRUAAAAAAADzFAAAFgAAAMcUAAAAAAAA8xQAAAAAAADHFAAAAAAAAPMUAAAAAAAAVRUAAAAAAAAAAAAAAAAAABkVAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8xQAAAAAAABVFQAAAAAAAAAAAAAAAAAAGRUAAAAAAAAAAAAAAAAAABkVAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGRUAAAAAAAAAAAAAAAAAAP////8AAAAAgI6XYKZ60QEAAAAAAAAAAP////8AAAAACRUAABAAAAAZFQAAAAAAAAAAAAAAAAAAQRUAABQAAACuFQAAMAAAAN4VAAAAAAAAGRUAAAAAAAAeGgAAAAAAABkVAAAAAAAAHhoAAAAAAAAZFQAAAAAAABkVAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADHFAAAAAAAABkVAAAoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAAgxUAAAAAAACDFQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGRUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPMUAAAAAAAA8xQAAAAAAADzFAAAAAAAAN4VAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAAAAAAAAAAAAAP////8AAAAA/////wAAAAD/////AAAAAAAAAAAAAAAA/////wAAAAD/////AAAAAP////8AAAAA/////wAAAAD/////AAAAAP////8AAAAA/////wAAAAD/////AAAAAP////8AAAAA/////wAAAAD/////AAAAAP////8AAAAA/////wAAAAD/////AAAAAB4aAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAABQBwAAPQwAAI0TAAA6AQAABwAMAQ8ADQEAAAwEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFRlc3QgZWxhc3RpY3NlYXJjaA0AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAABIIAAATCAAA/PgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYWaJVGuQAABhZo3wiGAAIACAAAEwgAAP0AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAATIAMZBoATpwpBeqAB+wfC4gsMhBIbCJBSKwiQUjkIkFJJCJBSWwAAAXsMQCGLDEAgyQxAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALgYPABIAAQB8AQ8ACAADAAMAAwAAAAQACAAAAJgAAACeAAAAngAAAJ4AAACeAAAAngAAAJ4AAACeAAAAngAAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAAHYCAAB2AgAAdgIAAHYCAAB2AgAAdgIAAHYCAAB2AgAAdgIAADYGAAA2BgAANgYAADYGAAA2BgAANgYAAD4CAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAACoAAAANgYAADYGAAAWAAAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAC4AAAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAAaAEAAEgBAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAAHACAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAAMgYAABgAAADGAwAA1gMAAOYDAAD2AwAABgQAABYEAAAmBAAANgQAAEYEAABWBAAAZgQAAHYEAACGBAAAlgQAAMYDAADWAwAA5gMAAPYDAAAGBAAAFgQAADIGAAAoAgAA2AEAAOgBAAAmBAAANgQAAEYEAABWBA
AAZgQAAHYEAACGBAAAlgQAAMYDAADWAwAA5gMAAPYDAAAGBAAAFgQAACYEAAA2BAAARgQAAFYEAABmBAAAdgQAAIYEAACWBAAAxgMAANYDAADmAwAA9gMAAAYEAAAWBAAAJgQAADYEAABGBAAAVgQAAGYEAAB2BAAAhgQAAJYEAADGAwAA1gMAAOYDAAD2AwAABgQAABYEAAAmBAAANgQAAEYEAABWBAAAZgQAAHYEAACGBAAAlgQAAMYDAADWAwAA5gMAAPYDAAAGBAAAFgQAACYEAAA2BAAARgQAAFYEAABmBAAAdgQAAIYEAACWBAAAxgMAANYDAADmAwAA9gMAAAYEAAAWBAAAJgQAADYEAABGBAAAVgQAAGYEAAB2BAAAhgQAAJYEAAA4AQAAWAEAAPgBAAAIAgAAGAIAAFYCAAB+AgAAkAIAAKACAACwAgAAwAIAANACAACAAgAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAADgAgAA8AIAAAADAAAQAwAAIAMAADADAABAAwAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAADgAgAA8AIAAAADAAAQAwAAIAMAADADAABAAwAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAADgAgAA8AIAAAADAAAQAwAAIAMAADADAABAAwAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAADgAgAA8AIAAAADAAAQAwAAIAMAADADAABAAwAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAAAgAAAAT0oDAFBKAwBRSgMAX0gBBG1IDARuSAwEc0gMBHRIDAQAAAAAQAAAYPH/AgBAAAwQAAAAAAAAAAAGAE4AbwByAG0AYQBsAAAAAgAAABgAQ0oYAF9IAQRhShgAbUgMBHNIDAR0SAkEAAAAAAAAAAAAAAAAAAAAAAAAOgBBIPL/oQA6AAwNAAAAAAAAEAARAFAAbwBsAGkAYwBlACAAcABhAHIAIABkAOkAZgBhAHUAdAAAAAAAVgBpAPP/swBWAAwNAAAAAAAAMAYOAFQAYQBiAGwAZQBhAHUAIABOAG8AcgBtAGEAbAAAABwAF/YDAAA01gYAAQoDbAA01gYAAQUDAABh9gMAAAIACwAAADIAayD0/8EAMgAADQAAAAAAADAGDABBAHUAYwB1AG4AZQAgAGwAaQBzAHQAZQAAAAIADAAAAAAAUEsDBBQABgAIAAAAIQCb6HBP/AAAABwCAAATAAAAW0NvbnRlbnRfVHlwZXNdLnhtbKyRy2rDMBBF94X+g9C22HK6KKXYzqKPXR+L9AMGeWyL2CMhTULy9x07LpQSAoVuBNLMvffMqFwfxkHtMSbnqdKrvNAKyfrGUVfpz81Ldq9VYqAGBk9Y6SMmva6vr8rNMWBSoqZU6Z45PBiTbI8jpNwHJKm0Po7Aco2dCWC30KG5LYo7Yz0xEmc8eei6fMIWdgOr54M8n0hErtXjqW+KqjSEMDgLLKBmqpqzuohDuiDcU/OLLlvIclHO5ql3Id0sCe+ymugaVB8Q+Q1G4TAsQ+LP8xVIRov5ZeYz0b5tncXG290o68hn48XsTwCr/4n+zjTz39ZfAAAA//8DAFBLAwQUAAYACAAAACEApdan58AAAAA2AQAACwAAAF9yZWxzLy5yZWxzhI/PasMwDIfvhb2D0X1R0sMYJXYvpZBDL6N9AOEof2giG9sb69tPxwYKuwiEpO/3qT3+rov54ZTnIBaaqgbD4kM/y2jhdj2/f4LJhaSnJQhbeHCGo3vbtV
+8UNGjPM0xG6VItjCVEg+I2U+8Uq5CZNHJENJKRds0YiR/p5FxX9cfmJ4Z4DZM0/UWUtc3YK6PqMn/s8MwzJ5PwX+vLOVFBG43lExp5GKhqC/jU72QqGWq1B7Qtbj51v0BAAD//wMAUEsDBBQABgAIAAAAIQBreZYWgwAAAIoAAAAcAAAAdGhlbWUvdGhlbWUvdGhlbWVNYW5hZ2VyLnhtbAzMTQrDIBBA4X2hd5DZN2O7KEVissuuu/YAQ5waQceg0p/b1+XjgzfO3xTVm0sNWSycBw2KZc0uiLfwfCynG6jaSBzFLGzhxxXm6XgYybSNE99JyHNRfSPVkIWttd0g1rUr1SHvLN1euSRqPYtHV+jT9yniResrJgoCOP0BAAD//wMAUEsDBBQABgAIAAAAIQBtTVmryAYAAI4aAAAWAAAAdGhlbWUvdGhlbWUvdGhlbWUxLnhtbOxZ3YrbRhS+L/QdhO4d/0n+WeINtmxv2uwmIXbS5nJWHkuTHWmMZrwbEwJ9gkIhLb0p9K6F3gTaN+i7pLTpQ/TMSJZn7HH2hy2E0jUs8vg7Z7455+g7I83dey8T6pzjjBOW9tz6nZrr4DRkM5JGPffpdFzpuA4XKJ0hylLcc1eYu/cOP/3kLjoQMU6wA/YpP0A9NxZicVCt8hCGEb/DFjiF3+YsS5CAr1lUnWXoAvwmtNqo1VrVBJHUdVKUgNtp/PvP4OzRfE5C7B6uvY8oTJEKLgdCmk2kb1yYDJYZRkuFnZ3VJYKveEAz5xzRngsTzdjFFL8UrkMRF/BDz62pP7d6eLeKDgojKvbYanZj9VfYFQazs4aaM4tOy0k9z/da/dK/AlCxixu1R61Rq/SnACgMYaU5F92nP+gOhn6B1UD5pcX3sD1s1g285r+5w7nvy4+BV6Dcv7eDH48DiKKBV6Ac7+/gPa/dCDwDr0A5vrWDb9f6Q69t4BUopiQ920HX/FYzWK+2hMwZvW+Fd31v3G4UzjcoqIayuuQUc5aKfbWWoBcsGwNAAikSJHXEaoHnKIQyDhAlpxlxjkkUQ+EtUMo4DNcatXGtCf/lx1NXKiLoACPNWvICJnxnSPJxeJiRhei5n4NXV4M8XzpHTMQkLGZVTgyL+yiNdIv3P33z9w9fOX/9+uP7N9/mk27juY4f4jT6kqD0QxPAajdhePfd2z9+e/vu+6///OWNxX8/Q6c6fEoSzJ2H+MJ5whJYnGUF+DS7nsU0RkS36KcRRymSs1j8jyB+OvrhClFkwQ0gEjruWQYyYwMeLV8YhCdxthTE4vFBnBjAE8bogGXWKDyQc2lhni7TyD55ttRxTxA6t80doNTI82i5AH0lNpdBjA2ajylKBYpwioUjf2NnGFtW95wQI64nJMwYZ3PhPCfOABFrSKbk1KimjdF9kkBeVjaCkG8jNifPnAGjtlUP8bmJhLsDUQv5KaZGGI/QUqDE5nKKEqoH/BiJ2EZysspCHTfiAjIdYcqc0QxzbrN5lMF6taQ/AImxp/2ErhITmQlyZvN5jBjTkUN2FsQoWdiwE5LGOvYzfgYlipzHTNjgJ8y8Q+R3yAOIx750PyPYSPflavAU1FWntCkQ+csys+TyCDOjficrOkdYSQ2Iv6HpCUkvFfgtaff/PWk/IWkYM8uKbkvU7a6NjFxTzvsZsd5P97dEfB9uW7oDls3Ix6/cQ7RMH2O4WXbb1//C/b9wu/954d53P9++XG8UGsRbbl3zzbrauid7d+5zQulErCg+5mrzzqEvzcYwKO3UYysun+QWMVzKOxkmMHBRhpSNkzHxBRHxJEYL2OHXXekk4oXriDsLxmHjr4atviWeLpMTNssfWOt1+XCaiwdHYjNe88txeNgQObrV3jyEle4V20g9LK8JSNvrkNAmM0k0LSTa60EZJPVoDkGzkFAruxUWXQuLjnS/TtUOC6BWZgU2Tg5st3qu74EJGMEzFaJ4JvOUp3qdXZXM28z0vmAaFQC7iHUFbDLdlVz3Lk
+uLi+1K2TaIKGVm0lCRUb1MB6jGS6qU45ehcZ1c93dpNSgJ0Oh5oPS2tBodz7E4qa5BrttbaCprhQ0dS56bqvpQ8mEaNFz5/DgD5fJAmqHyw0vohG8PgtFlt/wN1GWRcbFEPE4D7gSnVwNEiJw5lCS9Fy5/DINNFUaorjVGyAIHy25LsjKx0YOkm4mGc/nOBR62rURGen8Kyh8rhXWX5X5zcHSki0h3ZN4duGc0mX2BEGJ+e26DOCMcHj/U8+jOSPwQrMUsk39bTWmQnb1N4qqhvJxRBcxKjqKLuY5XEl5SUd9K2OgfSvWDAHVQlI0wtNINlg9qEY3LbtGzmFv173cSEZOE81NzzRURXZNu4oZM6zbwFYsb9bkNVbrEIOm6R0+l+5tye2utW5rn1B2CQh4GT9L171CQ9CobSYzqEnGuzIsNbsYNXvHeoGXULtKk9BUv7V2uxW3skdYp4PBG3V+sNuuWhiar/eVKtLq6EM/nGCnL0A8hvAaeEkFV6mEo4cMwYZoovYkuWzALfJSFLcGXDnLjPTcVzW/7wUNP6jUOv6o4jW9WqXj95uVvu836yO/XhsOGq+hsYg4qfv5scsYXkTRVXH4osZ3DmCS9bu2OyFLqkydrFQVcXUAU28YBzD5yYszlQcsrkNAdF61GuNusztoVbrN/rjiDQedSjdoDSrDVtAejoeB3+mOX7vOuQJ7/WbgtUadSqseBBWvVZP0O91K22s0+l673xl5/dfFNgZWnstHEQsIr+J1+A8AAAD//wMAUEsDBBQABgAIAAAAIQAN0ZCftgAAABsBAAAnAAAAdGhlbWUvdGhlbWUvX3JlbHMvdGhlbWVNYW5hZ2VyLnhtbC5yZWxzhI9NCsIwFIT3gncIb2/TuhCRJt2I0K3UA4TkNQ02PyRR7O0NriwILodhvplpu5edyRNjMt4xaKoaCDrplXGawW247I5AUhZOidk7ZLBggo5vN+0VZ5FLKE0mJFIoLjGYcg4nSpOc0IpU+YCuOKOPVuQio6ZByLvQSPd1faDxmwF8xSS9YhB71QAZllCa/7P9OBqJZy8fFl3+UUFz2YUFKKLGzOAjm6pMBMpburrE3wAAAP//AwBQSwECLQAUAAYACAAAACEAm+hwT/wAAAAcAgAAEwAAAAAAAAAAAAAAAAAAAAAAW0NvbnRlbnRfVHlwZXNdLnhtbFBLAQItABQABgAIAAAAIQCl1qfnwAAAADYBAAALAAAAAAAAAAAAAAAAAC0BAABfcmVscy8ucmVsc1BLAQItABQABgAIAAAAIQBreZYWgwAAAIoAAAAcAAAAAAAAAAAAAAAAABYCAAB0aGVtZS90aGVtZS90aGVtZU1hbmFnZXIueG1sUEsBAi0AFAAGAAgAAAAhAG1NWavIBgAAjhoAABYAAAAAAAAAAAAAAAAA0wIAAHRoZW1lL3RoZW1lL3RoZW1lMS54bWxQSwECLQAUAAYACAAAACEADdGQn7YAAAAbAQAAJwAAAAAAAAAAAAAAAADPCQAAdGhlbWUvdGhlbWUvX3JlbHMvdGhlbWVNYW5hZ2VyLnhtbC5yZWxzUEsFBgAAAAAFAAUAXQEAAMoKAAAAADw/eG1sIHZlcnNpb249IjEuMCIgZW5jb2Rpbmc9IlVURi04IiBzdGFuZGFsb25lPSJ5ZXMiPz4NCjxhOmNsck1hcCB4bWxuczphPSJodHRwOi8vc2NoZW1hcy5vcGVueG1sZm9ybWF0cy5vcmcvZHJhd2luZ21sLzIwMDYvbWFpbiIgYmcxPSJsdDEiIHR4MT0iZGsxIiBiZzI9Imx0MiIgdHgyPSJkazIiIGFjY2VudDE9ImFjY2VudDEiIGFjY2VudDI9ImFjY2VudDIiIGFjY2VudDM9ImFjY2VudDMiIGFjY2VudDQ9ImFjY2VudDQiIGFjY2VudDU9ImFjY2VudDUiIGFjY2VudDY9ImFjY2VudDYiIGhsaW5rPSJobGluayIgZm9sSG
xpbms9ImZvbEhsaW5rIi8+AAAAABMAAAAUAAAOAAAIAP////8ACAAAEwgAAAUAAAAACAAAEwgAAAYAAAAAAAAABQAAABIAAAAVAAAABwAEAAcAAAAAABIAAAAVAAAABAAHAAQAAAAEAAAACAAAAOUAAAAAAAAAAwAAAN8IhgCkF6oAlUa5AH419AAAAAAAEwAAABUAAAAAAAAAAQAAAP9AAIABABIAAAASAAAAAEBDewEAAQASAAAAAAAAABIAAAAAAAAAAAAAAAAAAAACEAAAAAAAAAATAAAAoAAAEABAAAD//wEAAAAHAFUAbgBrAG4AbwB3AG4A//8BAAgAAAAAAAAAAAAAAP//AQAAAAAA//8AAAIA//8AAAAA//8AAAIA//8AAAAABQAAAEcOkAEAAAICBgMFBAUCAwTvKgDgQXgAwAkAAAAAAAAA/wEAAAAAAABUAGkAbQBlAHMAIABOAGUAdwAgAFIAbwBtAGEAbgAAADUOkAECAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAgAAAAABTAHkAbQBiAG8AbAAAADMOkAEAAAILBgQCAgICAgT/KgDgQ3gAwAkAAAAAAAAA/wEAAAAAAABBAHIAaQBhAGwAAAA3DpABAAACDwUCAgIEAwIE/wIA4P+sAEABAAAAAAAAAJ8BAAAAAAAAQwBhAGwAaQBiAHIAaQAAAEESkAEBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABDAGEAbQBiAHIAaQBhACAATQBhAHQAaAAAACAABADxCIgIAPDEAgAAqQEAAAAAWVJDh1lSQ4cAAAAAAgABAAAAAgAAABEAAAABAAEAAAAEAAOQAQAAAAIAAAARAAAAAQABAAAAAQAAAAAAAAAhAwDwEAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAClBsAHtAC0AIGBcjAAAAAAAAAAAAAAAAAAABIAAAASAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAAAAABAAAAA8BAACAD8/QEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACSFAAAAAACfH/DwAAJFAAABAnAAD///9/////f////3////9/////f////3////9/3wiGAAAEAAAyAAAAAAAAAAAAAAAAAAAAAAAAAAAAIQQAAAAAAAAAAAAAAAAAAAAAAAAQHAAABAAAAAAAAAAAAHgAAAB4AAAAAAAAAAAAAACgBQAAGkjOCAsAAAAAAAAA3AAAAAEAAAD//xIAAAAAAAAAAAAAAAAAAAAMAEQAYQB2AGkAZAAgAFAAaQBsAGEAdABvAAwARABhAHYAaQBkACAAUABpAGwAYQB0AG8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP7/AAADCgEAAAAAAAAAAAAAAAAAAAAAAAEAAADghZ/y+U9oEKuRCAArJ7PZMAAAANzSAgASAAAAAQAAAJgAAAACAAAAoAAAAAMAAACsAAAABAAAALgAAAAFAAAA0AAAAAYAAADcAAAABwAAAOgAAAAIAAAA/AAAAAkAAAAUAQAAEgAAACABAAAKAAAARAEAAAwAAABQAQAADQAAAFwBAAAOAAAAaAEAAA8AAABwAQAAEAAAAHgBAAATAAAAgAEAABEAAACIAQAAAgAAABAnAAAeAAAABAAAAAAAAAAeAAAABAAAAAAAAAAeAAAAEAAAAERhdmlkIFBpbGF0bwAAAAAeAAAABAAAAAAAAAAeAAAABAAAAAAAAAAeAAAADAAAAE5vcm1hbC5kb3RtAB4AAAAQAAAARGF2aWQgUGlsYXRvAAAAAB4AAAAEAAAAMgAAAB4AAAAcAAAATWljcm9zb2Z0IE1hY2ludG9zaCBXb3JkAAAAAEAAAAAARsMjAAAAAEAAAAAAFjZWpnrRAUAAAAAAFjZWpnrRAQMAAAABAAAAAwAAAAIAAAADAAAAEQAAAAMAAAAAAAAARwAAAEzRAgD/////DgAAAAEAAABsAAAAAAAAAAAAAAD/AAAAswAAAAAAAAAAAAAAZhkAANsRAAAgRU1GAAABAETRAgAIAAAAAQAAAAAAAAAAAAAAAAAAAOwEAACxAwAAQAEAAPAAAAAAAAAAAAAAAAAAAAAA4gQAgKkDABEAAAAMAAAACAAAAAoAAAAQAAAAAAAAAAAAAAAJAAAAEAAAAAABAAC0AAAADAAAABAAAAAAAAAAAAAAAAsAAAAQAAAAAAEAALQAAABRAAAAeNACAAAAAAAAAAAA/wAAALMAAAAAAAAAAAAAAAAAAAAAAAAAAAEAALQAAABQAAAAKAAAAHgAAAAA0AIAAAAAACAAzAAAAQAAtAAAACgAAAAAAQAAtAAAAAEAIAAAAAAAANACAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////vr6+/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5O
T/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/76+vv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs
7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////7vf//+rz7v/Yzc3/0NLY/+DX2f/N4PL/3tXI/8jV4v/Q0cX/1tDI/9ve2f/U0tX/0NLQ/83I0P/I2N7/4tnI/9LZ4v/v6tz/5eXl////9////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////83g9//e3M3/vrG3/8TCxv/Xwrz/vdfu/8W/rv/K1tX/x8bB/8LJxv/Oxb7/yMTE/8vCwv+3scH/zd7Z/9DNyP/BwcT/z97X/82xq/////v////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////u9/v/+/Lu//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////++vr7/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/vr6+//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////8OAAAAFAAAAAAAAAAQAAAAFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD+/wAAAwoBAAAAAAAAAAAAAAAAAAAAAAABAAAAAtXN1ZwuGxCTlwgAKyz5rjAAAADUAAAACwAAAAEAAABgAAAABQAAAGgAAAAGAAAAcAAAABEAAAB4AAAAFwAAAIAAAAALAAAAiAAAABAAAACQAAAAEwAAAJgAAAAWAAAAoAAAAA0AAACoAAAADAAAALUAAAACAAAAECcAAAMAAAABAAAAAwAAAAEAAAADAAAAEgAAAAMAAAAAAA8ACwAAAAAAAAALAAAAAAAAAAsAAAAAAAAACwAAAAAAAAAeEAAAAQAAAAEAAAAADBAAAAIAAAAeAAAABgAAAFRpdHJlAAMAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAIAAAADAAAABAAAAAUAAAAGAAAABwAAAP7///8JAAAACgAAAAsAAAAMAAAADQAAAA4AAAAPAAAAEAAAABEAAAASAAAAEwAAABQAAAAVAAAA/v///xcAAAAYAAAAGQAAABoAAAAbAAAAHAAAAB0AAAAeAAAAHwAAACAAAAAhAAAAIgAAACMAAAAkAAAAJQAAACYAAAAnAAAAKAAAACkAAAAqAAAAKwAAACwAAAAtAAAALgAAAC8AAAAwAAAAMQAAADIAAAAzAAAANAAAADUAAAA2AAAANwAAADgAAAA5AAAAOgAAADsAAAA8AAAAPQAAAD4AAAA/AAAAQAAAAEEAAABCAAAAQwAAAEQAAABFAAAARgAAAEcAAABIAAAASQAAAEoAAABLAAAATAAAAE0AAABOAAAATwAAAFAAAABRAAAAUgAAAFMAAABUAAAAVQAAAFYAAABXAAAAWAAAAFkAAABaAAAAWwAAAFwAAABdAAAAXgAAAF8AAABgAAAAYQAAAGIAAABjAAAAZAAAAGUAAABmAAAAZwAAAGgAAABpAAAAagAAAGsAAABsAAAAbQAAAG4AAABvAAAAcAAAAHEAAAByAAAAcwAAAHQAAAB1AAAAdgAAAHcAAAB4AAAAeQAAAHoAAAB7AAAAfAAAAH0AAAB+AAAAfwAAAIAAAACBAAAAggAAAIMAAACEAAAAhQAAAIYAAACHAAAAiAAAAIkAAACKAAAAiwAAAIwAAACNAAAAjgAAAI8AAACQAAAAkQAAAJIAAACTAAAAlAAAAJUAAACWAAAAlwAAAJgAAACZAAAAmgAAAJsAAACcAAAAnQAAAJ4AAACfAAAAoAAAAKEAAACiAAAAowAAAKQAAAClAAAApgAAAKcAAACoAAAAqQAAAKoAAACrAAAArAAAAK0AAACuAA
AArwAAALAAAACxAAAAsgAAALMAAAC0AAAAtQAAALYAAAC3AAAAuAAAALkAAAC6AAAAuwAAALwAAAC9AAAAvgAAAL8AAADAAAAAwQAAAMIAAADDAAAAxAAAAMUAAADGAAAAxwAAAMgAAADJAAAAygAAAMsAAADMAAAAzQAAAM4AAADPAAAA0AAAANEAAADSAAAA0wAAANQAAADVAAAA1gAAANcAAADYAAAA2QAAANoAAADbAAAA3AAAAN0AAADeAAAA3wAAAOAAAADhAAAA4gAAAOMAAADkAAAA5QAAAOYAAADnAAAA6AAAAOkAAADqAAAA6wAAAOwAAADtAAAA7gAAAO8AAADwAAAA8QAAAPIAAADzAAAA9AAAAPUAAAD2AAAA9wAAAPgAAAD5AAAA+gAAAPsAAAD8AAAA/QAAAP4AAAD/AAAAAAEAAAEBAAACAQAAAwEAAAQBAAAFAQAABgEAAAcBAAAIAQAACQEAAAoBAAALAQAADAEAAA0BAAAOAQAADwEAABABAAARAQAAEgEAABMBAAAUAQAAFQEAABYBAAAXAQAAGAEAABkBAAAaAQAAGwEAABwBAAAdAQAAHgEAAB8BAAAgAQAAIQEAACIBAAAjAQAAJAEAACUBAAAmAQAAJwEAACgBAAApAQAAKgEAACsBAAAsAQAALQEAAC4BAAAvAQAAMAEAADEBAAAyAQAAMwEAADQBAAA1AQAANgEAADcBAAA4AQAAOQEAADoBAAA7AQAAPAEAAD0BAAA+AQAAPwEAAEABAABBAQAAQgEAAEMBAABEAQAARQEAAEYBAABHAQAASAEAAEkBAABKAQAASwEAAEwBAABNAQAATgEAAE8BAABQAQAAUQEAAFIBAABTAQAAVAEAAFUBAABWAQAAVwEAAFgBAABZAQAAWgEAAFsBAABcAQAAXQEAAF4BAABfAQAAYAEAAGEBAABiAQAAYwEAAGQBAABlAQAAZgEAAGcBAABoAQAAaQEAAGoBAABrAQAAbAEAAG0BAABuAQAAbwEAAHABAABxAQAAcgEAAHMBAAB0AQAAdQEAAHYBAAB3AQAAeAEAAHkBAAB6AQAAewEAAHwBAAB9AQAAfgEAAH8BAAD+////gQEAAIIBAACDAQAAhAEAAIUBAACGAQAAhwEAAP7////9/////f////3////9////jQEAAP7////+/////v////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////9SAG8AbwB0ACAARQBuAHQAcgB5AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFgAFAf//////////AwAAAAYJAgAAAAAAwAAAAAAAAEYAAAAAAAAAAAAAAAAgFZlgpnrRAY8BAACAAAAAAAAAADEAVABhAGIAbABlAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOAAIB/////wUAAAD/////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAB4aAAAAAAAAVwBvAHIAZABEAG8AYwB1AG0AZQBuAHQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABoAAgEBAAAA//////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAFAFMAdQBtAG0AYQByAHkASQBuAGYAbwByAG0AYQB0AGkAbwBuAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKAACAQIAAAAEAAAA/////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABYAAAAM0wIAAAAAAAUARABvAGMAdQBtAGUAbgB0AFMAdQBtAG0AYQByAHkASQBuAGYAbwByAG0AYQB0AGkAbwBuAAAAAAAAAAAAAAA4AAIB////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAEAAAAQAAAAAAAAAQBDAG8AbQBwAE8AYgBqAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABIAAgD///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAcgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAP7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////8BAP7/AwoAAP////8GCQIAAAAAAMAAAAAAAABGIAAAAERvY3VtZW50IE1pY3Jvc29mdCBXb3JkIDk3LTIwMDQACgAAAE1TV29yZERvYwAQAAAAV29yZC5Eb2N1bWVudC44APQ5snEAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA==" } + + - do: + get: + index: test + type: test + id: 1 + - length: { _source.attachment: 6 } + - match: { _source.attachment.content: "Test elasticsearch" } + - match: { _source.attachment.language: "et" } + - match: { _source.attachment.author: "David Pilato" } + - match: { _source.attachment.date: "2016-03-10T08:25:00Z" } + - match: { _source.attachment.content_length: "19" } + - match: { _source.attachment.content_type: "application/msword" } + + +--- +"Test ingest attachment processor with .docx file": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "attachment" : { + "source_field" : "field1" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: { field1: 
"UEsDBBQABgAIAAAAIQBtiidLZgEAAFQFAAATAAgCW0NvbnRlbnRfVHlwZXNdLnhtbCCiBAIooAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAC0lMtugzAQRfeV+g/I2wqcdFFVVUgWfSzbSE0/wLEH4tYv2c7r7ztAgqooAalJNkgwc+89A3hGk41WyQp8kNbkZJgNSAKGWyFNmZOv2Vv6SJIQmRFMWQM52UIgk/HtzWi2dRASVJuQk0WM7onSwBegWcisA4OVwnrNIt76kjrGf1gJ9H4weKDcmggmprHyIOPRCxRsqWLyusHHDQnKSfLc9FVROWHOKclZxDKtqvSozoMKHcKVEQd06Y4sQ2XdExbShbvTCd8OyoMEqavR6gJqPvB1eikgmTIf35nGBrq2XlBh+VKjKOse7gijLQrJodVXbs5bDiHgd9IqayuaSbNnP8kR4lZBuDxF49sfDzGi4BoAO+dehDXMP69G8ce8F6TA3BmbK7g8RmvdCxHx1EJzHZ7NUdt0RWLn1FsXcAv4f4y9P66VOsWBHfgou/+6NhGtz54Pqk0gQBzJpvVOHP8CAAD//wMAUEsDBBQABgAIAAAAIQDHwie8/wAAAN8CAAALAAgCX3JlbHMvLnJlbHMgogQCKKAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAArJLNSgMxEIDvgu8
Q5t7NtoqINNuLCL2JrA8wJtPd6OaHZKrt2xtF1IVlEexx/j6+SWa9ObhBvFLKNngFy6oGQV4HY32n4LG9W1yDyIze4BA8KThShk1zfrZ+oAG5DOXexiwKxWcFPXO8kTLrnhzmKkTypbILySGXMHUyon7BjuSqrq9k+s2AZsQUW6Mgbc0FiPYY6X9s6YjRIKPUIdEipjKd2JZdRIupI1Zggr4v6fzZURUyyGmhy78Lhd3OaroNeu/I85QXHZi8ITOvhDHOGS1PaTTu+JF5C8lI85Wes1md9sO437snj3aYeJfvWvUcqfsQkqOzbN4BAAD//wMAUEsDBBQABgAIAAAAIQATqj6H9gAAADEDAAAcAAgBd29yZC9fcmVscy9kb2N1bWVudC54bWwucmVscyCiBAEooAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKySy2rDMBBF94X+g5h9LTt9UELkbEoh29b9AEUeP6gsCc304b+vaEjr0GC68PJeMfeeQbPZfg5WvGOk3jsFRZaDQGd83btWwUv1eHUPgli7WlvvUMGIBNvy8mLzhFZzGqKuDyRSiiMFHXNYS0mmw0FT5gO69NL4OGhOMrYyaPOqW5SrPL+TcZoB5Umm2NUK4q6+BlGNAf+T7ZumN/jgzduAjs9UyA/cPyNzWo5SrI4tsoKJmaVEkOdBbpYEabzjSu8t/mL8WHMQt0tCcJqdAHzLg1nMMRRLMhCPFiefcdBz9atF6/9cw9E5IsiTQy+/AAAA//8DAFBLAwQUAAYACAAAACEA9WKOYGUCAAAOBwAAEQAAAHdvcmQvZG9jdW1lbnQueG1spFXfb9owEH6ftP8h8jtNwijQiFDR0qI+TKpK9zwZx0ksYp9lGyj763dOIGSbVtGSh9j367vv7mJncvsmq2DLjRWgUhJfRSTgikEmVJGSH6+PvTEJrKMqoxUonpI9t+R2+vXLZJdkwDaSKxcghLLJTrOUlM7pJAwtK7mk9koKZsBC7q4YyBDyXDAe7sBkYT+Ko3qnDTBuLea7p2pLLTnASTgPTVJ23PajaIyyUC3Gv4xAc4XGHIykDkVTYIRZb3QPMTV1YiUq4fYea9jCbFOyMSo5YPRaHj4mQQLJVlZHZ3jPtyF6WI4R5hySTcj80PKaXmh4hYRB2VLoU98+i4bG8gjybsGdYnc6Hlw29LmhO1xOgOfQz5ogWTXM30eMozMm4iHaiHMo/JnzyKT78e0+15pOc+PrjwH0/wbQxWXDWRjY6BOauAztSa1bLH+VfADrMORuafYyMsuSajyBkiVPhQJDVxUywpEF2PXAf9ZkilfcCrK9XzWqB4mmhj5lKRmNhg/X9/GI1FrH31yjbR7UJnidZi8piaK7m8Hw5rpVzXlON5XzlvEwGs8f6yzGv9z0lVsX4JG2TjDLqWHlJPR6/65dVgBrf1ktHTUOIQVmjTy2ohLZ/1zAHWVrEnZ9H1TWeoY1lPZmy5l7Nv9nukS7185m8WjW9EIXy19oxdMRxzdRnbfE/XA8qJG9w3fqIR3gIY4HdX8SI4rSncQVOAfyJFc871hLTjOO1+EoGnsxB3Adsdi4WjykY1BZ1FpNGW98ajX+lRZG+KIrofizcAxZfhseq28Kr7fNcMPTj2z6GwAA//8DAFBLAwQUAAYACAAAACEAbU1ZqyEGAACOGgAAFQA
AAHdvcmQvdGhlbWUvdGhlbWUxLnhtbOxZy47bNhTdF+g/ENo7lm3Jj0E8gS3bSZuZJMg4abOkJVpihhINkpoZIwjQLyhQIC26KdBdC3QToP2D/kuKNv2IUpRlkzbdQToOEBSxAYuPcy8P7yUPJev2nauUgAvEOKZZ32ncch2AspBGOIv7zpPppNZ1ABcwiyChGeo7S8SdO8effnIbHokEpQhI+4wfwb6TCLE4qtd5KJshv0UXKJN9c8pSKGSVxfWIwUvpNyX1puu26ynEmQMymEq30+T3n6Wzh/M5DpFzXHkfE/mTCV40hISdFb7RymSYMwRzhY3OG8WFL3lAGLiApO/IgSJ6OUVXwgEEciE7+o6rPk79+HZ9bUTEHlvNbqI+K7uVQXTeVHYsnq0NPc/32oO1fwUgYhc37ozb4/banwLAMJQzLbnoWH/YG478FVYDlUWL71Fn1GoYeM1/awc/8IuvgVegsujt4CeTYBNDDVQWfUtMOs3AM/AKVBbbO/iOOxh5HQOvQAnB2fkO2vXbraCa7Royp+SeFd7zvUmnuYJvUHVtdZX2mdi31lL4nLKJBKjkQoEzIJYLNIehxAWQ4BnD4ATHiVx4C5hRLpvdpjtxW/K3+HqqpCICjxDUrMumkO80FXwADxleiL7zufTqaJBnObhLRYLD1ai7FvdgFusWb3/65u8fvgJ//frj21ff2vFcx49QFn+JYfZvAwjd4M13r//47fWb77/+85dXFviAwZkOn+IUcfAAXYLHNJWTswyAZuzdLKYJxLrFIIs5zGBhY0GPZfx09IMlJNCCGyIzkk+ZlAob8G7+3CB8lrBcYAvwfpIawFNKyZAy65zuF2PpUciz2D44y3XcYwgvbGMHW3ke5wu55rHNZZAgg+YjIlMOY5QhAYo+eo6QxewZxkZcT3HIKKdzAZ5hMITYGpIpnhmraWN0D6cyL0sbQZlvIzanT8GQEpv7EbowkXJ3QGJziYgRxrswFzC1MoYp0ZEnUCQ2kmdLFhoB50JmOkaEgnGEOLfZPGRLg+59KTH2tJ+SZWoimcDnNuQJpNTY4PQ8SGC6sHLGWaJjP+PncolC8IgKKwlq7pCiLvMgxWNfup9iZKT7+r39RMqQfYEUPTmzbQlEzf24JHOIlPP6lqanOLtW4Lek3X9/0n6KszChds09iKjboTeR8wHD1v20LeL7cNvSHVAW4Q9fuUcwzx4huVks0I/C/VG4//fCvW8/H16uNwqtbuOrm3XlJt175z7HhJyJJUEnXGk7l9OLJrJRVZTR+kFhkcjiajgDFzOoyoBR8QUWyVkCF3KYhhoh5ivXMQcLyuXpoJqtvosOkqenNCpbG43q2VQaQLFpl6dL1S7PIlG2tjubh7C1e1WL1cNyRaCwfRcS2mAmiZaFRKdqvIaEmtlBWPQsLLqF+70s1GWVFbn/ACz+1/C9kpFcb5CgqMhTaV9l9+CZ3hdMc9pNy/R6BdfDZNogoS03k4S2DBMYoe3mA+e6t0mpQa8IxS6NTvd95LoQkS1tIJlZA5dyz7V86SaEi74zl/eFspgupD9e6CYkcdZ3QrEK9H9RlgXjYgR5UsJUVzn/FAvEAMGpXOt6Gki24dZodoo5fqDkeu6HFzl10ZOM5nMUij0tm6rsK51Ye28ILio0l6TPkugSzEjOHkMZKL/TKAIYYS7W0Yww0xb3JopbcrXaisZ/ZpstCskigasTRRfzEq7KazraPBTT7VmZ9dVkZnGRpBufutcbFR2aaO45QIpT064f7++Q11htdN9gVUr3ttb1Kq3bd0rc/EDQqG0GM6gVjC3UNq0mtQPeEGjDrZfmvjPi0KfB9qotDojqvlLVdl5O0NlzufJH8nY1J4IrquhKPiME1d/KpRKo1kpdrgTIGe47L1x/4AVNP6i5XX9c81qeW+v6g1Zt4PutxthvuKNh86UMikjShl+OPZHPM2S5evmi2ndewKTVbfatkKZ1qt6s1JWxegHTaBovYMo3L2B
a9DsAy8i8aDcnvVZv2K71WoNJzRsNu7Ve0B7WRu2gM5qMAr/bm7x0wIUCe4NW4LXH3Vq7EQQ1r+0W9Lu9WsdrNgdeZ9Ade4OXq1jLmVfXKryK1/E/AAAA//8DAFBLAwQKAAAAAAAAACEAvOgH/fQnAAD0JwAAFwAAAGRvY1Byb3BzL3RodW1ibmFpbC5qcGVn/9j/4AAQSkZJRgABAQAASABIAAD/4QCARXhpZgAATU0AKgAAAAgABAEaAAUAAAABAAAAPgEbAAUAAAABAAAARgEoAAMAAAABAAIAAIdpAAQAAAABAAAATgAAAAAAAABIAAAAAQAAAEgAAAABAAOgAQADAAAAAQABAACgAgAEAAAAAQAAAWmgAwAEAAAAAQAAAgAAAAAA/+0AOFBob3Rvc2hvcCAzLjAAOEJJTQQEAAAAAAAAOEJJTQQlAAAAAAAQ1B2M2Y8AsgTpgAmY7PhCfv/AABEIAgABaQMBEQACEQEDEQH/xAAfAAABBQEBAQEBAQAAAAAAAAAAAQIDBAUGBwgJCgv/xAC1EAACAQMDAgQDBQUEBAAAAX0BAgMABBEFEiExQQYTUWEHInEUMoGRoQgjQrHBFVLR8CQzYnKCCQoWFxgZGiUmJygpKjQ1Njc4OTpDREVGR0hJSlNUVVZXWFlaY2RlZmdoaWpzdHV2d3h5eoOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4eLj5OXm5+jp6vHy8/T19vf4+fr/xAAfAQADAQEBAQEBAQEBAAAAAAAAAQIDBAUGBwgJCgv/xAC1EQACAQIEBAMEBwUEBAABAncAAQIDEQQFITEGEkFRB2FxEyIygQgUQpGhscEJIzNS8BVictEKFiQ04SXxFxgZGiYnKCkqNTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqCg4SFhoeIiYqSk5SVlpeYmZqio6Slpqeoqaqys7S1tre4ubrCw8TFxsfIycrS09TV1tfY2dri4+Tl5ufo6ery8/T19vf4+fr/2wBDAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/2wBDAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/3QAEAC7/2gAMAwEAAhEDEQA/AP7Yfgx8GPg9N8HvhRLL8KPhrLLL8NfAskkkngTws8kkj+F9LZ3d200s7uxLMzHczEk5JNAHpX/ClPg3/wBEl+GX/hBeFf8A5W0AH/ClPg3/ANEl+GX/AIQXhX/5W0AH/ClPg3/0SX4Zf+EF4V/+VtAB/wAKU+Df/RJfhl/4QXhX/wCVtAB/wpT4N/8ARJfhl/4QXhX/AOVtAB/wpT4N/wDRJfhl/wCEF4V/+VtAB/wpT4N/9El+GX/hBeFf/lbQAf8AClPg3/0SX4Zf+EF4V/8AlbQAf8KU+Df/AESX4Zf+EF4V/wDlbQAf8KU+Df8A0SX4Zf8AhBeFf/lbQAf8KU+Df/RJfhl/4QXhX/5W0AH/AApT4N/9El+GX/hBeFf/AJW0AH/ClPg3/wBEl+GX/hBeFf8A5W0AH/ClPg3/ANEl+GX/AIQXhX/5W0AH/ClPg3/0SX4Zf+EF4V/+VtAB/wAKU+Df/RJfhl/4QXhX/wCVtAB/wpT4N/8ARJfhl/4QXhX/AOVtAB/wpT4N/wDRJfhl/wCEF4V/+VtAB/wpT4N/9El+GX/hBeFf/lbQAf8AClPg3/0SX4Zf+EF4V/8AlbQAf8KU+Df/AESX4Zf+EF4V/wDlbQAf8KU+Df8A0SX4Zf8AhBeFf/lbQAf8KU+Df/RJfhl/4QXhX/5W0AH/AApT4N/9El+GX/hBeFf/AJW0AH/ClPg3/wBEl+G
X/hBeFf8A5W0AH/ClPg3/ANEl+GX/AIQXhX/5W0AH/ClPg3/0SX4Zf+EF4V/+VtAB/wAKU+Df/RJfhl/4QXhX/wCVtAB/wpT4N/8ARJfhl/4QXhX/AOVtAB/wpT4N/wDRJfhl/wCEF4V/+VtAB/wpT4N/9El+GX/hBeFf/lbQAf8AClPg3/0SX4Zf+EF4V/8AlbQAf8KU+Df/AESX4Zf+EF4V/wDlbQAf8KU+Df8A0SX4Zf8AhBeFf/lbQAf8KU+Df/RJfhl/4QXhX/5W0AH/AApT4N/9El+GX/hBeFf/AJW0AH/ClPg3/wBEl+GX/hBeFf8A5W0AH/ClPg3/ANEl+GX/AIQXhX/5W0AH/ClPg3/0SX4Zf+EF4V/+VtAB/wAKU+Df/RJfhl/4QXhX/wCVtAB/wpT4N/8ARJfhl/4QXhX/AOVtAB/wpT4N/wDRJfhl/wCEF4V/+VtAB/wpT4N/9El+GX/hBeFf/lbQAf8AClPg3/0SX4Zf+EF4V/8AlbQB/Nd/wrT4c/8ARP8AwT/4Sug//INAH//Q/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9H+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0v7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/T/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9T+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAeDfEX4/+Hvhf8QfD3gbxN4W8Vx6ZrfgHxz8TNQ+JS33w9sPh34O8G/DOXRk8fav4uvfEHj3RPE1jD4Xt/Evhu/v307wrq0M1hrUU+ny3Z03Xk0oAxf8Ahrr9n+W68P2WneN7nXLrxJ4x0D4f2sHh7wj411x9L8aeI9T13RrPwx4qGmeHrr/hD9esNV8Na1YeJNG8Uto+q+Eriz2+KLLRxNbvKAXNd/am+C2h+MbHwIPFDa34kn8V6n4Q1Ox8OWU+sSaBf6R8P/iz8Q7+7v4YALrVNPt7T4K+PPCs58JW/ia/tfH2nf8ACHXun22rW2qRaWAZN7+2L+z3bRaLNY+N5/ECa/4p0XwRYyeG/DPinWLW38XeIPh9r/xR07w7rupW+jHSvC2qQ+B/Dt5r2vweJ77SB4Ns7zRZvGjeHoNc0qa6ANXSP2r/ANn7WUia2+JWhxg6X4r1q7mlNxLpWl6R4Asbe7+IWsaj4lsobvwrD4f+H91dQeHvGvimLXZ/C3hzxhJH4O1LWovE8sWkOANvf2s/2eNN06x1bUviZpmnadqdvey6fdahpXiSyS7v9N8cH4a6l4YgW50aGR/HenePlfwnf/D0L/wnNlrUctnc+HonikKgGaP2wfgC/wARfCHwzh8arNrHje68eaVoWsLYXkXhaTxJ8O/Gfw8+Hmu+GJtcuY4Ihqs/jj4m+HPB+l3MME+g3njFbzwS+tW/jP8As/QL8A+nKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoA/mXoA//1f7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQB5b8Sfgr8
MPi/bahZ/EfwrB4otNV+HPxJ+Emo2t1qGsWltefDz4vW2gWfxF8OTw6ZqNlG0XiO18MaJDJf7Rq2mrZE6Pf6c11etcAHk2k/sUfs26Fq/hbXtJ8DavYaz4JstA07wrqNt8R/ifHc6LY+G/Gcfj/TbS0ZfGKqLZ/FKTX2p28ivBrFpqGs6LqsV3omu65p2oAEGtfsOfsueILvx9fal8MAbj4neIfEXivxo1j4y8f6Qmp+IvF3hL4ieCPE+rW0Ok+KLKDRbjXfDvxY+IUOprocenQXWpeJbrxBJEfEFtp+p2gBQ0H9gr9lfwv8AYz4f+HWraW2nW/hSx06S2+J3xZ86w0/wXbeLLLQ9MtLiTx01xBpf2Dx54107WdMSQWHiTT/FWvWXiKDVLbVLtJQB17+wd+y1qWhWfhXU/h9rOqeEdPvfH9/p3g7VPij8W9S8H6dc/FLRPEnh/wCIT6f4WvvHU2g2K+LNM8Y+K01aO10+FJbrxFrGoIqX17PcMAV9M/YB/ZM0jxR4H8Z2PwtuF8S/Di38NWvgzUZ/iF8T7saNF4R+Is3xY0DFjc+M5tNv5LP4gTvr8s2p2l5JfYTS79rnRkTT6AO9sf2UfgVp97a30PhPWJ5dP8S3vizR4NR+IPxI1XTvD+rah8V/AHxwuYvDek6n4vvNL8OaGPin8MPBPiu28LaJZ2Hhmxk0iXSbHSbfQNX1rS9SAPoqgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKAP5l6AP/W/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9f+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0P7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/R/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9L+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0/7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/U/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9X+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/1v7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAF
ABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/X/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9D+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0f7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/S/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9P+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/1P7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/V/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9b+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/1/7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/Q/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9H+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0v7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/T/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9T+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/1f7q/gp/yRr
4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/W/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9f+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0P7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/R/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9L+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0/7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/U/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9X+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/1v7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/X/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9D+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0f7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/S/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9P+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAB
QAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/1P7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/V/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9b+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/1/7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/Q/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9H+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0v7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/T/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9T+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/1f7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/W/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9f+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0P7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/R/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAF
ABQB/MvQB/9L+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0/7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/U/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9X+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/1v7Yfgx8Z/g9D8HvhRFL8V/hrFLF8NfAsckcnjvwskkcieF9LV0dG1IMjowKsrDcrAg4INAHpX/C6/g3/wBFa+GX/he+Ff8A5ZUAH/C6/g3/ANFa+GX/AIXvhX/5ZUAH/C6/g3/0Vr4Zf+F74V/+WVAB/wALr+Df/RWvhl/4XvhX/wCWVAB/wuv4N/8ARWvhl/4XvhX/AOWVAB/wuv4N/wDRWvhl/wCF74V/+WVAB/wuv4N/9Fa+GX/he+Ff/llQAf8AC6/g3/0Vr4Zf+F74V/8AllQAf8Lr+Df/AEVr4Zf+F74V/wDllQAf8Lr+Df8A0Vr4Zf8Ahe+Ff/llQAf8Lr+Df/RWvhl/4XvhX/5ZUAH/AAuv4N/9Fa+GX/he+Ff/AJZUAH/C6/g3/wBFa+GX/he+Ff8A5ZUAH/C6/g3/ANFa+GX/AIXvhX/5ZUAH/C6/g3/0Vr4Zf+F74V/+WVAB/wALr+Df/RWvhl/4XvhX/wCWVAB/wuv4N/8ARWvhl/4XvhX/AOWVAB/wuv4N/wDRWvhl/wCF74V/+WVAB/wuv4N/9Fa+GX/he+Ff/llQAf8AC6/g3/0Vr4Zf+F74V/8AllQAf8Lr+Df/AEVr4Zf+F74V/wDllQAf8Lr+Df8A0Vr4Zf8Ahe+Ff/llQAf8Lr+Df/RWvhl/4XvhX/5ZUAH/AAuv4N/9Fa+GX/he+Ff/AJZUAH/C6/g3/wBFa+GX/he+Ff8A5ZUAH/C6/g3/ANFa+GX/AIXvhX/5ZUAH/C6/g3/0Vr4Zf+F74V/+WVAB/wALr+Df/RWvhl/4XvhX/wCWVAB/wuv4N/8ARWvhl/4XvhX/AOWVAB/wuv4N/wDRWvhl/wCF74V/+WVAB/wuv4N/9Fa+GX/he+Ff/llQAf8AC6/g3/0Vr4Zf+F74V/8AllQAf8Lr+Df/AEVr4Zf+F74V/wDllQAf8Lr+Df8A0Vr4Zf8Ahe+Ff/llQAf8Lr+Df/RWvhl/4XvhX/5ZUAH/AAuv4N/9Fa+GX/he+Ff/AJZUAH/C6/g3/wBFa+GX/he+Ff8A5ZUAH/C6/g3/ANFa+GX/AIXvhX/5ZUAH/C6/g3/0Vr4Zf+F74V/+WVAB/wALr+Df/RWvhl/4XvhX/wCWVAB/wuv4N/8ARWvhl/4XvhX/AOWVAB/wuv4N/wDRWvhl/wCF74V/+WVAB/wuv4N/9Fa+GX/he+Ff/llQAf8AC6/g3/0Vr4Zf+F74V/8AllQB/Nd/wsv4c/8ARQPBP/hVaD/8nUAf/9kAAFBLAwQUAAYACAAAACEAuN5y8JsDAACACQAAEQAAAHdvcmQvc2V0dGluZ3MueG1stFZLj9s2EL4X6H8wdK5Wj8iOV403sL1xs8E6WazcS2+URNnE8iEMKatO0f/eESWunGYRuA3ii8n55s1vxn7z9k/BJ0cKmim58KKr0JtQWai
Syf3C+3238efeRBsiS8KVpAvvRLX39ubnn960qabGoJqeoAupU1EsvIMxdRoEujhQQfSVqqlEsFIgiMEr7ANB4Kmp/UKJmhiWM87MKYjDcOYNbtTCa0CmgwtfsAKUVpXpTFJVVaygw5ezgEvi9ia3qmgElcZGDIByzEFJfWC1dt7E//WG4ME5OX6riKPgTq+NwgvKbRWUzxaXpNcZ1KAKqjU+kOAuQSbHwMlXjp5jX2HsoUTrCs2j0J7OM5/+NwfxvxxofkklPXTPciDQ82QoQxTp3V4qIDlHVmI5E8zIu0FaflZKTNq0plDg2yCnw9ALOgA7oqrMEEMR1jXl3JK84JSgwzbdAxFITyexNiWtSMPNjuSZUTUqHQnm/Tqc9/DhVB+otCT6A8fD4Uk87fHiQIAUhkJWkwKjrZU0oLjTK9VHZdY4CoAv1VtocqQPQI+Mtg+sMA3Q3pGdl/GU9bOHjiQR2IAv5mmrStoV1AC7/I06A5tU5HJ/MZDCXQGspLuu8Zk5cbrBmjL2mS5l+aHRhqFH25DvyOBbCWC7MfInpMruVNMNJV2P9A8KZh9ow1m9ZQAK7mSJlPphwVhVUcAADCm6RdYxUK3t83tKSlzR3xk3OKcRLvxSu8OjUsaphuF8Fs5vN32mHXoJslxGr5fJS8jqOpldW0oFz1FF2i3LB3CnjkIT0VusiciBkcm2W6dBp5HD04pJh+cU9wM9R7Imd6Dv94AWhPMNjp4D7AoQacl0fUsre+ZbAvvR76ABL0pxDXx49tWtFQq/gWrqHm2B1D01nEqUJIMlk+aeCSfXTZ45K4kb7QxqZPnpCLZPY3va1OAT2xG7J5YqVrcCf/M4UIlD1tGAbkld92zK99HC42x/MFFHAIO3En917SXfxwMWWyzuMXshRVcZag+HURY72ZneKyd7NcoSJ0tG2dTJpqNs5mSzToZLlALu4icktjt28kpxrlpavh/xr0RuSxcMXzw7iXxcrr/0GGcaJ63GPWwUOOxXi0VJWqriDsmKp/655+t3yTxa9vDU7m+zQx49YWsfabUimpYD5kynvelfm+4zj1f+MrqN/WQ2XfnzeP3OX23iZbReXs+m6/jvYQ7cX6ebfwAAAP//AwBQSwMEFAAGAAgAAAAhAPC8NQHcAQAA8QUAABIAAAB3b3JkL2ZvbnRUYWJsZS54bWy8k9tq4zAQhu8LfQej+8ay4vRg6pQ0bWBh6cXSfQBFkW2xOhhJiTdvvyPZcQMhbJallUHI/4x+jT40j0+/lUx23DphdImyCUYJ18xshK5L9PN9dXOPEuep3lBpNC/Rnjv0NL++euyKymjvEtivXaFYiRrv2yJNHWu4om5iWq4hWBmrqIdfW6eK2l/b9oYZ1VIv1kIKv08JxrdosLGXuJiqEoy/GLZVXPu4P7VcgqPRrhGtO7h1l7h1xm5aaxh3Du6sZO+nqNCjTZafGCnBrHGm8hO4zFBRtILtGY4rJT8MZv9mQEYDxYpvtTaWriXAh0oSMEPzgX7SFZoqCCypFGsrYqCl2jieQWxHZYkwwSs8gzl8OZ6GGaUhkTXUOh5M+kTcyxVVQu4PKt160+ut8Kw5yDtqRaipDzlRQ2Dr1rhErxgGWa1Qr2QlykFYLEeFhKPiyAZlOio4KCz69BkPcReLPmMOnJn2AE5AvAvFXfLGu+SHUVSfAULwLYCYAY4AZvr5QMji9QjIEpS7+/xw/Q8gD38H0mO8HMgCypJnMDwDhnx4GfF1fD6G43cxYJh+BYahQZLvom782TYJzfFFbbIIFZPjVxHahOC75xMc8fL/2SbDws3/AAAA//8DAFBLAwQUAAYACAAAACEA4IvKVR8BAAARAgAAFAAAAHdvcmQvd2ViU2V0dGluZ3MueG1slNFRS8MwEAfwd8HvUPK+pRs6tKwbgkz2MgbVD5Cl1zWY5EIua7dv71nnRHyZbzku9+P+3Hx5dDbrIJJBX4rJOBcZeI218ftSvL2uRg8io6R8rSx
6KMUJSCwXtzfzvuhhV0FK/JMyVjwVTpeiTSkUUpJuwSkaYwDPzQajU4nLuJdOxfdDGGl0QSWzM9akk5zm+UycmXiNgk1jNDyjPjjwaZiXESyL6Kk1gb61/hqtx1iHiBqIOI+zX55Txl+Yyd0fyBkdkbBJYw5z3migeHySDy9nf4D7/wHTC+B0sd57jGpn+QS8ScaYWPANlLXYbzcv8rOocYOpUh08UcUpLKyMhaETzBEsbSGuvW6zvuiULcXjTHBT/jrk4gMAAP//AwBQSwMEFAAGAAgAAAAhABZNBGBtAQAA7wIAABEACAFkb2NQcm9wcy9jb3JlLnhtbCCiBAEooAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJySUW+CMBSF35fsP5C+Q4suxhDAZDM+zcRkLlv21rVX7YS2aavIv18BxbH5tLd7e757uJw2nZ3KIjiCsULJDMURQQFIpriQ2wy9rhfhFAXWUclpoSRkqAaLZvn9Xcp0wpSBlVEajBNgA+8kbcJ0hnbO6QRjy3ZQUht5Qnpxo0xJnW/NFmvK9nQLeETIBJfgKKeO4sYw1L0jOlty1lvqgylaA84wFFCCdBbHUYyvrANT2psDrfKDLIWrNdxEL2JPn6zowaqqomrcon7/GL8vn1/aXw2FbLJigPKUs8QJV0Ce4mvpK3v4/ALmuuO+8TUzQJ0y+ZweBQ9WovBdC12EJvI91JUy3PrxQecxDpYZoZ2/yM58cODpglq39De7EcAf61/f+as3IwaOonkZedwSfZueY+52Ax74eJIuzIvyNn6arxcoH5F4EpJxGJM1mSajh4SQj2a9wfzVsDwv8G/Hi0GX0PCJ5t8AAAD//wMAUEsDBBQABgAIAAAAIQCBlv05MgsAAGRyAAAPAAAAd29yZC9zdHlsZXMueG1svJ3bctu6FYbvO9N34OiqvXB8jJ14trPHduLaUzvbO3Kaa4iEJNQgofLgQ5++IEhJkBdBcQGrvrIlan0A8eMHsEBS+u33l1RGTzwvhMrORvsf9kYRz2KViGx2Nvr5cLXzaRQVJcsSJlXGz0avvBj9/uWvf/nt+bQoXyUvIg3IitM0PhvNy3JxurtbxHOesuKDWvBMH5yqPGWlfpnPdlOWP1aLnVilC1aKiZCifN092Ns7HrWYfAhFTaci5l9VXKU8K038bs6lJqqsmItFsaQ9D6E9qzxZ5CrmRaFPOpUNL2UiW2H2jwAoFXGuCjUtP+iTaWtkUDp8f8/8l8o14CMOcLACpPHpzSxTOZtI3fq6JpGGjb7o5k9U/JVPWSXLon6Z3+fty/aV+XOlsrKInk9ZEQvxoEvWkFRo3vV5VoiRPsJZUZ4XgnUenNf/dB6Ji9J6+0IkYrRbl1j8Vx98YvJsdHC0fOeyrsHGe5Jls+V703zn6oddk7MRz3Z+juu3Jpp7NmL5zvi8DtxtT6z5a53uYvWq+dSbttFdQ3eUcdNf9VE+vVXxI0/GpT5wNtqri9Jv/ry5z4XKdZ88G33+3L455qm4FknCM+uD2Vwk/NecZz8Lnqzf//PK9Kv2jVhVmf7/8NOe0UsWybeXmC/qXqqPZqxuve91gKw/XYl14Sb8P0vYfttmXfFzzmqrRvtvEab6KMRBHVFYZ9vNrN6cu/kUqqDD9yro6L0K+vheBR2/V0En71XQp/cqyGD+nwWJLOE
vjRFhMYC6jeNwI5rjMBua4/ASmuOwCprjcAKa4+joaI6jH6M5jm6K4JQqdvVCq7MfOnp7P3f7HOHH3T4l+HG3zwB+3O0Dvh93+/jux90+nPtxt4/eftztgzWe2yy1ohtts6wMdtlUqTJTJY9K/hJOY5lmmfyFhldPejwnOUkCTDOytRNxMC1m5vX2HmJM6j+fl3XKFalpNBWzKtdpb2jFefbEpU5AI5YkmkcIzHlZ5Y4W8enTOZ/ynGcxp+zYdFApMh5lVToh6JsLNiNj8Swhbr4lkWRQWHVoVpXz2iSCoFOnLM5VeNUUIxsfbkUR3lY1JLqopORErO80XcywwnMDgwlPDQwmPDMwmPDEwNKMqolaGlFLtTSiBmtpRO3W9E+qdmtpRO3W0ojaraWFt9uDKKUZ4u1Vx/7wvbtLqeod5+B6jMUsY3oBED7dtHum0T3L2Sxni3lU7x93Y+1zxpZzoZLX6IFiTluRqNb1potc6rMWWRXeoBs0KnOteET2WvGIDLbihVvsTi+T6wXaNU0+M64mZadpDWmQacdMVs2CNtxtrAzvYWsDXIm8ILNBN5agB3+vl7O1nBQj37qW4RVbs8Jt9XZUIq1eiySopVTxI80wfP264LlOyx6DSVdKSvXMEzriuMxV09dsyx8YSQZZ/lu6mLNCmFxpAzF8ql9eq47u2CL4hO4lExmNbt92UiZkRLeCuH64u40e1KJOM+uGoQFeqLJUKRmz3Qn82y8++TtNBc91Epy9Ep3tOdH2kIFdCoJJpiGphIikl5kiEyRzqOH9k79OFMsTGtp9zpvbQ0pORByzdNEsOgi8pcfFZz3+EKyGDO9fLBf1vhCVqR5IYNa2YVFN/s3j8KHuu4pIdob+qEqz/2iWuiaaDhe+TNjAhS8RjJp6eqj7L8HJbuDCT3YDR3Wyl5IVhXBeQvXmUZ3ukkd9vuHJX8tTUuXTStI14BJI1oJLIFkTKlmlWUF5xoZHeMKGR32+hF3G8Ai25AzvH7lIyMQwMColDIxKBgOj0sDASAUIv0PHgoXfpmPBwu/VaWBESwALRtXPSKd/oqs8FoyqnxkYVT8zMKp+ZmBU/ezwa8SnU70IpptiLCRVn7OQdBNNVvJ0oXKWvxIhv0k+YwQbpA3tPlfT+rkBlTU3cRMg6z1qSbjYbnBUIv/iE7Kq1SzKehHsiDIplSLaW1tPOCZy8961bWHmmYvgKpjN9lv+xClW4xaM6DJAAwuXzYKFT1MWLHyasmDh05QFC5+mLFj4NGXBwu9fvpcs5nMlE547jNhXkWi8YHF7bQlcox60V38rZvMyGs9Xl6hszPHe1sjlLtNG2PYCuwaK44OesDueiCpdVhQ+AXR8ODzYGHojePmgVk/wevm7EflxYCQs83h75Dq124g8GRgJy/w0MNKMUhuRfYP4V5Y/dnaEk77+s9qYcHS+k75etAruLLavI60iu7rgSV8v2rBKdB7H9SUuqM4wz7jjh5nHHY9xkZuCsZObMthXbkSfwX7wJ1EvRzGDpilvdcvP2+IOzZQ6aOT8s1LNxaaNq6TDn0S80av9rOBRJ+dw+NXWjVHG3Y6Dhxs3YvC440YMHoDciEEjkTMcNSS5KYPHJjdi8CDlRqBHKzgj4EYrGI8brWC8z2gFKT6jVcAqwI0YvBxwI9BGhQi0UQNWCm4Eyqgg3MuokII2KkSgjQoRaKPCBRjOqDAeZ1QY72NUSPExKqSgjQoRaKNCBNqoEIE2KkSgjeq5tneGexkVUtBGhQi0USECbVSzXgwwKozHGRXG+xgVUnyMCiloo0IE2qgQgTYqRKCNChFoo0IEyqgg3MuokII2KkSgjQoRaKM2z8f6GxXG44wK432MCik+RoUUtFEhAm1UiEAbFSLQRoUItFEhAmVUEO5lVEhBGxUi0EaFCLRRzaWDAKPCeJxRYbyPUSHFx6iQgjYqRKCNChFoo0IE2qgQgTYqRKCMCsK9jAopaKNCBNqoENHXP9vr6q5nQ/b
xu57Ox0yGX7pqK/XD/v4BG3U4HLWslZs1/AGaC6Ueo86nZQ9NvjEMIiZSKLNF7bgXxOaaC6Soq/V/XPY/lmbTA78prH2Ax1zoB/CjoZFgT+Wor8vbkSDJO+rr6XYkWHUe9Y2+diSYBo/6Bl3jy+WdVHo6AsF9w4wVvO8I7xutrXDYxH1jtBUIW7hvZLYCYQP3jcdW4MeoHpzfRn8c2E7Hq5uiAaGvO1qEEzehr1tCrZbDMTTGUNHchKHquQlDZXQTUHo6MXhh3Si0wm6Un9TQZlip/Y3qJmClhgQvqQHGX2qI8pYaovykhgMjVmpIwErtPzi7CV5SA4y/1BDlLTVE+UkNpzKs1JCAlRoSsFIHTshOjL/UEOUtNUT5SQ0Xd1ipIQErNSRgpYYEL6kBxl9qiPKWGqL8pAZZMlpqSMBKDQlYqSHBS2qA8Zcaorylhqg+qc0uyobUKIWtcNwizArETchWIG5wtgI9siUr2jNbsgie2RLUaqk5LluyRXMThqrnJgyV0U1A6enE4IV1o9AKu1F+UuOypS6p/Y3qJmClxmVLTqlx2VKv1LhsqVdqXLbklhqXLXVJjcuWuqT2H5zdBC+pcdlSr9S4bKlXaly25JYaly11SY3LlrqkxmVLXVIHTshOjL/UuGypV2pctuSWGpctdUmNy5a6pMZlS11S47Ilp9S4bKlXaly21Cs1LltyS43LlrqkxmVLXVLjsqUuqXHZklNqXLbUKzUuW+qV2pEt7T5v/GpYzTa/d6c/XL4ueP3F8dYDM0nzxbntRUDzwZtk9etedXBdk6j9xbP2bVPh9oJhU6IJhEXFc11W3H7ll6OoeyWFPm+WJ/pwCYp0fLOvqcL65JefbhtzfRG0+dzGBc/eGpd1Y/fU1ojBqt72aRRzVfFz2wW31VHXaCKbH8PT/9xkiQY8t7+w1tQ1eWENSh+/5FLesebTauH+qOTTsjm6v2cen31zfNJ8YaEzPjeDhBOwu1mZ5mX7w3eOFm9+wqC9eu1o9fMqrjIutRt4R5ub+ylCm3tdweV/xZf/AQAA//8DAFBLAwQUAAYACAAAACEAQP7QLGkBAAC3AgAAEAAIAWRvY1Byb3BzL2FwcC54bWwgogQBKKAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACcUk1LxTAQvAv+h9K7L32CH8i+iCjiQUV4Vc8h2bbBNAnJKr5/78ZqrXgzp92ZZHZmCZy/j656w5Rt8Jt6vWrqCr0Oxvp+Uz+21wendZVJeaNc8Lipd5jrc7m/Bw8pRExkMVcs4fOmHojimRBZDziqvGLaM9OFNCriNvUidJ3VeBX064iexGHTHAt8J/QGzUGcBetJ8eyN/itqgi7+8lO7i6wnocUxOkUo78tLtzKBRhAzCm0g5Vo7omwYnht4UD1muQYxFfAcksnyEMRUwOWgktLE+5PrExCLFi5idFYr4sXKO6tTyKGj6k5p6ynkoSoKIJa3gENsUb8mS7viY9nCrfWTk6lgZ0n1ScXhy97cwVYrh5ccX3bKZQTxAxSVl/wY23BVYn/xv8FFpmdLwzYqXQafLtMtCNgyioa9zuNmAG54/ckVeX7rezTfd/4SZV9P0z+U66NVw+dzO98YZ5w/iPwAAAD//wMAUEsBAi0AFAAGAAgAAAAhAG2KJ0tmAQAAVAUAABMAAAAAAAAAAAA
AAAAAAAAAAFtDb250ZW50X1R5cGVzXS54bWxQSwECLQAUAAYACAAAACEAx8InvP8AAADfAgAACwAAAAAAAAAAAAAAAACfAwAAX3JlbHMvLnJlbHNQSwECLQAUAAYACAAAACEAE6o+h/YAAAAxAwAAHAAAAAAAAAAAAAAAAADPBgAAd29yZC9fcmVscy9kb2N1bWVudC54bWwucmVsc1BLAQItABQABgAIAAAAIQD1Yo5gZQIAAA4HAAARAAAAAAAAAAAAAAAAAAcJAAB3b3JkL2RvY3VtZW50LnhtbFBLAQItABQABgAIAAAAIQBtTVmrIQYAAI4aAAAVAAAAAAAAAAAAAAAAAJsLAAB3b3JkL3RoZW1lL3RoZW1lMS54bWxQSwECLQAKAAAAAAAAACEAvOgH/fQnAAD0JwAAFwAAAAAAAAAAAAAAAADvEQAAZG9jUHJvcHMvdGh1bWJuYWlsLmpwZWdQSwECLQAUAAYACAAAACEAuN5y8JsDAACACQAAEQAAAAAAAAAAAAAAAAAYOgAAd29yZC9zZXR0aW5ncy54bWxQSwECLQAUAAYACAAAACEA8Lw1AdwBAADxBQAAEgAAAAAAAAAAAAAAAADiPQAAd29yZC9mb250VGFibGUueG1sUEsBAi0AFAAGAAgAAAAhAOCLylUfAQAAEQIAABQAAAAAAAAAAAAAAAAA7j8AAHdvcmQvd2ViU2V0dGluZ3MueG1sUEsBAi0AFAAGAAgAAAAhABZNBGBtAQAA7wIAABEAAAAAAAAAAAAAAAAAP0EAAGRvY1Byb3BzL2NvcmUueG1sUEsBAi0AFAAGAAgAAAAhAIGW/TkyCwAAZHIAAA8AAAAAAAAAAAAAAAAA40MAAHdvcmQvc3R5bGVzLnhtbFBLAQItABQABgAIAAAAIQBA/tAsaQEAALcCAAAQAAAAAAAAAAAAAAAAAEJPAABkb2NQcm9wcy9hcHAueG1sUEsFBgAAAAAMAAwABgMAAOFRAAAAAA==" } + + - do: + get: + index: test + type: test + id: 1 + - length: { _source.attachment: 6 } + - match: { _source.attachment.content: "Test elasticsearch" } + - match: { _source.attachment.language: "et" } + - match: { _source.attachment.author: "David Pilato" } + - match: { _source.attachment.date: "2016-03-10T08:25:00Z" } + - match: { _source.attachment.content_length: "19" } + - match: { _source.attachment.content_type: "application/vnd.openxmlformats-officedocument.wordprocessingml.document" } + From cd12241e9f76c01c98961afb4f5b874926cc4269 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Sat, 27 Feb 2016 18:48:42 +0100 Subject: [PATCH 145/320] Decouple the TransportService and ClusterService #16872 Currently, the cluster service is tightly coupled to the transport service by both managing node connections and requiring the bound address in order to create the local disco node. 
This commit introduces a new NodeConnectionsService which is in charge of node connection management and makes it possible to remove all network related calls from the cluster service. The local DiscoNode is now created by DiscoveryNodeService and is set both the cluster service and the transport service during node start up. Closes #16788 Closes #16872 --- .../elasticsearch/cluster/ClusterModule.java | 1 + .../elasticsearch/cluster/ClusterService.java | 6 - .../cluster/NodeConnectionsService.java | 156 ++++ .../cluster/node/DiscoveryNodeService.java | 25 +- .../service/InternalClusterService.java | 175 +--- .../common/settings/ClusterSettings.java | 6 +- .../common/util/concurrent/KeyedLock.java | 52 +- .../java/org/elasticsearch/node/Node.java | 18 + .../transport/netty/NettyTransport.java | 18 +- .../node/tasks/TaskManagerTestCase.java | 4 +- .../admin/cluster/node/tasks/TasksIT.java | 13 +- .../cluster/ClusterServiceIT.java | 708 --------------- .../cluster/NodeConnectionsServiceTests.java | 275 ++++++ ...rdFailedClusterStateTaskExecutorTests.java | 4 +- .../cluster/service/ClusterServiceTests.java | 824 ++++++++++++++++++ .../elasticsearch/test/MockLogAppender.java | 2 +- .../transport/netty/KeyedLockTests.java | 33 +- .../elasticsearch/tribe/TribeUnitTests.java | 6 +- .../org/elasticsearch/test/ESTestCase.java | 32 +- .../test/InternalTestCluster.java | 8 +- .../test/cluster/NoopClusterService.java | 6 - .../test/cluster/TestClusterService.java | 14 - 22 files changed, 1423 insertions(+), 963 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/cluster/NodeConnectionsService.java create mode 100644 core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java create mode 100644 core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index 3e668191ff39..6d9273b26611 
100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -136,6 +136,7 @@ public class ClusterModule extends AbstractModule { bind(AllocationService.class).asEagerSingleton(); bind(DiscoveryNodeService.class).asEagerSingleton(); bind(ClusterService.class).to(InternalClusterService.class).asEagerSingleton(); + bind(NodeConnectionsService.class).asEagerSingleton(); bind(OperationRouting.class).asEagerSingleton(); bind(MetaDataCreateIndexService.class).asEagerSingleton(); bind(MetaDataDeleteIndexService.class).asEagerSingleton(); diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterService.java b/core/src/main/java/org/elasticsearch/cluster/ClusterService.java index 27df4b9e96f4..10d547afc5c5 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterService.java @@ -26,7 +26,6 @@ import org.elasticsearch.cluster.service.PendingClusterTask; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.tasks.TaskManager; import java.util.List; @@ -154,9 +153,4 @@ public interface ClusterService extends LifecycleComponent { * @return A zero time value if the queue is empty, otherwise the time value oldest task waiting in the queue */ TimeValue getMaxTaskWaitTime(); - - /** - * Returns task manager created in the cluster service - */ - TaskManager getTaskManager(); } diff --git a/core/src/main/java/org/elasticsearch/cluster/NodeConnectionsService.java b/core/src/main/java/org/elasticsearch/cluster/NodeConnectionsService.java new file mode 100644 index 000000000000..cce25652ed76 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/NodeConnectionsService.java @@ -0,0 +1,156 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.cluster; + +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.FutureUtils; +import org.elasticsearch.common.util.concurrent.KeyedLock; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.ScheduledFuture; + +/** + * This component is responsible for connecting to nodes once they are added to the cluster state, and disconnect when they are + * removed. Also, it periodically checks that all connections are still open and if needed restores them. + * Note that this component is *not* responsible for removing nodes from the cluster if they disconnect / do not respond + * to pings. 
This is done by {@link org.elasticsearch.discovery.zen.fd.NodesFaultDetection}. Master fault detection + * is done by {@link org.elasticsearch.discovery.zen.fd.MasterFaultDetection}. + */ +public class NodeConnectionsService extends AbstractLifecycleComponent { + + public static final Setting CLUSTER_NODE_RECONNECT_INTERVAL_SETTING = + Setting.positiveTimeSetting("cluster.nodes.reconnect_interval", TimeValue.timeValueSeconds(10), false, Setting.Scope.CLUSTER); + private final ThreadPool threadPool; + private final TransportService transportService; + + // map between current node and the number of failed connection attempts. 0 means successfully connected. + // if a node doesn't appear in this list it shouldn't be monitored + private ConcurrentMap nodes = ConcurrentCollections.newConcurrentMap(); + + final private KeyedLock nodeLocks = new KeyedLock<>(); + + private final TimeValue reconnectInterval; + + private volatile ScheduledFuture backgroundFuture = null; + + @Inject + public NodeConnectionsService(Settings settings, ThreadPool threadPool, TransportService transportService) { + super(settings); + this.threadPool = threadPool; + this.transportService = transportService; + this.reconnectInterval = NodeConnectionsService.CLUSTER_NODE_RECONNECT_INTERVAL_SETTING.get(settings); + } + + public void connectToAddedNodes(ClusterChangedEvent event) { + + // TODO: do this in parallel (and wait) + for (final DiscoveryNode node : event.nodesDelta().addedNodes()) { + try (Releasable ignored = nodeLocks.acquire(node)) { + Integer current = nodes.put(node, 0); + assert current == null : "node " + node + " was added in event but already in internal nodes"; + validateNodeConnected(node); + } + } + } + + public void disconnectFromRemovedNodes(ClusterChangedEvent event) { + for (final DiscoveryNode node : event.nodesDelta().removedNodes()) { + try (Releasable ignored = nodeLocks.acquire(node)) { + Integer current = nodes.remove(node); + assert current != null : "node " + node + " 
was removed in event but not in internal nodes"; + try { + transportService.disconnectFromNode(node); + } catch (Throwable e) { + logger.warn("failed to disconnect to node [" + node + "]", e); + } + } + } + } + + void validateNodeConnected(DiscoveryNode node) { + assert nodeLocks.isHeldByCurrentThread(node) : "validateNodeConnected must be called under lock"; + if (lifecycle.stoppedOrClosed() || + nodes.containsKey(node) == false) { // we double check existence of node since connectToNode might take time... + // nothing to do + } else { + try { + // connecting to an already connected node is a noop + transportService.connectToNode(node); + nodes.put(node, 0); + } catch (Exception e) { + Integer nodeFailureCount = nodes.get(node); + assert nodeFailureCount != null : node + " didn't have a counter in nodes map"; + nodeFailureCount = nodeFailureCount + 1; + // log every 6th failure + if ((nodeFailureCount % 6) == 1) { + logger.warn("failed to connect to node {} (tried [{}] times)", e, node, nodeFailureCount); + } + nodes.put(node, nodeFailureCount); + } + } + } + + class ConnectionChecker extends AbstractRunnable { + + @Override + public void onFailure(Throwable t) { + logger.warn("unexpected error while checking for node reconnects", t); + } + + protected void doRun() { + for (DiscoveryNode node : nodes.keySet()) { + try (Releasable ignored = nodeLocks.acquire(node)) { + validateNodeConnected(node); + } + } + } + + @Override + public void onAfter() { + if (lifecycle.started()) { + backgroundFuture = threadPool.schedule(reconnectInterval, ThreadPool.Names.GENERIC, this); + } + } + } + + @Override + protected void doStart() { + backgroundFuture = threadPool.schedule(reconnectInterval, ThreadPool.Names.GENERIC, new ConnectionChecker()); + } + + @Override + protected void doStop() { + FutureUtils.cancel(backgroundFuture); + } + + @Override + protected void doClose() { + + } +} diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodeService.java 
b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodeService.java index 83f603d28903..47c0e0052d3b 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodeService.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodeService.java @@ -19,24 +19,40 @@ package org.elasticsearch.cluster.node; +import org.elasticsearch.Version; +import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Random; import java.util.concurrent.CopyOnWriteArrayList; /** */ public class DiscoveryNodeService extends AbstractComponent { + public static final Setting NODE_ID_SEED_SETTING = + // don't use node.id.seed so it won't be seen as an attribute + Setting.longSetting("node_id.seed", 0L, Long.MIN_VALUE, false, Setting.Scope.CLUSTER); private final List customAttributesProviders = new CopyOnWriteArrayList<>(); + private final Version version; @Inject - public DiscoveryNodeService(Settings settings) { + public DiscoveryNodeService(Settings settings, Version version) { super(settings); + this.version = version; + } + + public static String generateNodeId(Settings settings) { + Random random = Randomness.get(settings, NODE_ID_SEED_SETTING); + return Strings.randomBase64UUID(random); } public DiscoveryNodeService addCustomAttributeProvider(CustomAttributesProvider customAttributesProvider) { @@ -44,7 +60,7 @@ public class DiscoveryNodeService extends AbstractComponent { return this; } - public Map buildAttributes() { + public DiscoveryNode buildLocalNode(TransportAddress publishAddress) { Map attributes = new HashMap<>(settings.getByPrefix("node.").getAsMap()); 
attributes.remove("name"); // name is extracted in other places if (attributes.containsKey("client")) { @@ -76,10 +92,11 @@ public class DiscoveryNodeService extends AbstractComponent { } } - return attributes; + final String nodeId = generateNodeId(settings); + return new DiscoveryNode(settings.get("node.name"), nodeId, publishAddress, attributes, version); } - public static interface CustomAttributesProvider { + public interface CustomAttributesProvider { Map buildAttributes(); } diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index 3d70ac84e33a..7cd3d840fbc1 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.service; -import org.elasticsearch.Version; import org.elasticsearch.cluster.AckedClusterStateTaskListener; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; @@ -32,19 +31,18 @@ import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.LocalNodeMasterListener; +import org.elasticsearch.cluster.NodeConnectionsService; import org.elasticsearch.cluster.TimeoutClusterStateListener; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodeService; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.OperationRouting; import 
org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; @@ -54,7 +52,6 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.CountDown; @@ -65,9 +62,7 @@ import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor; import org.elasticsearch.common.util.concurrent.PrioritizedRunnable; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.discovery.Discovery; -import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; import java.util.ArrayList; import java.util.Collection; @@ -78,8 +73,6 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Queue; -import java.util.Random; -import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.Executor; import java.util.concurrent.Future; @@ -97,25 +90,15 @@ import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadF public class InternalClusterService extends AbstractLifecycleComponent implements ClusterService { public static final Setting CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING = Setting.positiveTimeSetting("cluster.service.slow_task_logging_threshold", TimeValue.timeValueSeconds(30), true, Setting.Scope.CLUSTER); - 
public static final Setting CLUSTER_SERVICE_RECONNECT_INTERVAL_SETTING = Setting.positiveTimeSetting("cluster.service.reconnect_interval", TimeValue.timeValueSeconds(10), false, Setting.Scope.CLUSTER); public static final String UPDATE_THREAD_NAME = "clusterService#updateTask"; - public static final Setting NODE_ID_SEED_SETTING = - // don't use node.id.seed so it won't be seen as an attribute - Setting.longSetting("node_id.seed", 0L, Long.MIN_VALUE, false, Setting.Scope.CLUSTER); private final ThreadPool threadPool; private BiConsumer clusterStatePublisher; private final OperationRouting operationRouting; - private final TransportService transportService; - private final ClusterSettings clusterSettings; - private final DiscoveryNodeService discoveryNodeService; - private final Version version; - - private final TimeValue reconnectInterval; private TimeValue slowTaskLoggingThreshold; @@ -140,47 +123,49 @@ public class InternalClusterService extends AbstractLifecycleComponent publisher) { + synchronized public void setClusterStatePublisher(BiConsumer publisher) { clusterStatePublisher = publisher; } + synchronized public void setLocalNode(DiscoveryNode localNode) { + assert clusterState.nodes().localNodeId() == null : "local node is already set"; + DiscoveryNodes.Builder nodeBuilder = DiscoveryNodes.builder(clusterState.nodes()).put(localNode).localNodeId(localNode.id()); + this.clusterState = ClusterState.builder(clusterState).nodes(nodeBuilder).build(); + } + + synchronized public void setNodeConnectionsService(NodeConnectionsService nodeConnectionsService) { + assert this.nodeConnectionsService == null : "nodeConnectionsService is already set"; + this.nodeConnectionsService = nodeConnectionsService; + } + @Override - public void addInitialStateBlock(ClusterBlock block) throws IllegalStateException { + synchronized public void addInitialStateBlock(ClusterBlock block) throws IllegalStateException { if (lifecycle.started()) { throw new IllegalStateException("can't 
set initial block when started"); } @@ -188,12 +173,12 @@ public class InternalClusterService extends AbstractLifecycleComponent nodeAttributes = discoveryNodeService.buildAttributes(); - // note, we rely on the fact that its a new id each time we start, see FD and "kill -9" handling - final String nodeId = generateNodeId(settings); - final TransportAddress publishAddress = transportService.boundAddress().publishAddress(); - DiscoveryNode localNode = new DiscoveryNode(settings.get("node.name"), nodeId, publishAddress, nodeAttributes, version); - DiscoveryNodes.Builder nodeBuilder = DiscoveryNodes.builder().put(localNode).localNodeId(localNode.id()); - this.clusterState = ClusterState.builder(clusterState).nodes(nodeBuilder).blocks(initialBlocks).build(); - this.transportService.setLocalNode(localNode); + this.clusterState = ClusterState.builder(clusterState).blocks(initialBlocks).build(); } @Override - protected void doStop() { - FutureUtils.cancel(this.reconnectToNodes); + synchronized protected void doStop() { for (NotifyTimeout onGoingTimeout : onGoingTimeouts) { onGoingTimeout.cancel(); onGoingTimeout.listener.onClose(); @@ -230,7 +207,7 @@ public class InternalClusterService extends AbstractLifecycleComponent batchResult; - long startTimeNS = System.nanoTime(); + long startTimeNS = currentTimeInNanos(); try { List inputs = toExecute.stream().map(tUpdateTask -> tUpdateTask.task).collect(Collectors.toList()); batchResult = executor.execute(previousClusterState, inputs); } catch (Throwable e) { - TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - startTimeNS))); + TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(currentTimeInNanos() - startTimeNS))); if (logger.isTraceEnabled()) { - StringBuilder sb = new StringBuilder("failed to execute cluster state update in ").append(executionTime).append(", state:\nversion [").append(previousClusterState.version()).append("], source 
[").append(source).append("]\n"); + StringBuilder sb = new StringBuilder("failed to execute cluster state update in [").append(executionTime).append("], state:\nversion [").append(previousClusterState.version()).append("], source [").append(source).append("]\n"); sb.append(previousClusterState.nodes().prettyPrint()); sb.append(previousClusterState.routingTable().prettyPrint()); sb.append(previousClusterState.getRoutingNodes().prettyPrint()); @@ -509,8 +481,8 @@ public class InternalClusterService extends AbstractLifecycleComponent slowTaskLoggingThreshold.getMillis()) { - logger.warn("cluster state update task [{}] took {} above the warn threshold of {}", source, executionTime, slowTaskLoggingThreshold); + logger.warn("cluster state update task [{}] took [{}] above the warn threshold of {}", source, executionTime, slowTaskLoggingThreshold); } } @@ -809,64 +770,6 @@ public class InternalClusterService extends AbstractLifecycleComponent failureCount = ConcurrentCollections.newConcurrentMap(); - - @Override - public void run() { - // master node will check against all nodes if its alive with certain discoveries implementations, - // but we can't rely on that, so we check on it as well - for (DiscoveryNode node : clusterState.nodes()) { - if (lifecycle.stoppedOrClosed()) { - return; - } - if (clusterState.nodes().nodeExists(node.id())) { // we double check existence of node since connectToNode might take time... - if (!transportService.nodeConnected(node)) { - try { - transportService.connectToNode(node); - } catch (Exception e) { - if (lifecycle.stoppedOrClosed()) { - return; - } - if (clusterState.nodes().nodeExists(node.id())) { // double check here as well, maybe its gone? - Integer nodeFailureCount = failureCount.get(node); - if (nodeFailureCount == null) { - nodeFailureCount = 1; - } else { - nodeFailureCount = nodeFailureCount + 1; - } - // log every 6th failure - if ((nodeFailureCount % 6) == 0) { - // reset the failure count... 
- nodeFailureCount = 0; - logger.warn("failed to reconnect to node {}", e, node); - } - failureCount.put(node, nodeFailureCount); - } - } - } - } - } - // go over and remove failed nodes that have been removed - DiscoveryNodes nodes = clusterState.nodes(); - for (Iterator failedNodesIt = failureCount.keySet().iterator(); failedNodesIt.hasNext(); ) { - DiscoveryNode failedNode = failedNodesIt.next(); - if (!nodes.nodeExists(failedNode.id())) { - failedNodesIt.remove(); - } - } - if (lifecycle.started()) { - reconnectToNodes = threadPool.schedule(reconnectInterval, ThreadPool.Names.GENERIC, this); - } - } - } - - public static String generateNodeId(Settings settings) { - Random random = Randomness.get(settings, NODE_ID_SEED_SETTING); - return Strings.randomBase64UUID(random); - } - private static class LocalNodeMasterListeners implements ClusterStateListener { private final List listeners = new CopyOnWriteArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index fa8b8c4ac412..3215f3db05a5 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -29,8 +29,10 @@ import org.elasticsearch.client.transport.TransportClientNodesService; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.InternalClusterInfoService; +import org.elasticsearch.cluster.NodeConnectionsService; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNodeService; import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.AwarenessAllocationDecider; import 
org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; @@ -259,7 +261,7 @@ public final class ClusterSettings extends AbstractScopedSettings { TransportService.TRACE_LOG_INCLUDE_SETTING, TransportCloseIndexAction.CLUSTER_INDICES_CLOSE_ENABLE_SETTING, ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING, - InternalClusterService.CLUSTER_SERVICE_RECONNECT_INTERVAL_SETTING, + NodeConnectionsService.CLUSTER_NODE_RECONNECT_INTERVAL_SETTING, HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING, HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING, Transport.TRANSPORT_TCP_COMPRESS, @@ -326,7 +328,7 @@ public final class ClusterSettings extends AbstractScopedSettings { Environment.PATH_SCRIPTS_SETTING, Environment.PATH_SHARED_DATA_SETTING, Environment.PIDFILE_SETTING, - InternalClusterService.NODE_ID_SEED_SETTING, + DiscoveryNodeService.NODE_ID_SEED_SETTING, DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING, DiscoveryModule.DISCOVERY_TYPE_SETTING, DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_SETTING, diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/KeyedLock.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/KeyedLock.java index 83bb9fd690d4..5c30330c1561 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/KeyedLock.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/KeyedLock.java @@ -20,7 +20,10 @@ package org.elasticsearch.common.util.concurrent; +import org.elasticsearch.common.lease.Releasable; + import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReentrantLock; @@ -29,9 +32,8 @@ import java.util.concurrent.locks.ReentrantLock; * created the first time they are acquired and removed if no thread hold the * lock. The latter is important to assure that the list of locks does not grow * infinitely. 
- * - * A Thread can acquire a lock only once. - * + * + * * */ public class KeyedLock { @@ -50,48 +52,38 @@ public class KeyedLock { private final ConcurrentMap map = ConcurrentCollections.newConcurrentMap(); - protected final ThreadLocal threadLocal = new ThreadLocal<>(); - - public void acquire(T key) { + public Releasable acquire(T key) { + assert isHeldByCurrentThread(key) == false : "lock for " + key + " is already held by this thread"; while (true) { - if (threadLocal.get() != null) { - // if we are here, the thread already has the lock - throw new IllegalStateException("Lock already acquired in Thread" + Thread.currentThread().getId() - + " for key " + key); - } KeyLock perNodeLock = map.get(key); if (perNodeLock == null) { KeyLock newLock = new KeyLock(fair); perNodeLock = map.putIfAbsent(key, newLock); if (perNodeLock == null) { newLock.lock(); - threadLocal.set(newLock); - return; + return new ReleasableLock(key, newLock); } } assert perNodeLock != null; int i = perNodeLock.count.get(); if (i > 0 && perNodeLock.count.compareAndSet(i, i + 1)) { perNodeLock.lock(); - threadLocal.set(perNodeLock); - return; + return new ReleasableLock(key, perNodeLock); } } } - public void release(T key) { - KeyLock lock = threadLocal.get(); + public boolean isHeldByCurrentThread(T key) { + KeyLock lock = map.get(key); if (lock == null) { - throw new IllegalStateException("Lock not acquired"); + return false; } - release(key, lock); + return lock.isHeldByCurrentThread(); } void release(T key, KeyLock lock) { - assert lock.isHeldByCurrentThread(); assert lock == map.get(key); lock.unlock(); - threadLocal.set(null); int decrementAndGet = lock.count.decrementAndGet(); if (decrementAndGet == 0) { map.remove(key, lock); @@ -99,6 +91,24 @@ public class KeyedLock { } + private final class ReleasableLock implements Releasable { + final T key; + final KeyLock lock; + final AtomicBoolean closed = new AtomicBoolean(); + + private ReleasableLock(T key, KeyLock lock) { + this.key = 
key; + this.lock = lock; + } + + @Override + public void close() { + if (closed.compareAndSet(false, true)) { + release(key, lock); + } + } + } + @SuppressWarnings("serial") private final static class KeyLock extends ReentrantLock { KeyLock(boolean fair) { diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index e279d3e819f2..b995723127a8 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -34,8 +34,10 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.MasterNodeChangePredicate; +import org.elasticsearch.cluster.NodeConnectionsService; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeService; import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.cluster.service.InternalClusterService; import org.elasticsearch.common.StopWatch; @@ -294,6 +296,10 @@ public class Node implements Closeable { "node cluster service implementation must inherit from InternalClusterService"; final InternalClusterService clusterService = (InternalClusterService) injector.getInstance(ClusterService.class); + final NodeConnectionsService nodeConnectionsService = injector.getInstance(NodeConnectionsService.class); + nodeConnectionsService.start(); + clusterService.setNodeConnectionsService(nodeConnectionsService); + // TODO hack around circular dependencies problems injector.getInstance(GatewayAllocator.class).setReallocation(clusterService, injector.getInstance(RoutingService.class)); @@ -311,6 +317,15 @@ public class Node implements Closeable { // Start the transport service now so the publish address will be added to the local disco node in ClusterService 
TransportService transportService = injector.getInstance(TransportService.class); transportService.start(); + DiscoveryNode localNode = injector.getInstance(DiscoveryNodeService.class) + .buildLocalNode(transportService.boundAddress().publishAddress()); + + // TODO: need to find a cleaner way to start/construct a service with some initial parameters, + // playing nice with the life cycle interfaces + clusterService.setLocalNode(localNode); + transportService.setLocalNode(localNode); + clusterService.add(transportService.getTaskManager()); + clusterService.start(); // start after cluster service so the local disco is known @@ -392,6 +407,7 @@ public class Node implements Closeable { injector.getInstance(RoutingService.class).stop(); injector.getInstance(ClusterService.class).stop(); injector.getInstance(Discovery.class).stop(); + injector.getInstance(NodeConnectionsService.class).stop(); injector.getInstance(MonitorService.class).stop(); injector.getInstance(GatewayService.class).stop(); injector.getInstance(SearchService.class).stop(); @@ -449,6 +465,8 @@ public class Node implements Closeable { toClose.add(injector.getInstance(RoutingService.class)); toClose.add(() -> stopWatch.stop().start("cluster")); toClose.add(injector.getInstance(ClusterService.class)); + toClose.add(() -> stopWatch.stop().start("node_connections_service")); + toClose.add(injector.getInstance(NodeConnectionsService.class)); toClose.add(() -> stopWatch.stop().start("discovery")); toClose.add(injector.getInstance(Discovery.class)); toClose.add(() -> stopWatch.stop().start("monitor")); diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java index dc9dd70ab8d2..27ba643ef71e 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java @@ -33,6 +33,7 @@ import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.math.MathUtils; import org.elasticsearch.common.metrics.CounterMetric; @@ -943,8 +944,8 @@ public class NettyTransport extends AbstractLifecycleComponent implem } globalLock.readLock().lock(); try { - connectionLock.acquire(node.id()); - try { + + try (Releasable ignored = connectionLock.acquire(node.id())) { if (!lifecycle.started()) { throw new IllegalStateException("can't add nodes to a stopped transport"); } @@ -979,8 +980,6 @@ public class NettyTransport extends AbstractLifecycleComponent implem } catch (Exception e) { throw new ConnectTransportException(node, "general node connection failure", e); } - } finally { - connectionLock.release(node.id()); } } finally { globalLock.readLock().unlock(); @@ -1103,8 +1102,8 @@ public class NettyTransport extends AbstractLifecycleComponent implem @Override public void disconnectFromNode(DiscoveryNode node) { - connectionLock.acquire(node.id()); - try { + + try (Releasable ignored = connectionLock.acquire(node.id())) { NodeChannels nodeChannels = connectedNodes.remove(node); if (nodeChannels != null) { try { @@ -1115,8 +1114,6 @@ public class NettyTransport extends AbstractLifecycleComponent implem transportServiceAdapter.raiseNodeDisconnected(node); } } - } finally { - connectionLock.release(node.id()); } } @@ -1128,8 +1125,7 @@ public class NettyTransport extends AbstractLifecycleComponent implem // check outside of the lock NodeChannels nodeChannels = connectedNodes.get(node); if (nodeChannels != null && nodeChannels.hasChannel(channel)) { - connectionLock.acquire(node.id()); - try { + try (Releasable ignored = connectionLock.acquire(node.id())) { 
nodeChannels = connectedNodes.get(node); // check again within the connection lock, if its still applicable to remove it if (nodeChannels != null && nodeChannels.hasChannel(channel)) { @@ -1143,8 +1139,6 @@ public class NettyTransport extends AbstractLifecycleComponent implem } return true; } - } finally { - connectionLock.release(node.id()); } } return false; diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index 4dcf54b5d0ba..f5d8637571a4 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -194,7 +194,7 @@ public abstract class TaskManagerTestCase extends ESTestCase { } }; transportService.start(); - clusterService = new TestClusterService(threadPool, transportService); + clusterService = new TestClusterService(threadPool); clusterService.add(transportService.getTaskManager()); discoveryNode = new DiscoveryNode(name, transportService.boundAddress().publishAddress(), Version.CURRENT); IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(settings); @@ -238,7 +238,7 @@ public abstract class TaskManagerTestCase extends ESTestCase { RecordingTaskManagerListener[] listeners = new RecordingTaskManagerListener[nodes.length]; for (int i = 0; i < nodes.length; i++) { listeners[i] = new RecordingTaskManagerListener(nodes[i].discoveryNode, actionMasks); - ((MockTaskManager) (nodes[i].clusterService.getTaskManager())).addListener(listeners[i]); + ((MockTaskManager) (nodes[i].transportService.getTaskManager())).addListener(listeners[i]); } return listeners; } diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java 
index 8c791a990182..7c2747a1a281 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -43,6 +43,7 @@ import org.elasticsearch.test.tasks.MockTaskManager; import org.elasticsearch.test.tasks.MockTaskManagerListener; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.ReceiveTimeoutTransportException; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.ArrayList; @@ -263,8 +264,8 @@ public class TasksIT extends ESIntegTestCase { ReentrantLock taskFinishLock = new ReentrantLock(); taskFinishLock.lock(); CountDownLatch taskRegistered = new CountDownLatch(1); - for (ClusterService clusterService : internalCluster().getInstances(ClusterService.class)) { - ((MockTaskManager)clusterService.getTaskManager()).addListener(new MockTaskManagerListener() { + for (TransportService transportService : internalCluster().getInstances(TransportService.class)) { + ((MockTaskManager) transportService.getTaskManager()).addListener(new MockTaskManagerListener() { @Override public void onTaskRegistered(Task task) { if (task.getAction().startsWith(IndexAction.NAME)) { @@ -408,7 +409,7 @@ public class TasksIT extends ESIntegTestCase { @Override public void tearDown() throws Exception { for (Map.Entry, RecordingTaskManagerListener> entry : listeners.entrySet()) { - ((MockTaskManager)internalCluster().getInstance(ClusterService.class, entry.getKey().v1()).getTaskManager()).removeListener(entry.getValue()); + ((MockTaskManager) internalCluster().getInstance(TransportService.class, entry.getKey().v1()).getTaskManager()).removeListener(entry.getValue()); } listeners.clear(); super.tearDown(); @@ -418,10 +419,10 @@ public class TasksIT extends ESIntegTestCase { * Registers recording task event listeners with the given action mask on all nodes */ private void 
registerTaskManageListeners(String actionMasks) { - for (ClusterService clusterService : internalCluster().getInstances(ClusterService.class)) { - DiscoveryNode node = clusterService.localNode(); + for (String nodeName : internalCluster().getNodeNames()) { + DiscoveryNode node = internalCluster().getInstance(ClusterService.class, nodeName).localNode(); RecordingTaskManagerListener listener = new RecordingTaskManagerListener(node, Strings.splitStringToArray(actionMasks, ',')); - ((MockTaskManager)clusterService.getTaskManager()).addListener(listener); + ((MockTaskManager) internalCluster().getInstance(TransportService.class, nodeName).getTaskManager()).addListener(listener); RecordingTaskManagerListener oldListener = listeners.put(new Tuple<>(node.name(), actionMasks), listener); assertNull(oldListener); } diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java index f5c99fd5f7ed..813557e314b9 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java @@ -18,16 +18,12 @@ */ package org.elasticsearch.cluster; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.service.InternalClusterService; import org.elasticsearch.cluster.service.PendingClusterTask; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Inject; @@ -35,38 +31,24 @@ import org.elasticsearch.common.inject.Singleton; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.zen.ZenDiscovery; -import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.elasticsearch.test.InternalTestCluster; -import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.Map; import java.util.Set; -import java.util.concurrent.BrokenBarrierException; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.CyclicBarrier; -import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -85,74 +67,6 @@ public class ClusterServiceIT extends ESIntegTestCase { return pluginList(TestPlugin.class); } - public void testTimeoutUpdateTask() throws Exception { - Settings settings = settingsBuilder() - .put("discovery.type", "local") - .build(); - internalCluster().startNode(settings); - ClusterService clusterService1 = internalCluster().getInstance(ClusterService.class); - 
final CountDownLatch block = new CountDownLatch(1); - clusterService1.submitStateUpdateTask("test1", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) { - try { - block.await(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - return currentState; - } - - @Override - public void onFailure(String source, Throwable t) { - throw new RuntimeException(t); - } - }); - - final CountDownLatch timedOut = new CountDownLatch(1); - final AtomicBoolean executeCalled = new AtomicBoolean(); - clusterService1.submitStateUpdateTask("test2", new ClusterStateUpdateTask() { - @Override - public TimeValue timeout() { - return TimeValue.timeValueMillis(2); - } - - @Override - public void onFailure(String source, Throwable t) { - timedOut.countDown(); - } - - @Override - public ClusterState execute(ClusterState currentState) { - executeCalled.set(true); - return currentState; - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - } - }); - - timedOut.await(); - block.countDown(); - final CountDownLatch allProcessed = new CountDownLatch(1); - clusterService1.submitStateUpdateTask("test3", new ClusterStateUpdateTask() { - @Override - public void onFailure(String source, Throwable t) { - throw new RuntimeException(t); - } - - @Override - public ClusterState execute(ClusterState currentState) { - allProcessed.countDown(); - return currentState; - } - - }); - allProcessed.await(); // executed another task to double check that execute on the timed out update task is not called... 
- assertThat(executeCalled.get(), equalTo(false)); - } - public void testAckedUpdateTask() throws Exception { Settings settings = settingsBuilder() .put("discovery.type", "local") @@ -299,63 +213,6 @@ public class ClusterServiceIT extends ESIntegTestCase { assertThat(processedLatch.await(1, TimeUnit.SECONDS), equalTo(true)); } - - public void testMasterAwareExecution() throws Exception { - Settings settings = settingsBuilder() - .put("discovery.type", "local") - .build(); - - InternalTestCluster.Async master = internalCluster().startNodeAsync(settings); - InternalTestCluster.Async nonMaster = internalCluster().startNodeAsync(settingsBuilder().put(settings).put(Node.NODE_MASTER_SETTING.getKey(), false).build()); - master.get(); - ensureGreen(); // make sure we have a cluster - - ClusterService clusterService = internalCluster().getInstance(ClusterService.class, nonMaster.get()); - - final boolean[] taskFailed = {false}; - final CountDownLatch latch1 = new CountDownLatch(1); - clusterService.submitStateUpdateTask("test", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - latch1.countDown(); - return currentState; - } - - @Override - public void onFailure(String source, Throwable t) { - taskFailed[0] = true; - latch1.countDown(); - } - }); - - latch1.await(); - assertTrue("cluster state update task was executed on a non-master", taskFailed[0]); - - taskFailed[0] = true; - final CountDownLatch latch2 = new CountDownLatch(1); - clusterService.submitStateUpdateTask("test", new ClusterStateUpdateTask() { - @Override - public boolean runOnlyOnMaster() { - return false; - } - - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - taskFailed[0] = false; - latch2.countDown(); - return currentState; - } - - @Override - public void onFailure(String source, Throwable t) { - taskFailed[0] = true; - latch2.countDown(); - } - }); - latch2.await(); - assertFalse("non-master 
cluster state update task was not executed", taskFailed[0]); - } - public void testAckedUpdateTaskNoAckExpected() throws Exception { Settings settings = settingsBuilder() .put("discovery.type", "local") @@ -715,571 +572,6 @@ public class ClusterServiceIT extends ESIntegTestCase { } } - /** - * Note, this test can only work as long as we have a single thread executor executing the state update tasks! - */ - public void testPrioritizedTasks() throws Exception { - Settings settings = settingsBuilder() - .put("discovery.type", "local") - .build(); - internalCluster().startNode(settings); - ClusterService clusterService = internalCluster().getInstance(ClusterService.class); - BlockingTask block = new BlockingTask(Priority.IMMEDIATE); - clusterService.submitStateUpdateTask("test", block); - int taskCount = randomIntBetween(5, 20); - Priority[] priorities = Priority.values(); - - // will hold all the tasks in the order in which they were executed - List tasks = new ArrayList<>(taskCount); - CountDownLatch latch = new CountDownLatch(taskCount); - for (int i = 0; i < taskCount; i++) { - Priority priority = priorities[randomIntBetween(0, priorities.length - 1)]; - clusterService.submitStateUpdateTask("test", new PrioritizedTask(priority, latch, tasks)); - } - - block.release(); - latch.await(); - - Priority prevPriority = null; - for (PrioritizedTask task : tasks) { - if (prevPriority == null) { - prevPriority = task.priority(); - } else { - assertThat(task.priority().sameOrAfter(prevPriority), is(true)); - } - } - } - - /* - * test that a listener throwing an exception while handling a - * notification does not prevent publication notification to the - * executor - */ - public void testClusterStateTaskListenerThrowingExceptionIsOkay() throws InterruptedException { - Settings settings = settingsBuilder() - .put("discovery.type", "local") - .build(); - internalCluster().startNode(settings); - ClusterService clusterService = internalCluster().getInstance(ClusterService.class); 
- - final CountDownLatch latch = new CountDownLatch(1); - AtomicBoolean published = new AtomicBoolean(); - - clusterService.submitStateUpdateTask( - "testClusterStateTaskListenerThrowingExceptionIsOkay", - new Object(), - ClusterStateTaskConfig.build(Priority.NORMAL), - new ClusterStateTaskExecutor() { - @Override - public boolean runOnlyOnMaster() { - return false; - } - - @Override - public BatchResult execute(ClusterState currentState, List tasks) throws Exception { - ClusterState newClusterState = ClusterState.builder(currentState).build(); - return BatchResult.builder().successes(tasks).build(newClusterState); - } - - @Override - public void clusterStatePublished(ClusterState newClusterState) { - published.set(true); - latch.countDown(); - } - }, - new ClusterStateTaskListener() { - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - throw new IllegalStateException(source); - } - - @Override - public void onFailure(String source, Throwable t) { - } - } - ); - - latch.await(); - assertTrue(published.get()); - } - - // test that for a single thread, tasks are executed in the order - // that they are submitted - public void testClusterStateUpdateTasksAreExecutedInOrder() throws BrokenBarrierException, InterruptedException { - Settings settings = settingsBuilder() - .put("discovery.type", "local") - .build(); - internalCluster().startNode(settings); - ClusterService clusterService = internalCluster().getInstance(ClusterService.class); - - class TaskExecutor implements ClusterStateTaskExecutor { - List tasks = new ArrayList<>(); - - @Override - public BatchResult execute(ClusterState currentState, List tasks) throws Exception { - this.tasks.addAll(tasks); - return BatchResult.builder().successes(tasks).build(ClusterState.builder(currentState).build()); - } - - @Override - public boolean runOnlyOnMaster() { - return false; - } - } - - int numberOfThreads = randomIntBetween(2, 8); - TaskExecutor[] executors = 
new TaskExecutor[numberOfThreads]; - for (int i = 0; i < numberOfThreads; i++) { - executors[i] = new TaskExecutor(); - } - - int tasksSubmittedPerThread = randomIntBetween(2, 1024); - - CopyOnWriteArrayList> failures = new CopyOnWriteArrayList<>(); - CountDownLatch updateLatch = new CountDownLatch(numberOfThreads * tasksSubmittedPerThread); - - ClusterStateTaskListener listener = new ClusterStateTaskListener() { - @Override - public void onFailure(String source, Throwable t) { - logger.error("unexpected failure: [{}]", t, source); - failures.add(new Tuple<>(source, t)); - updateLatch.countDown(); - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - updateLatch.countDown(); - } - }; - - CyclicBarrier barrier = new CyclicBarrier(1 + numberOfThreads); - - for (int i = 0; i < numberOfThreads; i++) { - final int index = i; - Thread thread = new Thread(() -> { - try { - barrier.await(); - for (int j = 0; j < tasksSubmittedPerThread; j++) { - clusterService.submitStateUpdateTask("[" + index + "][" + j + "]", j, ClusterStateTaskConfig.build(randomFrom(Priority.values())), executors[index], listener); - } - barrier.await(); - } catch (InterruptedException | BrokenBarrierException e) { - throw new AssertionError(e); - } - }); - thread.start(); - } - - // wait for all threads to be ready - barrier.await(); - // wait for all threads to finish - barrier.await(); - - updateLatch.await(); - - assertThat(failures, empty()); - - for (int i = 0; i < numberOfThreads; i++) { - assertEquals(tasksSubmittedPerThread, executors[i].tasks.size()); - for (int j = 0; j < tasksSubmittedPerThread; j++) { - assertNotNull(executors[i].tasks.get(j)); - assertEquals("cluster state update task executed out of order", j, (int)executors[i].tasks.get(j)); - } - } - } - - public void testClusterStateBatchedUpdates() throws BrokenBarrierException, InterruptedException { - Settings settings = settingsBuilder() - .put("discovery.type", 
"local") - .build(); - internalCluster().startNode(settings); - ClusterService clusterService = internalCluster().getInstance(ClusterService.class); - - AtomicInteger counter = new AtomicInteger(); - class Task { - private AtomicBoolean state = new AtomicBoolean(); - - public void execute() { - if (!state.compareAndSet(false, true)) { - throw new IllegalStateException(); - } else { - counter.incrementAndGet(); - } - } - } - - int numberOfThreads = randomIntBetween(2, 8); - int tasksSubmittedPerThread = randomIntBetween(1, 1024); - int numberOfExecutors = Math.max(1, numberOfThreads / 4); - final Semaphore semaphore = new Semaphore(numberOfExecutors); - - class TaskExecutor implements ClusterStateTaskExecutor { - private AtomicInteger counter = new AtomicInteger(); - private AtomicInteger batches = new AtomicInteger(); - private AtomicInteger published = new AtomicInteger(); - - @Override - public BatchResult execute(ClusterState currentState, List tasks) throws Exception { - tasks.forEach(task -> task.execute()); - counter.addAndGet(tasks.size()); - ClusterState maybeUpdatedClusterState = currentState; - if (randomBoolean()) { - maybeUpdatedClusterState = ClusterState.builder(currentState).build(); - batches.incrementAndGet(); - semaphore.acquire(); - } - return BatchResult.builder().successes(tasks).build(maybeUpdatedClusterState); - } - - @Override - public boolean runOnlyOnMaster() { - return false; - } - - @Override - public void clusterStatePublished(ClusterState newClusterState) { - published.incrementAndGet(); - semaphore.release(); - } - } - - ConcurrentMap counters = new ConcurrentHashMap<>(); - CountDownLatch updateLatch = new CountDownLatch(numberOfThreads * tasksSubmittedPerThread); - ClusterStateTaskListener listener = new ClusterStateTaskListener() { - @Override - public void onFailure(String source, Throwable t) { - assert false; - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - 
counters.computeIfAbsent(source, key -> new AtomicInteger()).incrementAndGet(); - updateLatch.countDown(); - } - }; - - List executors = new ArrayList<>(); - for (int i = 0; i < numberOfExecutors; i++) { - executors.add(new TaskExecutor()); - } - - // randomly assign tasks to executors - List assignments = new ArrayList<>(); - for (int i = 0; i < numberOfThreads; i++) { - for (int j = 0; j < tasksSubmittedPerThread; j++) { - assignments.add(randomFrom(executors)); - } - } - - Map counts = new HashMap<>(); - for (TaskExecutor executor : assignments) { - counts.merge(executor, 1, (previous, one) -> previous + one); - } - - CyclicBarrier barrier = new CyclicBarrier(1 + numberOfThreads); - for (int i = 0; i < numberOfThreads; i++) { - final int index = i; - Thread thread = new Thread(() -> { - try { - barrier.await(); - for (int j = 0; j < tasksSubmittedPerThread; j++) { - ClusterStateTaskExecutor executor = assignments.get(index * tasksSubmittedPerThread + j); - clusterService.submitStateUpdateTask( - Thread.currentThread().getName(), - new Task(), - ClusterStateTaskConfig.build(randomFrom(Priority.values())), - executor, - listener); - } - barrier.await(); - } catch (BrokenBarrierException | InterruptedException e) { - throw new AssertionError(e); - } - }); - thread.start(); - } - - // wait for all threads to be ready - barrier.await(); - // wait for all threads to finish - barrier.await(); - - // wait until all the cluster state updates have been processed - updateLatch.await(); - // and until all of the publication callbacks have completed - semaphore.acquire(numberOfExecutors); - - // assert the number of executed tasks is correct - assertEquals(numberOfThreads * tasksSubmittedPerThread, counter.get()); - - // assert each executor executed the correct number of tasks - for (TaskExecutor executor : executors) { - if (counts.containsKey(executor)) { - assertEquals((int) counts.get(executor), executor.counter.get()); - assertEquals(executor.batches.get(), 
executor.published.get()); - } - } - - // assert the correct number of clusterStateProcessed events were triggered - for (Map.Entry entry : counters.entrySet()) { - assertEquals(entry.getValue().get(), tasksSubmittedPerThread); - } - } - - @TestLogging("cluster:TRACE") // To ensure that we log cluster state events on TRACE level - public void testClusterStateUpdateLogging() throws Exception { - Settings settings = settingsBuilder() - .put("discovery.type", "local") - .build(); - internalCluster().startNode(settings); - ClusterService clusterService1 = internalCluster().getInstance(ClusterService.class); - MockLogAppender mockAppender = new MockLogAppender(); - mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test1", "cluster.service", Level.DEBUG, "*processing [test1]: took * no change in cluster_state")); - mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test2", "cluster.service", Level.TRACE, "*failed to execute cluster state update in *")); - mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test3", "cluster.service", Level.DEBUG, "*processing [test3]: took * done applying updated cluster_state (version: *, uuid: *)")); - - Logger rootLogger = Logger.getRootLogger(); - rootLogger.addAppender(mockAppender); - try { - final CountDownLatch latch = new CountDownLatch(4); - clusterService1.submitStateUpdateTask("test1", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - return currentState; - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - latch.countDown(); - } - - @Override - public void onFailure(String source, Throwable t) { - fail(); - } - }); - clusterService1.submitStateUpdateTask("test2", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) { - throw new IllegalArgumentException("Testing handling of exceptions in 
the cluster state task"); - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - fail(); - } - - @Override - public void onFailure(String source, Throwable t) { - latch.countDown(); - } - }); - clusterService1.submitStateUpdateTask("test3", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) { - return ClusterState.builder(currentState).incrementVersion().build(); - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - latch.countDown(); - } - - @Override - public void onFailure(String source, Throwable t) { - fail(); - } - }); - // Additional update task to make sure all previous logging made it to the logger - // We don't check logging for this on since there is no guarantee that it will occur before our check - clusterService1.submitStateUpdateTask("test4", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) { - return currentState; - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - latch.countDown(); - } - - @Override - public void onFailure(String source, Throwable t) { - fail(); - } - }); - assertThat(latch.await(1, TimeUnit.SECONDS), equalTo(true)); - } finally { - rootLogger.removeAppender(mockAppender); - } - mockAppender.assertAllExpectationsMatched(); - } - - @TestLogging("cluster:WARN") // To ensure that we log cluster state events on WARN level - public void testLongClusterStateUpdateLogging() throws Exception { - Settings settings = settingsBuilder() - .put("discovery.type", "local") - .put(InternalClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.getKey(), "10s") - .build(); - internalCluster().startNode(settings); - ClusterService clusterService1 = internalCluster().getInstance(ClusterService.class); - MockLogAppender mockAppender = new 
MockLogAppender(); - mockAppender.addExpectation(new MockLogAppender.UnseenEventExpectation("test1 shouldn't see because setting is too low", "cluster.service", Level.WARN, "*cluster state update task [test1] took * above the warn threshold of *")); - mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test2", "cluster.service", Level.WARN, "*cluster state update task [test2] took * above the warn threshold of 10ms")); - mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test3", "cluster.service", Level.WARN, "*cluster state update task [test3] took * above the warn threshold of 10ms")); - mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test4", "cluster.service", Level.WARN, "*cluster state update task [test4] took * above the warn threshold of 10ms")); - - Logger rootLogger = Logger.getRootLogger(); - rootLogger.addAppender(mockAppender); - try { - final CountDownLatch latch = new CountDownLatch(5); - final CountDownLatch processedFirstTask = new CountDownLatch(1); - clusterService1.submitStateUpdateTask("test1", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - Thread.sleep(100); - return currentState; - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - latch.countDown(); - processedFirstTask.countDown(); - } - - @Override - public void onFailure(String source, Throwable t) { - fail(); - } - }); - - processedFirstTask.await(1, TimeUnit.SECONDS); - assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder() - .put(InternalClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.getKey(), "10ms"))); - - clusterService1.submitStateUpdateTask("test2", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - Thread.sleep(100); - throw new 
IllegalArgumentException("Testing handling of exceptions in the cluster state task"); - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - fail(); - } - - @Override - public void onFailure(String source, Throwable t) { - latch.countDown(); - } - }); - clusterService1.submitStateUpdateTask("test3", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - Thread.sleep(100); - return ClusterState.builder(currentState).incrementVersion().build(); - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - latch.countDown(); - } - - @Override - public void onFailure(String source, Throwable t) { - fail(); - } - }); - clusterService1.submitStateUpdateTask("test4", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - Thread.sleep(100); - return currentState; - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - latch.countDown(); - } - - @Override - public void onFailure(String source, Throwable t) { - fail(); - } - }); - // Additional update task to make sure all previous logging made it to the logger - // We don't check logging for this on since there is no guarantee that it will occur before our check - clusterService1.submitStateUpdateTask("test5", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) { - return currentState; - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - latch.countDown(); - } - - @Override - public void onFailure(String source, Throwable t) { - fail(); - } - }); - assertThat(latch.await(5, TimeUnit.SECONDS), equalTo(true)); - } finally { - rootLogger.removeAppender(mockAppender); - } - 
mockAppender.assertAllExpectationsMatched(); - } - - private static class BlockingTask extends ClusterStateUpdateTask { - private final CountDownLatch latch = new CountDownLatch(1); - - public BlockingTask(Priority priority) { - super(priority); - } - - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - latch.await(); - return currentState; - } - - @Override - public void onFailure(String source, Throwable t) { - } - - public void release() { - latch.countDown(); - } - - } - - private static class PrioritizedTask extends ClusterStateUpdateTask { - - private final CountDownLatch latch; - private final List tasks; - - private PrioritizedTask(Priority priority, CountDownLatch latch, List tasks) { - super(priority); - this.latch = latch; - this.tasks = tasks; - } - - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - tasks.add(this); - latch.countDown(); - return currentState; - } - - @Override - public void onFailure(String source, Throwable t) { - latch.countDown(); - } - } - public static class TestPlugin extends Plugin { @Override diff --git a/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java new file mode 100644 index 000000000000..84c9e9f07a02 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java @@ -0,0 +1,275 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.component.Lifecycle; +import org.elasticsearch.common.component.LifecycleListener; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.BoundTransportAddress; +import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.TransportServiceAdapter; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.equalTo; + +public class NodeConnectionsServiceTests extends ESTestCase { + + private static ThreadPool 
THREAD_POOL; + private MockTransport transport; + private TransportService transportService; + + private List generateNodes() { + List nodes = new ArrayList<>(); + for (int i = randomIntBetween(20, 50); i > 0; i--) { + final HashMap attributes = new HashMap<>(); + if (rarely()) { + attributes.put("client", "true"); + } else { + attributes.put("master", "" + randomBoolean()); + attributes.put("data", "" + randomBoolean()); + attributes.put("ingest", "" + randomBoolean()); + } + nodes.add(new DiscoveryNode("node_" + i, "" + i, DummyTransportAddress.INSTANCE, attributes, Version.CURRENT)); + } + return nodes; + } + + private ClusterState clusterStateFromNodes(List nodes) { + final DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); + for (DiscoveryNode node : nodes) { + builder.put(node); + } + return ClusterState.builder(new ClusterName("test")).nodes(builder).build(); + } + + public void testConnectAndDisconnect() { + List nodes = generateNodes(); + NodeConnectionsService service = new NodeConnectionsService(Settings.EMPTY, THREAD_POOL, transportService); + + ClusterState current = clusterStateFromNodes(Collections.emptyList()); + ClusterChangedEvent event = new ClusterChangedEvent("test", clusterStateFromNodes(randomSubsetOf(nodes)), current); + + service.connectToAddedNodes(event); + assertConnected(event.nodesDelta().addedNodes()); + + service.disconnectFromRemovedNodes(event); + assertConnectedExactlyToNodes(event.state()); + + current = event.state(); + event = new ClusterChangedEvent("test", clusterStateFromNodes(randomSubsetOf(nodes)), current); + + service.connectToAddedNodes(event); + assertConnected(event.nodesDelta().addedNodes()); + + service.disconnectFromRemovedNodes(event); + assertConnectedExactlyToNodes(event.state()); + } + + + public void testReconnect() { + List nodes = generateNodes(); + NodeConnectionsService service = new NodeConnectionsService(Settings.EMPTY, THREAD_POOL, transportService); + + ClusterState current = 
clusterStateFromNodes(Collections.emptyList()); + ClusterChangedEvent event = new ClusterChangedEvent("test", clusterStateFromNodes(randomSubsetOf(nodes)), current); + + transport.randomConnectionExceptions = true; + + service.connectToAddedNodes(event); + + for (int i = 0; i < 3; i++) { + // simulate disconnects + for (DiscoveryNode node : randomSubsetOf(nodes)) { + transport.disconnectFromNode(node); + } + service.new ConnectionChecker().run(); + } + + // disable exceptions so things can be restored + transport.randomConnectionExceptions = false; + service.new ConnectionChecker().run(); + assertConnectedExactlyToNodes(event.state()); + } + + private void assertConnectedExactlyToNodes(ClusterState state) { + assertConnected(state.nodes()); + assertThat(transport.connectedNodes.size(), equalTo(state.nodes().size())); + } + + private void assertConnected(Iterable nodes) { + for (DiscoveryNode node : nodes) { + assertTrue("not connected to " + node, transport.connectedNodes.contains(node)); + } + } + + private void assertNotConnected(Iterable nodes) { + for (DiscoveryNode node : nodes) { + assertFalse("still connected to " + node, transport.connectedNodes.contains(node)); + } + } + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + this.transport = new MockTransport(); + transportService = new TransportService(transport, THREAD_POOL); + transportService.start(); + transportService.acceptIncomingRequests(); + } + + @Override + @After + public void tearDown() throws Exception { + transportService.stop(); + super.tearDown(); + } + + @AfterClass + public static void stopThreadPool() { + ThreadPool.terminate(THREAD_POOL, 30, TimeUnit.SECONDS); + THREAD_POOL = null; + } + + + final class MockTransport implements Transport { + + Set connectedNodes = ConcurrentCollections.newConcurrentSet(); + volatile boolean randomConnectionExceptions = false; + + @Override + public void transportServiceAdapter(TransportServiceAdapter service) { + + } + + 
@Override + public BoundTransportAddress boundAddress() { + return null; + } + + @Override + public Map profileBoundAddresses() { + return null; + } + + @Override + public TransportAddress[] addressesFromString(String address, int perAddressLimit) throws Exception { + return new TransportAddress[0]; + } + + @Override + public boolean addressSupported(Class address) { + return false; + } + + @Override + public boolean nodeConnected(DiscoveryNode node) { + return connectedNodes.contains(node); + } + + @Override + public void connectToNode(DiscoveryNode node) throws ConnectTransportException { + if (connectedNodes.contains(node) == false && randomConnectionExceptions && randomBoolean()) { + throw new ConnectTransportException(node, "simulated"); + } + connectedNodes.add(node); + } + + @Override + public void connectToNodeLight(DiscoveryNode node) throws ConnectTransportException { + + } + + @Override + public void disconnectFromNode(DiscoveryNode node) { + connectedNodes.remove(node); + } + + @Override + public void sendRequest(DiscoveryNode node, long requestId, String action, TransportRequest request, + TransportRequestOptions options) throws IOException, TransportException { + + } + + @Override + public long serverOpen() { + return 0; + } + + @Override + public List getLocalAddresses() { + return null; + } + + @Override + public Lifecycle.State lifecycleState() { + return null; + } + + @Override + public void addLifecycleListener(LifecycleListener listener) { + + } + + @Override + public void removeLifecycleListener(LifecycleListener listener) { + + } + + @Override + public Transport start() { + return null; + } + + @Override + public Transport stop() { + return null; + } + + @Override + public void close() { + + } + } +} \ No newline at end of file diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java 
b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java index 6339c700eeca..29ce8e7a6364 100644 --- a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNodeService; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; @@ -40,7 +41,6 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; -import org.elasticsearch.cluster.service.InternalClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.test.ESAllocationTestCase; @@ -305,7 +305,7 @@ public class ShardFailedClusterStateTaskExecutorTests extends ESAllocationTestCa return randomSubsetOf(1, shards.toArray(new ShardRouting[0])).get(0); } else { return - TestShardRouting.newShardRouting(shardRouting.index(), shardRouting.id(), InternalClusterService.generateNodeId(Settings.EMPTY), randomBoolean(), randomFrom(ShardRoutingState.values())); + TestShardRouting.newShardRouting(shardRouting.index(), shardRouting.id(), DiscoveryNodeService.generateNodeId(Settings.EMPTY), randomBoolean(), randomFrom(ShardRoutingState.values())); } } diff --git 
a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java new file mode 100644 index 000000000000..ff55de45649f --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java @@ -0,0 +1,824 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.cluster.service; + +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateTaskConfig; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; +import org.elasticsearch.cluster.ClusterStateTaskListener; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.NodeConnectionsService; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.OperationRouting; +import org.elasticsearch.common.Priority; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLogAppender; +import org.elasticsearch.test.junit.annotations.TestLogging; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.BrokenBarrierException; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.Semaphore; +import java.util.concurrent.atomic.AtomicBoolean; +import 
java.util.concurrent.atomic.AtomicInteger; + +import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class ClusterServiceTests extends ESTestCase { + + static ThreadPool threadPool; + TimedClusterService clusterService; + + @BeforeClass + public static void createThreadPool() { + threadPool = new ThreadPool(ClusterServiceTests.class.getName()); + } + + @AfterClass + public static void stopThreadPool() { + if (threadPool != null) { + threadPool.shutdownNow(); + threadPool = null; + } + } + + @Before + public void setUp() throws Exception { + super.setUp(); + clusterService = createClusterService(true); + } + + @After + public void tearDown() throws Exception { + clusterService.close(); + super.tearDown(); + } + + TimedClusterService createClusterService(boolean makeMaster) throws InterruptedException { + TimedClusterService test = new TimedClusterService(Settings.EMPTY, null, + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), + threadPool, new ClusterName("ClusterServiceTests")); + test.setLocalNode(new DiscoveryNode("node1", DummyTransportAddress.INSTANCE, Version.CURRENT)); + test.setNodeConnectionsService(new NodeConnectionsService(Settings.EMPTY, null, null) { + @Override + public void connectToAddedNodes(ClusterChangedEvent event) { + // skip + } + + @Override + public void disconnectFromRemovedNodes(ClusterChangedEvent event) { + // skip + } + }); + test.setClusterStatePublisher((event, ackListener) -> { + }); + test.start(); + CountDownLatch latch = new CountDownLatch(1); + test.submitStateUpdateTask("making a master", new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + final DiscoveryNodes nodes = currentState.nodes(); + final DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(nodes) + 
.masterNodeId(makeMaster ? nodes.localNodeId() : null); + return ClusterState.builder(currentState).blocks(ClusterBlocks.EMPTY_CLUSTER_BLOCK).nodes(nodesBuilder).build(); + } + + @Override + public boolean runOnlyOnMaster() { + return false; + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + latch.countDown(); + } + + @Override + public void onFailure(String source, Throwable t) { + logger.warn("unexpected exception", t); + fail("unexpected exception" + t); + } + }); + latch.await(); + return test; + } + + public void testTimeoutUpdateTask() throws Exception { + final CountDownLatch block = new CountDownLatch(1); + clusterService.submitStateUpdateTask("test1", new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) { + try { + block.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + return currentState; + } + + @Override + public void onFailure(String source, Throwable t) { + throw new RuntimeException(t); + } + }); + + final CountDownLatch timedOut = new CountDownLatch(1); + final AtomicBoolean executeCalled = new AtomicBoolean(); + clusterService.submitStateUpdateTask("test2", new ClusterStateUpdateTask() { + @Override + public TimeValue timeout() { + return TimeValue.timeValueMillis(2); + } + + @Override + public void onFailure(String source, Throwable t) { + timedOut.countDown(); + } + + @Override + public ClusterState execute(ClusterState currentState) { + executeCalled.set(true); + return currentState; + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + } + }); + + timedOut.await(); + block.countDown(); + final CountDownLatch allProcessed = new CountDownLatch(1); + clusterService.submitStateUpdateTask("test3", new ClusterStateUpdateTask() { + @Override + public void onFailure(String source, Throwable t) { + throw new RuntimeException(t); + } + + 
@Override + public ClusterState execute(ClusterState currentState) { + allProcessed.countDown(); + return currentState; + } + + }); + allProcessed.await(); // executed another task to double check that execute on the timed out update task is not called... + assertThat(executeCalled.get(), equalTo(false)); + } + + + public void testMasterAwareExecution() throws Exception { + ClusterService nonMaster = createClusterService(false); + + final boolean[] taskFailed = {false}; + final CountDownLatch latch1 = new CountDownLatch(1); + nonMaster.submitStateUpdateTask("test", new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + latch1.countDown(); + return currentState; + } + + @Override + public void onFailure(String source, Throwable t) { + taskFailed[0] = true; + latch1.countDown(); + } + }); + + latch1.await(); + assertTrue("cluster state update task was executed on a non-master", taskFailed[0]); + + taskFailed[0] = true; + final CountDownLatch latch2 = new CountDownLatch(1); + nonMaster.submitStateUpdateTask("test", new ClusterStateUpdateTask() { + @Override + public boolean runOnlyOnMaster() { + return false; + } + + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + taskFailed[0] = false; + latch2.countDown(); + return currentState; + } + + @Override + public void onFailure(String source, Throwable t) { + taskFailed[0] = true; + latch2.countDown(); + } + }); + latch2.await(); + assertFalse("non-master cluster state update task was not executed", taskFailed[0]); + + nonMaster.close(); + } + + /* + * test that a listener throwing an exception while handling a + * notification does not prevent publication notification to the + * executor + */ + public void testClusterStateTaskListenerThrowingExceptionIsOkay() throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + AtomicBoolean published = new AtomicBoolean(); + + 
clusterService.submitStateUpdateTask( + "testClusterStateTaskListenerThrowingExceptionIsOkay", + new Object(), + ClusterStateTaskConfig.build(Priority.NORMAL), + new ClusterStateTaskExecutor() { + @Override + public boolean runOnlyOnMaster() { + return false; + } + + @Override + public BatchResult execute(ClusterState currentState, List tasks) throws Exception { + ClusterState newClusterState = ClusterState.builder(currentState).build(); + return BatchResult.builder().successes(tasks).build(newClusterState); + } + + @Override + public void clusterStatePublished(ClusterState newClusterState) { + published.set(true); + latch.countDown(); + } + }, + new ClusterStateTaskListener() { + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + throw new IllegalStateException(source); + } + + @Override + public void onFailure(String source, Throwable t) { + } + } + ); + + latch.await(); + assertTrue(published.get()); + } + + // test that for a single thread, tasks are executed in the order + // that they are submitted + public void testClusterStateUpdateTasksAreExecutedInOrder() throws BrokenBarrierException, InterruptedException { + class TaskExecutor implements ClusterStateTaskExecutor { + List tasks = new ArrayList<>(); + + @Override + public BatchResult execute(ClusterState currentState, List tasks) throws Exception { + this.tasks.addAll(tasks); + return BatchResult.builder().successes(tasks).build(ClusterState.builder(currentState).build()); + } + + @Override + public boolean runOnlyOnMaster() { + return false; + } + } + + int numberOfThreads = randomIntBetween(2, 8); + TaskExecutor[] executors = new TaskExecutor[numberOfThreads]; + for (int i = 0; i < numberOfThreads; i++) { + executors[i] = new TaskExecutor(); + } + + int tasksSubmittedPerThread = randomIntBetween(2, 1024); + + CopyOnWriteArrayList> failures = new CopyOnWriteArrayList<>(); + CountDownLatch updateLatch = new CountDownLatch(numberOfThreads * 
tasksSubmittedPerThread); + + ClusterStateTaskListener listener = new ClusterStateTaskListener() { + @Override + public void onFailure(String source, Throwable t) { + logger.error("unexpected failure: [{}]", t, source); + failures.add(new Tuple<>(source, t)); + updateLatch.countDown(); + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + updateLatch.countDown(); + } + }; + + CyclicBarrier barrier = new CyclicBarrier(1 + numberOfThreads); + + for (int i = 0; i < numberOfThreads; i++) { + final int index = i; + Thread thread = new Thread(() -> { + try { + barrier.await(); + for (int j = 0; j < tasksSubmittedPerThread; j++) { + clusterService.submitStateUpdateTask("[" + index + "][" + j + "]", j, + ClusterStateTaskConfig.build(randomFrom(Priority.values())), executors[index], listener); + } + barrier.await(); + } catch (InterruptedException | BrokenBarrierException e) { + throw new AssertionError(e); + } + }); + thread.start(); + } + + // wait for all threads to be ready + barrier.await(); + // wait for all threads to finish + barrier.await(); + + updateLatch.await(); + + assertThat(failures, empty()); + + for (int i = 0; i < numberOfThreads; i++) { + assertEquals(tasksSubmittedPerThread, executors[i].tasks.size()); + for (int j = 0; j < tasksSubmittedPerThread; j++) { + assertNotNull(executors[i].tasks.get(j)); + assertEquals("cluster state update task executed out of order", j, (int) executors[i].tasks.get(j)); + } + } + } + + public void testClusterStateBatchedUpdates() throws BrokenBarrierException, InterruptedException { + AtomicInteger counter = new AtomicInteger(); + class Task { + private AtomicBoolean state = new AtomicBoolean(); + + public void execute() { + if (!state.compareAndSet(false, true)) { + throw new IllegalStateException(); + } else { + counter.incrementAndGet(); + } + } + } + + int numberOfThreads = randomIntBetween(2, 8); + int tasksSubmittedPerThread = randomIntBetween(1, 1024); 
+ int numberOfExecutors = Math.max(1, numberOfThreads / 4); + final Semaphore semaphore = new Semaphore(numberOfExecutors); + + class TaskExecutor implements ClusterStateTaskExecutor { + private AtomicInteger counter = new AtomicInteger(); + private AtomicInteger batches = new AtomicInteger(); + private AtomicInteger published = new AtomicInteger(); + + @Override + public BatchResult execute(ClusterState currentState, List tasks) throws Exception { + tasks.forEach(task -> task.execute()); + counter.addAndGet(tasks.size()); + ClusterState maybeUpdatedClusterState = currentState; + if (randomBoolean()) { + maybeUpdatedClusterState = ClusterState.builder(currentState).build(); + batches.incrementAndGet(); + semaphore.acquire(); + } + return BatchResult.builder().successes(tasks).build(maybeUpdatedClusterState); + } + + @Override + public boolean runOnlyOnMaster() { + return false; + } + + @Override + public void clusterStatePublished(ClusterState newClusterState) { + published.incrementAndGet(); + semaphore.release(); + } + } + + ConcurrentMap counters = new ConcurrentHashMap<>(); + CountDownLatch updateLatch = new CountDownLatch(numberOfThreads * tasksSubmittedPerThread); + ClusterStateTaskListener listener = new ClusterStateTaskListener() { + @Override + public void onFailure(String source, Throwable t) { + assert false; + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + counters.computeIfAbsent(source, key -> new AtomicInteger()).incrementAndGet(); + updateLatch.countDown(); + } + }; + + List executors = new ArrayList<>(); + for (int i = 0; i < numberOfExecutors; i++) { + executors.add(new TaskExecutor()); + } + + // randomly assign tasks to executors + List assignments = new ArrayList<>(); + for (int i = 0; i < numberOfThreads; i++) { + for (int j = 0; j < tasksSubmittedPerThread; j++) { + assignments.add(randomFrom(executors)); + } + } + + Map counts = new HashMap<>(); + for (TaskExecutor 
executor : assignments) { + counts.merge(executor, 1, (previous, one) -> previous + one); + } + + CyclicBarrier barrier = new CyclicBarrier(1 + numberOfThreads); + for (int i = 0; i < numberOfThreads; i++) { + final int index = i; + Thread thread = new Thread(() -> { + try { + barrier.await(); + for (int j = 0; j < tasksSubmittedPerThread; j++) { + ClusterStateTaskExecutor executor = assignments.get(index * tasksSubmittedPerThread + j); + clusterService.submitStateUpdateTask( + Thread.currentThread().getName(), + new Task(), + ClusterStateTaskConfig.build(randomFrom(Priority.values())), + executor, + listener); + } + barrier.await(); + } catch (BrokenBarrierException | InterruptedException e) { + throw new AssertionError(e); + } + }); + thread.start(); + } + + // wait for all threads to be ready + barrier.await(); + // wait for all threads to finish + barrier.await(); + + // wait until all the cluster state updates have been processed + updateLatch.await(); + // and until all of the publication callbacks have completed + semaphore.acquire(numberOfExecutors); + + // assert the number of executed tasks is correct + assertEquals(numberOfThreads * tasksSubmittedPerThread, counter.get()); + + // assert each executor executed the correct number of tasks + for (TaskExecutor executor : executors) { + if (counts.containsKey(executor)) { + assertEquals((int) counts.get(executor), executor.counter.get()); + assertEquals(executor.batches.get(), executor.published.get()); + } + } + + // assert the correct number of clusterStateProcessed events were triggered + for (Map.Entry entry : counters.entrySet()) { + assertEquals(entry.getValue().get(), tasksSubmittedPerThread); + } + } + + /** + * Note, this test can only work as long as we have a single thread executor executing the state update tasks! 
+ */ + public void testPrioritizedTasks() throws Exception { + Settings settings = settingsBuilder() + .put("discovery.type", "local") + .build(); + BlockingTask block = new BlockingTask(Priority.IMMEDIATE); + clusterService.submitStateUpdateTask("test", block); + int taskCount = randomIntBetween(5, 20); + Priority[] priorities = Priority.values(); + + // will hold all the tasks in the order in which they were executed + List tasks = new ArrayList<>(taskCount); + CountDownLatch latch = new CountDownLatch(taskCount); + for (int i = 0; i < taskCount; i++) { + Priority priority = priorities[randomIntBetween(0, priorities.length - 1)]; + clusterService.submitStateUpdateTask("test", new PrioritizedTask(priority, latch, tasks)); + } + + block.release(); + latch.await(); + + Priority prevPriority = null; + for (PrioritizedTask task : tasks) { + if (prevPriority == null) { + prevPriority = task.priority(); + } else { + assertThat(task.priority().sameOrAfter(prevPriority), is(true)); + } + } + } + + @TestLogging("cluster:TRACE") // To ensure that we log cluster state events on TRACE level + public void testClusterStateUpdateLogging() throws Exception { + MockLogAppender mockAppender = new MockLogAppender(); + mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test1", "cluster.service", Level.DEBUG, + "*processing [test1]: took [1s] no change in cluster_state")); + mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test2", "cluster.service", Level.TRACE, + "*failed to execute cluster state update in [2s]*")); + mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test3", "cluster.service", Level.DEBUG, + "*processing [test3]: took [3s] done applying updated cluster_state (version: *, uuid: *)")); + + Logger rootLogger = Logger.getRootLogger(); + rootLogger.addAppender(mockAppender); + try { + final CountDownLatch latch = new CountDownLatch(4); + clusterService.currentTimeOverride = System.nanoTime(); + 
clusterService.submitStateUpdateTask("test1", new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + clusterService.currentTimeOverride += TimeValue.timeValueSeconds(1).nanos(); + return currentState; + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + latch.countDown(); + } + + @Override + public void onFailure(String source, Throwable t) { + fail(); + } + }); + clusterService.submitStateUpdateTask("test2", new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) { + clusterService.currentTimeOverride += TimeValue.timeValueSeconds(2).nanos(); + throw new IllegalArgumentException("Testing handling of exceptions in the cluster state task"); + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + fail(); + } + + @Override + public void onFailure(String source, Throwable t) { + latch.countDown(); + } + }); + clusterService.submitStateUpdateTask("test3", new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) { + clusterService.currentTimeOverride += TimeValue.timeValueSeconds(3).nanos(); + return ClusterState.builder(currentState).incrementVersion().build(); + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + latch.countDown(); + } + + @Override + public void onFailure(String source, Throwable t) { + fail(); + } + }); + // Additional update task to make sure all previous logging made it to the logger + // We don't check logging for this on since there is no guarantee that it will occur before our check + clusterService.submitStateUpdateTask("test4", new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) { + return currentState; + } + + @Override + public void 
clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + latch.countDown(); + } + + @Override + public void onFailure(String source, Throwable t) { + fail(); + } + }); + latch.await(); + } finally { + rootLogger.removeAppender(mockAppender); + } + mockAppender.assertAllExpectationsMatched(); + } + + @TestLogging("cluster:WARN") // To ensure that we log cluster state events on WARN level + public void testLongClusterStateUpdateLogging() throws Exception { + MockLogAppender mockAppender = new MockLogAppender(); + mockAppender.addExpectation(new MockLogAppender.UnseenEventExpectation("test1 shouldn't see because setting is too low", + "cluster.service", Level.WARN, "*cluster state update task [test1] took [*] above the warn threshold of *")); + mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test2", "cluster.service", Level.WARN, + "*cluster state update task [test2] took [32s] above the warn threshold of *")); + mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test3", "cluster.service", Level.WARN, + "*cluster state update task [test3] took [33s] above the warn threshold of *")); + mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test4", "cluster.service", Level.WARN, + "*cluster state update task [test4] took [34s] above the warn threshold of *")); + + Logger rootLogger = Logger.getRootLogger(); + rootLogger.addAppender(mockAppender); + try { + final CountDownLatch latch = new CountDownLatch(5); + final CountDownLatch processedFirstTask = new CountDownLatch(1); + clusterService.currentTimeOverride = System.nanoTime(); + clusterService.submitStateUpdateTask("test1", new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + clusterService.currentTimeOverride += TimeValue.timeValueSeconds(1).nanos(); + return currentState; + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, 
ClusterState newState) { + latch.countDown(); + processedFirstTask.countDown(); + } + + @Override + public void onFailure(String source, Throwable t) { + fail(); + } + }); + + processedFirstTask.await(); + clusterService.submitStateUpdateTask("test2", new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + clusterService.currentTimeOverride += TimeValue.timeValueSeconds(32).nanos(); + throw new IllegalArgumentException("Testing handling of exceptions in the cluster state task"); + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + fail(); + } + + @Override + public void onFailure(String source, Throwable t) { + latch.countDown(); + } + }); + clusterService.submitStateUpdateTask("test3", new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + clusterService.currentTimeOverride += TimeValue.timeValueSeconds(33).nanos(); + return ClusterState.builder(currentState).incrementVersion().build(); + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + latch.countDown(); + } + + @Override + public void onFailure(String source, Throwable t) { + fail(); + } + }); + clusterService.submitStateUpdateTask("test4", new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + clusterService.currentTimeOverride += TimeValue.timeValueSeconds(34).nanos(); + return currentState; + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + latch.countDown(); + } + + @Override + public void onFailure(String source, Throwable t) { + fail(); + } + }); + // Additional update task to make sure all previous logging made it to the logger + // We don't check logging for this on since there is no guarantee that it will occur 
before our check + clusterService.submitStateUpdateTask("test5", new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) { + return currentState; + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + latch.countDown(); + } + + @Override + public void onFailure(String source, Throwable t) { + fail(); + } + }); + latch.await(); + } finally { + rootLogger.removeAppender(mockAppender); + } + mockAppender.assertAllExpectationsMatched(); + } + + private static class BlockingTask extends ClusterStateUpdateTask { + private final CountDownLatch latch = new CountDownLatch(1); + + public BlockingTask(Priority priority) { + super(priority); + } + + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + latch.await(); + return currentState; + } + + @Override + public void onFailure(String source, Throwable t) { + } + + public void release() { + latch.countDown(); + } + + } + + private static class PrioritizedTask extends ClusterStateUpdateTask { + + private final CountDownLatch latch; + private final List tasks; + + private PrioritizedTask(Priority priority, CountDownLatch latch, List tasks) { + super(priority); + this.latch = latch; + this.tasks = tasks; + } + + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + tasks.add(this); + latch.countDown(); + return currentState; + } + + @Override + public void onFailure(String source, Throwable t) { + latch.countDown(); + } + } + + static class TimedClusterService extends InternalClusterService { + + public volatile Long currentTimeOverride = null; + + public TimedClusterService(Settings settings, OperationRouting operationRouting, ClusterSettings clusterSettings, + ThreadPool threadPool, ClusterName clusterName) { + super(settings, operationRouting, clusterSettings, threadPool, clusterName); + } + + @Override + protected long currentTimeInNanos() { + if 
(currentTimeOverride != null) { + return currentTimeOverride; + } + return super.currentTimeInNanos(); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/test/MockLogAppender.java b/core/src/test/java/org/elasticsearch/test/MockLogAppender.java index c0866a810819..9e4a881b25b5 100644 --- a/core/src/test/java/org/elasticsearch/test/MockLogAppender.java +++ b/core/src/test/java/org/elasticsearch/test/MockLogAppender.java @@ -80,7 +80,7 @@ public class MockLogAppender extends AppenderSkeleton { protected final String logger; protected final Level level; protected final String message; - protected boolean saw; + volatile boolean saw; public AbstractEventExpectation(String name, String logger, Level level, String message) { this.name = name; diff --git a/core/src/test/java/org/elasticsearch/transport/netty/KeyedLockTests.java b/core/src/test/java/org/elasticsearch/transport/netty/KeyedLockTests.java index 9581dfff42f4..f94513755902 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/KeyedLockTests.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/KeyedLockTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.transport.netty; +import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.util.concurrent.KeyedLock; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; @@ -29,9 +30,7 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicInteger; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; public class KeyedLockTests extends ESTestCase { @@ -68,28 +67,6 @@ public class KeyedLockTests extends ESTestCase { } } - public void testCannotAcquireTwoLocks() throws InterruptedException { - KeyedLock connectionLock = new KeyedLock(); - String name = 
randomRealisticUnicodeOfLength(scaledRandomIntBetween(10, 50)); - connectionLock.acquire(name); - try { - connectionLock.acquire(name); - fail("Expected IllegalStateException"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), containsString("Lock already acquired")); - } - } - - public void testCannotReleaseUnacquiredLock() throws InterruptedException { - KeyedLock connectionLock = new KeyedLock(); - String name = randomRealisticUnicodeOfLength(scaledRandomIntBetween(10, 50)); - try { - connectionLock.release(name); - fail("Expected IllegalStateException"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), is("Lock not acquired")); - } - } public static class AcquireAndReleaseThread extends Thread { private CountDownLatch startLatch; @@ -117,16 +94,16 @@ public class KeyedLockTests extends ESTestCase { int numRuns = scaledRandomIntBetween(5000, 50000); for (int i = 0; i < numRuns; i++) { String curName = names[randomInt(names.length - 1)]; - connectionLock.acquire(curName); - try { + assert connectionLock.isHeldByCurrentThread(curName) == false; + try (Releasable ignored = connectionLock.acquire(curName)) { + assert connectionLock.isHeldByCurrentThread(curName); + assert connectionLock.isHeldByCurrentThread(curName + "bla") == false; Integer integer = counter.get(curName); if (integer == null) { counter.put(curName, 1); } else { counter.put(curName, integer.intValue() + 1); } - } finally { - connectionLock.release(curName); } AtomicInteger atomicInteger = new AtomicInteger(0); AtomicInteger value = safeCounter.putIfAbsent(curName, atomicInteger); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java index 9c68ea196aa7..63c09890acc0 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java @@ -23,7 +23,7 @@ import 
org.apache.lucene.util.IOUtils; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.service.InternalClusterService; +import org.elasticsearch.cluster.node.DiscoveryNodeService; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -66,14 +66,14 @@ public class TribeUnitTests extends ESTestCase { .put(baseSettings) .put("cluster.name", "tribe1") .put("node.name", "tribe1_node") - .put(InternalClusterService.NODE_ID_SEED_SETTING.getKey(), random().nextLong()) + .put(DiscoveryNodeService.NODE_ID_SEED_SETTING.getKey(), random().nextLong()) .build()).start(); tribe2 = new TribeClientNode( Settings.builder() .put(baseSettings) .put("cluster.name", "tribe2") .put("node.name", "tribe2_node") - .put(InternalClusterService.NODE_ID_SEED_SETTING.getKey(), random().nextLong()) + .put(DiscoveryNodeService.NODE_ID_SEED_SETTING.getKey(), random().nextLong()) .build()).start(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 84d887338024..c2cb12644b0a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -67,6 +67,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Random; @@ -282,10 +283,10 @@ public abstract class ESTestCase extends LuceneTestCase { * Returns a double value in the interval [start, end) if lowerInclusive is * set to true, (start, end) otherwise. 
* - * @param start lower bound of interval to draw uniformly distributed random numbers from - * @param end upper bound + * @param start lower bound of interval to draw uniformly distributed random numbers from + * @param end upper bound * @param lowerInclusive whether or not to include lower end of the interval - * */ + */ public static double randomDoubleBetween(double start, double end, boolean lowerInclusive) { double result = 0.0; @@ -555,12 +556,27 @@ public abstract class ESTestCase extends LuceneTestCase { * Returns size random values */ public static List randomSubsetOf(int size, T... values) { - if (size > values.length) { - throw new IllegalArgumentException("Can\'t pick " + size + " random objects from a list of " + values.length + " objects"); - } List list = arrayAsArrayList(values); - Collections.shuffle(list, random()); - return list.subList(0, size); + return randomSubsetOf(size, list); + } + + /** + * Returns a random subset of values (including a potential empty list) + */ + public static List randomSubsetOf(Collection collection) { + return randomSubsetOf(randomInt(collection.size() - 1), collection); + } + + /** + * Returns size random values + */ + public static List randomSubsetOf(int size, Collection collection) { + if (size > collection.size()) { + throw new IllegalArgumentException("Can\'t pick " + size + " random objects from a collection of " + collection.size() + " objects"); + } + List tempList = new ArrayList<>(collection); + Collections.shuffle(tempList, random()); + return tempList.subList(0, size); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 82c7db11d69c..43483f171175 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -39,13 +39,13 @@ import 
org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeService; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; -import org.elasticsearch.cluster.service.InternalClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.CircuitBreaker; @@ -591,7 +591,7 @@ public final class InternalTestCluster extends TestCluster { .put(Environment.PATH_HOME_SETTING.getKey(), baseDir) // allow overriding path.home .put(settings) .put("node.name", name) - .put(InternalClusterService.NODE_ID_SEED_SETTING.getKey(), seed) + .put(DiscoveryNodeService.NODE_ID_SEED_SETTING.getKey(), seed) .build(); MockNode node = new MockNode(finalSettings, version, plugins); return new NodeAndClient(name, node); @@ -838,8 +838,8 @@ public final class InternalTestCluster extends TestCluster { IOUtils.rm(nodeEnv.nodeDataPaths()); } } - final long newIdSeed = InternalClusterService.NODE_ID_SEED_SETTING.get(node.settings()) + 1; // use a new seed to make sure we have new node id - Settings finalSettings = Settings.builder().put(node.settings()).put(newSettings).put(InternalClusterService.NODE_ID_SEED_SETTING.getKey(), newIdSeed).build(); + final long newIdSeed = DiscoveryNodeService.NODE_ID_SEED_SETTING.get(node.settings()) + 1; // use a new seed to make sure we have new node id + Settings finalSettings = 
Settings.builder().put(node.settings()).put(newSettings).put(DiscoveryNodeService.NODE_ID_SEED_SETTING.getKey(), newIdSeed).build(); Collection> plugins = node.getPlugins(); Version version = node.getVersion(); node = new MockNode(finalSettings, version, plugins); diff --git a/test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java b/test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java index 99ba809c1441..ad73a097c1e0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java @@ -38,7 +38,6 @@ import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.tasks.TaskManager; import java.util.List; @@ -153,11 +152,6 @@ public class NoopClusterService implements ClusterService { return TimeValue.timeValueMillis(0); } - @Override - public TaskManager getTaskManager() { - return null; - } - @Override public Lifecycle.State lifecycleState() { return null; diff --git a/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java b/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java index 3b1082cae448..ebae5cc99474 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java @@ -47,9 +47,7 @@ import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.FutureUtils; -import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; 
-import org.elasticsearch.transport.TransportService; import java.util.Arrays; import java.util.Iterator; @@ -62,7 +60,6 @@ import java.util.concurrent.ScheduledFuture; public class TestClusterService implements ClusterService { volatile ClusterState state; - private volatile TaskManager taskManager; private final List listeners = new CopyOnWriteArrayList<>(); private final Queue onGoingTimeouts = ConcurrentCollections.newQueue(); private final ThreadPool threadPool; @@ -75,12 +72,6 @@ public class TestClusterService implements ClusterService { public TestClusterService(ThreadPool threadPool) { this(ClusterState.builder(new ClusterName("test")).build(), threadPool); - taskManager = new TaskManager(Settings.EMPTY); - } - - public TestClusterService(ThreadPool threadPool, TransportService transportService) { - this(ClusterState.builder(new ClusterName("test")).build(), threadPool); - taskManager = transportService.getTaskManager(); } public TestClusterService(ClusterState state) { @@ -243,11 +234,6 @@ public class TestClusterService implements ClusterService { throw new UnsupportedOperationException(); } - @Override - public TaskManager getTaskManager() { - return taskManager; - } - @Override public List pendingTasks() { throw new UnsupportedOperationException(); From f9622f9acc86bee251b8b083aa34bff8384b9fbd Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Thu, 10 Mar 2016 12:33:14 +0100 Subject: [PATCH 146/320] Docs: Added a note about the update API not supporting external versioning Closes #12820 --- docs/reference/docs/update.asciidoc | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index 634bc23d6ac3..35dbccf7aa2a 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -251,5 +251,15 @@ sure the document doesn't change during the update. 
You can use the `version` parameter to specify that the document should only be updated if its version matches the one specified. By setting version type to `force` you can force the new version of the document after update (use with care! with `force` -there is no guarantee the document didn't change).Version types `external` & -`external_gte` are not supported. +there is no guarantee the document didn't change). + +[NOTE] +.The update API does not support external versioning +===================================================== + +External versioning (version types `external` & `external_gte`) is not +supported by the update API as it would result in Elasticsearch version +numbers being out of sync with the external system. Use the +<> instead. + +===================================================== From a8c7ae78094780bacc53e17763620ed7c0a8ab11 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Thu, 10 Mar 2016 13:08:29 +0100 Subject: [PATCH 147/320] Fixed bad docs link --- docs/reference/docs/update.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index 35dbccf7aa2a..316714259e05 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -260,6 +260,6 @@ there is no guarantee the document didn't change). External versioning (version types `external` & `external_gte`) is not supported by the update API as it would result in Elasticsearch version numbers being out of sync with the external system. Use the -<> instead. +<> instead. ===================================================== From 9f923255878b7baefd89bc37af8fe3072f163322 Mon Sep 17 00:00:00 2001 From: jaymode Date: Thu, 10 Mar 2016 07:16:37 -0500 Subject: [PATCH 148/320] Allow additional settings for the node in ESSingleNodeTestCase This change adds a method that extending classes can override to provide additional settings for the node used in a single node test case. 
--- .../java/org/elasticsearch/test/ESSingleNodeTestCase.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 6e16d60eafc0..57dfc1068458 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -32,7 +32,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; @@ -160,6 +159,11 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { return Arrays.asList(plugins); } + /** Additional settings to add when creating the node. Also allows overriding the default settings. 
*/ + protected Settings nodeSettings() { + return Settings.EMPTY; + } + private Node newNode() { Settings settings = Settings.builder() .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), InternalTestCluster.clusterName("single-node-cluster", randomLong())) @@ -177,6 +181,7 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { .put(Node.NODE_LOCAL_SETTING.getKey(), true) .put(Node.NODE_DATA_SETTING.getKey(), true) .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // make sure we get what we set :) + .put(nodeSettings()) // allow test cases to provide their own settings or override these .build(); Node build = new MockNode(settings, getVersion(), getPlugins()); build.start(); From 2fa33d5c47d292fdcd15c7acbefe2a579c3e9a38 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 2 Mar 2016 17:57:45 +0100 Subject: [PATCH 149/320] Added ingest statistics to node stats API The ingest stats include the following statistics: * `ingest.total.count` - The total number of documents ingested during the lifetime of this node * `ingest.total.time_in_millis` - The total time spent on ingest preprocessing documents during the lifetime of this node * `ingest.total.current` - The total number of documents currently being ingested. * `ingest.total.failed` - The total number of ingest preprocessing operations that failed during the lifetime of this node Also these stats are returned on a per pipeline basis. 
--- .../admin/cluster/node/stats/NodeStats.java | 20 +- .../cluster/node/stats/NodesStatsRequest.java | 16 ++ .../node/stats/NodesStatsRequestBuilder.java | 8 + .../node/stats/TransportNodesStatsAction.java | 3 +- .../stats/TransportClusterStatsAction.java | 2 +- .../action/ingest/IngestActionFilter.java | 2 +- .../common/io/stream/StreamInput.java | 9 + .../common/io/stream/StreamOutput.java | 9 + .../org/elasticsearch/ingest/IngestStats.java | 171 ++++++++++++++++++ .../ingest/PipelineExecutionService.java | 141 ++++++++++++--- .../node/service/NodeService.java | 9 +- .../node/stats/RestNodesStatsAction.java | 3 +- .../elasticsearch/cluster/DiskUsageTests.java | 12 +- .../ingest/PipelineExecutionServiceTests.java | 41 ++++- docs/reference/cluster/nodes-stats.asciidoc | 23 +++ docs/reference/ingest/ingest-node.asciidoc | 3 + .../rest-api-spec/test/ingest/70_bulk.yaml | 34 ++++ .../MockInternalClusterInfoService.java | 2 +- .../test/InternalTestCluster.java | 2 +- 19 files changed, 469 insertions(+), 41 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/ingest/IngestStats.java diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java index a4cf2b1de2ad..c1d4bb78ba3c 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java @@ -31,6 +31,7 @@ import org.elasticsearch.discovery.DiscoveryStats; import org.elasticsearch.http.HttpStats; import org.elasticsearch.indices.NodeIndicesStats; import org.elasticsearch.indices.breaker.AllCircuitBreakerStats; +import org.elasticsearch.ingest.IngestStats; import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.monitor.jvm.JvmStats; import org.elasticsearch.monitor.os.OsStats; @@ -81,6 +82,9 @@ public class NodeStats extends BaseNodeResponse implements 
ToXContent { @Nullable private DiscoveryStats discoveryStats; + @Nullable + private IngestStats ingestStats; + NodeStats() { } @@ -89,7 +93,8 @@ public class NodeStats extends BaseNodeResponse implements ToXContent { @Nullable FsInfo fs, @Nullable TransportStats transport, @Nullable HttpStats http, @Nullable AllCircuitBreakerStats breaker, @Nullable ScriptStats scriptStats, - @Nullable DiscoveryStats discoveryStats) { + @Nullable DiscoveryStats discoveryStats, + @Nullable IngestStats ingestStats) { super(node); this.timestamp = timestamp; this.indices = indices; @@ -103,6 +108,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContent { this.breaker = breaker; this.scriptStats = scriptStats; this.discoveryStats = discoveryStats; + this.ingestStats = ingestStats; } public long getTimestamp() { @@ -187,6 +193,11 @@ public class NodeStats extends BaseNodeResponse implements ToXContent { return this.discoveryStats; } + @Nullable + public IngestStats getIngestStats() { + return ingestStats; + } + public static NodeStats readNodeStats(StreamInput in) throws IOException { NodeStats nodeInfo = new NodeStats(); nodeInfo.readFrom(in); @@ -224,7 +235,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContent { breaker = AllCircuitBreakerStats.readOptionalAllCircuitBreakerStats(in); scriptStats = in.readOptionalStreamable(ScriptStats::new); discoveryStats = in.readOptionalStreamable(() -> new DiscoveryStats(null)); - + ingestStats = in.readOptionalWritable(IngestStats.PROTO); } @Override @@ -282,6 +293,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContent { out.writeOptionalStreamable(breaker); out.writeOptionalStreamable(scriptStats); out.writeOptionalStreamable(discoveryStats); + out.writeOptionalWriteable(ingestStats); } @Override @@ -337,6 +349,10 @@ public class NodeStats extends BaseNodeResponse implements ToXContent { getDiscoveryStats().toXContent(builder, params); } + if (getIngestStats() != null) { + 
getIngestStats().toXContent(builder, params); + } + return builder; } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequest.java index 5916421c1edc..88162a617a8f 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequest.java @@ -42,6 +42,7 @@ public class NodesStatsRequest extends BaseNodesRequest { private boolean breaker; private boolean script; private boolean discovery; + private boolean ingest; public NodesStatsRequest() { } @@ -69,6 +70,7 @@ public class NodesStatsRequest extends BaseNodesRequest { this.breaker = true; this.script = true; this.discovery = true; + this.ingest = true; return this; } @@ -87,6 +89,7 @@ public class NodesStatsRequest extends BaseNodesRequest { this.breaker = false; this.script = false; this.discovery = false; + this.ingest = false; return this; } @@ -250,6 +253,17 @@ public class NodesStatsRequest extends BaseNodesRequest { return this; } + public boolean ingest() { + return ingest; + } + + /** + * Should ingest statistics be returned. 
+ */ + public NodesStatsRequest ingest(boolean ingest) { + this.ingest = ingest; + return this; + } @Override public void readFrom(StreamInput in) throws IOException { @@ -265,6 +279,7 @@ public class NodesStatsRequest extends BaseNodesRequest { breaker = in.readBoolean(); script = in.readBoolean(); discovery = in.readBoolean(); + ingest = in.readBoolean(); } @Override @@ -281,6 +296,7 @@ public class NodesStatsRequest extends BaseNodesRequest { out.writeBoolean(breaker); out.writeBoolean(script); out.writeBoolean(discovery); + out.writeBoolean(ingest); } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestBuilder.java index dc35eefee7d0..027e61226819 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestBuilder.java @@ -137,4 +137,12 @@ public class NodesStatsRequestBuilder extends NodesOperationRequestBuilder shardsStats = new ArrayList<>(); for (IndexService indexService : indicesService) { for (IndexShard indexShard : indexService) { diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java index 75555ccff7c1..1eb9337c814b 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java @@ -112,7 +112,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio logger.error("failed to execute pipeline for a bulk request", throwable); listener.onFailure(throwable); } else { - long ingestTookInMillis = TimeUnit.MILLISECONDS.convert(System.nanoTime() - ingestStartTimeInNanos, TimeUnit.NANOSECONDS); + long ingestTookInMillis = 
TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - ingestStartTimeInNanos); BulkRequest bulkRequest = bulkRequestModifier.getBulkRequest(); ActionListener actionListener = bulkRequestModifier.wrapActionListenerIfNeeded(ingestTookInMillis, listener); if (bulkRequest.requests().isEmpty()) { diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index 8eda42ae9bea..e84766d021bb 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; +import org.elasticsearch.ingest.IngestStats; import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.tasks.Task; import org.elasticsearch.search.aggregations.AggregatorBuilder; @@ -552,6 +553,14 @@ public abstract class StreamInput extends InputStream { } } + public T readOptionalWritable(T prototype) throws IOException { + if (readBoolean()) { + return (T) prototype.readFrom(this); + } else { + return null; + } + } + public T readThrowable() throws IOException { if (readBoolean()) { int key = readVInt(); diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index 864da006bf0f..0251e1e74be9 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -520,6 +520,15 @@ public abstract class StreamOutput extends OutputStream { } } + public void writeOptionalWriteable(@Nullable Writeable writeable) throws IOException { + if (writeable != null) { + writeBoolean(true); + 
writeable.writeTo(this); + } else { + writeBoolean(false); + } + } + public void writeThrowable(Throwable throwable) throws IOException { if (throwable == null) { writeBoolean(false); diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestStats.java b/core/src/main/java/org/elasticsearch/ingest/IngestStats.java new file mode 100644 index 000000000000..a59ddce4fec7 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/ingest/IngestStats.java @@ -0,0 +1,171 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +public class IngestStats implements Writeable, ToXContent { + + public final static IngestStats PROTO = new IngestStats(null, null); + + private final Stats totalStats; + private final Map statsPerPipeline; + + public IngestStats(Stats totalStats, Map statsPerPipeline) { + this.totalStats = totalStats; + this.statsPerPipeline = statsPerPipeline; + } + + /** + * @return The accumulated stats for all pipelines + */ + public Stats getTotalStats() { + return totalStats; + } + + /** + * @return The stats on a per pipeline basis + */ + public Map getStatsPerPipeline() { + return statsPerPipeline; + } + + @Override + public IngestStats readFrom(StreamInput in) throws IOException { + Stats totalStats = Stats.PROTO.readFrom(in); + totalStats.readFrom(in); + int size = in.readVInt(); + Map statsPerPipeline = new HashMap<>(size); + for (int i = 0; i < size; i++) { + Stats stats = Stats.PROTO.readFrom(in); + statsPerPipeline.put(in.readString(), stats); + stats.readFrom(in); + } + return new IngestStats(totalStats, statsPerPipeline); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + totalStats.writeTo(out); + out.writeVLong(statsPerPipeline.size()); + for (Map.Entry entry : statsPerPipeline.entrySet()) { + out.writeString(entry.getKey()); + entry.getValue().writeTo(out); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject("ingest"); + builder.startObject("total"); + totalStats.toXContent(builder, params); + 
builder.endObject(); + builder.startObject("pipelines"); + for (Map.Entry entry : statsPerPipeline.entrySet()) { + builder.startObject(entry.getKey()); + entry.getValue().toXContent(builder, params); + builder.endObject(); + } + builder.endObject(); + builder.endObject(); + return builder; + } + + public static class Stats implements Writeable, ToXContent { + + private final static Stats PROTO = new Stats(0, 0, 0, 0); + + private final long ingestCount; + private final long ingestTimeInMillis; + private final long ingestCurrent; + private final long ingestFailedCount; + + public Stats(long ingestCount, long ingestTimeInMillis, long ingestCurrent, long ingestFailedCount) { + this.ingestCount = ingestCount; + this.ingestTimeInMillis = ingestTimeInMillis; + this.ingestCurrent = ingestCurrent; + this.ingestFailedCount = ingestFailedCount; + } + + /** + * @return The total number of executed ingest preprocessing operations. + */ + public long getIngestCount() { + return ingestCount; + } + + /** + * + * @return The total time spent of ingest preprocessing in millis. + */ + public long getIngestTimeInMillis() { + return ingestTimeInMillis; + } + + /** + * @return The total number of ingest preprocessing operations currently executing. + */ + public long getIngestCurrent() { + return ingestCurrent; + } + + /** + * @return The total number of ingest preprocessing operations that have failed. 
+ */ + public long getIngestFailedCount() { + return ingestFailedCount; + } + + @Override + public Stats readFrom(StreamInput in) throws IOException { + long ingestCount = in.readVLong(); + long ingestTimeInMillis = in.readVLong(); + long ingestCurrent = in.readVLong(); + long ingestFailedCount = in.readVLong(); + return new Stats(ingestCount, ingestTimeInMillis, ingestCurrent, ingestFailedCount); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVLong(ingestCount); + out.writeVLong(ingestTimeInMillis); + out.writeVLong(ingestCurrent); + out.writeVLong(ingestFailedCount); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("count", ingestCount); + builder.timeValueField("time_in_millis", "time", ingestTimeInMillis, TimeUnit.MILLISECONDS); + builder.field("current", ingestCurrent); + builder.field("failed", ingestFailedCount); + return builder; + } + } +} diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java index 3f0de550782c..94c79db30a08 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java @@ -19,23 +19,36 @@ package org.elasticsearch.ingest; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.metrics.CounterMetric; +import org.elasticsearch.common.metrics.MeanMetric; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.ingest.core.IngestDocument; import 
org.elasticsearch.ingest.core.Pipeline; import org.elasticsearch.threadpool.ThreadPool; +import java.util.Collections; +import java.util.HashMap; import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; import java.util.function.Consumer; -public class PipelineExecutionService { +public class PipelineExecutionService implements ClusterStateListener { private final PipelineStore store; private final ThreadPool threadPool; + private final StatsHolder totalStats = new StatsHolder(); + private volatile Map statsHolderPerPipeline = Collections.emptyMap(); + public PipelineExecutionService(PipelineStore store, ThreadPool threadPool) { this.store = store; this.threadPool = threadPool; @@ -89,29 +102,85 @@ public class PipelineExecutionService { }); } - private void innerExecute(IndexRequest indexRequest, Pipeline pipeline) throws Exception { - String index = indexRequest.index(); - String type = indexRequest.type(); - String id = indexRequest.id(); - String routing = indexRequest.routing(); - String parent = indexRequest.parent(); - String timestamp = indexRequest.timestamp(); - String ttl = indexRequest.ttl() == null ? null : indexRequest.ttl().toString(); - Map sourceAsMap = indexRequest.sourceAsMap(); - IngestDocument ingestDocument = new IngestDocument(index, type, id, routing, parent, timestamp, ttl, sourceAsMap); - pipeline.execute(ingestDocument); + public IngestStats stats() { + Map statsHolderPerPipeline = this.statsHolderPerPipeline; - Map metadataMap = ingestDocument.extractMetadata(); - //it's fine to set all metadata fields all the time, as ingest document holds their starting values - //before ingestion, which might also get modified during ingestion. 
- indexRequest.index(metadataMap.get(IngestDocument.MetaData.INDEX)); - indexRequest.type(metadataMap.get(IngestDocument.MetaData.TYPE)); - indexRequest.id(metadataMap.get(IngestDocument.MetaData.ID)); - indexRequest.routing(metadataMap.get(IngestDocument.MetaData.ROUTING)); - indexRequest.parent(metadataMap.get(IngestDocument.MetaData.PARENT)); - indexRequest.timestamp(metadataMap.get(IngestDocument.MetaData.TIMESTAMP)); - indexRequest.ttl(metadataMap.get(IngestDocument.MetaData.TTL)); - indexRequest.source(ingestDocument.getSourceAndMetadata()); + Map statsPerPipeline = new HashMap<>(statsHolderPerPipeline.size()); + for (Map.Entry entry : statsHolderPerPipeline.entrySet()) { + statsPerPipeline.put(entry.getKey(), entry.getValue().createStats()); + } + + return new IngestStats(totalStats.createStats(), statsPerPipeline); + } + + @Override + public void clusterChanged(ClusterChangedEvent event) { + IngestMetadata ingestMetadata = event.state().getMetaData().custom(IngestMetadata.TYPE); + if (ingestMetadata != null) { + updatePipelineStats(ingestMetadata); + } + } + + void updatePipelineStats(IngestMetadata ingestMetadata) { + boolean changed = false; + Map newStatsPerPipeline = new HashMap<>(statsHolderPerPipeline); + for (String pipeline : newStatsPerPipeline.keySet()) { + if (ingestMetadata.getPipelines().containsKey(pipeline) == false) { + newStatsPerPipeline.remove(pipeline); + changed = true; + } + } + for (String pipeline : ingestMetadata.getPipelines().keySet()) { + if (newStatsPerPipeline.containsKey(pipeline) == false) { + newStatsPerPipeline.put(pipeline, new StatsHolder()); + changed = true; + } + } + + if (changed) { + statsHolderPerPipeline = Collections.unmodifiableMap(newStatsPerPipeline); + } + } + + private void innerExecute(IndexRequest indexRequest, Pipeline pipeline) throws Exception { + long startTimeInNanos = System.nanoTime(); + // the pipeline specific stat holder may not exist and that is fine: + // (e.g. 
the pipeline may have been removed while we're ingesting a document + Optional pipelineStats = Optional.ofNullable(statsHolderPerPipeline.get(pipeline.getId())); + try { + totalStats.preIngest(); + pipelineStats.ifPresent(StatsHolder::preIngest); + String index = indexRequest.index(); + String type = indexRequest.type(); + String id = indexRequest.id(); + String routing = indexRequest.routing(); + String parent = indexRequest.parent(); + String timestamp = indexRequest.timestamp(); + String ttl = indexRequest.ttl() == null ? null : indexRequest.ttl().toString(); + Map sourceAsMap = indexRequest.sourceAsMap(); + IngestDocument ingestDocument = new IngestDocument(index, type, id, routing, parent, timestamp, ttl, sourceAsMap); + pipeline.execute(ingestDocument); + + Map metadataMap = ingestDocument.extractMetadata(); + //it's fine to set all metadata fields all the time, as ingest document holds their starting values + //before ingestion, which might also get modified during ingestion. + indexRequest.index(metadataMap.get(IngestDocument.MetaData.INDEX)); + indexRequest.type(metadataMap.get(IngestDocument.MetaData.TYPE)); + indexRequest.id(metadataMap.get(IngestDocument.MetaData.ID)); + indexRequest.routing(metadataMap.get(IngestDocument.MetaData.ROUTING)); + indexRequest.parent(metadataMap.get(IngestDocument.MetaData.PARENT)); + indexRequest.timestamp(metadataMap.get(IngestDocument.MetaData.TIMESTAMP)); + indexRequest.ttl(metadataMap.get(IngestDocument.MetaData.TTL)); + indexRequest.source(ingestDocument.getSourceAndMetadata()); + } catch (Exception e) { + totalStats.ingestFailed(); + pipelineStats.ifPresent(StatsHolder::ingestFailed); + throw e; + } finally { + long ingestTimeInMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTimeInNanos); + totalStats.postIngest(ingestTimeInMillis); + pipelineStats.ifPresent(statsHolder -> statsHolder.postIngest(ingestTimeInMillis)); + } } private Pipeline getPipeline(String pipelineId) { @@ -121,4 +190,30 @@ public 
class PipelineExecutionService { } return pipeline; } + + static class StatsHolder { + + private final MeanMetric ingestMetric = new MeanMetric(); + private final CounterMetric ingestCurrent = new CounterMetric(); + private final CounterMetric ingestFailed = new CounterMetric(); + + void preIngest() { + ingestCurrent.inc(); + } + + void postIngest(long ingestTimeInMillis) { + ingestCurrent.dec(); + ingestMetric.inc(ingestTimeInMillis); + } + + void ingestFailed() { + ingestFailed.inc(); + } + + IngestStats.Stats createStats() { + return new IngestStats.Stats(ingestMetric.count(), ingestMetric.sum(), ingestCurrent.count(), ingestFailed.count()); + } + + } + } diff --git a/core/src/main/java/org/elasticsearch/node/service/NodeService.java b/core/src/main/java/org/elasticsearch/node/service/NodeService.java index 88b2fe488681..7096b7cc56cb 100644 --- a/core/src/main/java/org/elasticsearch/node/service/NodeService.java +++ b/core/src/main/java/org/elasticsearch/node/service/NodeService.java @@ -90,6 +90,7 @@ public class NodeService extends AbstractComponent implements Closeable { this.ingestService = new IngestService(settings, threadPool, processorsRegistryBuilder); this.settingsFilter = settingsFilter; clusterService.add(ingestService.getPipelineStore()); + clusterService.add(ingestService.getPipelineExecutionService()); } // can not use constructor injection or there will be a circular dependency @@ -165,13 +166,14 @@ public class NodeService extends AbstractComponent implements Closeable { httpServer == null ? 
null : httpServer.stats(), circuitBreakerService.stats(), scriptService.stats(), - discovery.stats() + discovery.stats(), + ingestService.getPipelineExecutionService().stats() ); } public NodeStats stats(CommonStatsFlags indices, boolean os, boolean process, boolean jvm, boolean threadPool, boolean fs, boolean transport, boolean http, boolean circuitBreaker, - boolean script, boolean discoveryStats) { + boolean script, boolean discoveryStats, boolean ingest) { // for indices stats we want to include previous allocated shards stats as well (it will // only be applied to the sensible ones to use, like refresh/merge/flush/indexing stats) return new NodeStats(discovery.localNode(), System.currentTimeMillis(), @@ -185,7 +187,8 @@ public class NodeService extends AbstractComponent implements Closeable { http ? (httpServer == null ? null : httpServer.stats()) : null, circuitBreaker ? circuitBreakerService.stats() : null, script ? scriptService.stats() : null, - discoveryStats ? discovery.stats() : null + discoveryStats ? discovery.stats() : null, + ingest ? 
ingestService.getPipelineExecutionService().stats() : null ); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java index fb8e9c637406..1e2aece1646e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java @@ -81,6 +81,7 @@ public class RestNodesStatsAction extends BaseRestHandler { nodesStatsRequest.breaker(metrics.contains("breaker")); nodesStatsRequest.script(metrics.contains("script")); nodesStatsRequest.discovery(metrics.contains("discovery")); + nodesStatsRequest.ingest(metrics.contains("ingest")); // check for index specific metrics if (metrics.contains("indices")) { @@ -113,6 +114,6 @@ public class RestNodesStatsAction extends BaseRestHandler { nodesStatsRequest.indices().includeSegmentFileSizes(true); } - client.admin().cluster().nodesStats(nodesStatsRequest, new RestToXContentListener(channel)); + client.admin().cluster().nodesStats(nodesStatsRequest, new RestToXContentListener<>(channel)); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java index a74102f6969a..c18a36e01429 100644 --- a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java @@ -143,11 +143,11 @@ public class DiskUsageTests extends ESTestCase { }; NodeStats[] nodeStats = new NodeStats[] { new NodeStats(new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Version.CURRENT), 0, - null,null,null,null,null,new FsInfo(0, node1FSInfo), null,null,null,null,null), + null,null,null,null,null,new FsInfo(0, node1FSInfo), null,null,null,null,null, null), new NodeStats(new DiscoveryNode("node_2", 
DummyTransportAddress.INSTANCE, Version.CURRENT), 0, - null,null,null,null,null, new FsInfo(0, node2FSInfo), null,null,null,null,null), + null,null,null,null,null, new FsInfo(0, node2FSInfo), null,null,null,null,null, null), new NodeStats(new DiscoveryNode("node_3", DummyTransportAddress.INSTANCE, Version.CURRENT), 0, - null,null,null,null,null, new FsInfo(0, node3FSInfo), null,null,null,null,null) + null,null,null,null,null, new FsInfo(0, node3FSInfo), null,null,null,null,null, null) }; InternalClusterInfoService.fillDiskUsagePerNode(logger, nodeStats, newLeastAvaiableUsages, newMostAvaiableUsages); DiskUsage leastNode_1 = newLeastAvaiableUsages.get("node_1"); @@ -184,11 +184,11 @@ public class DiskUsageTests extends ESTestCase { }; NodeStats[] nodeStats = new NodeStats[] { new NodeStats(new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Version.CURRENT), 0, - null,null,null,null,null,new FsInfo(0, node1FSInfo), null,null,null,null,null), + null,null,null,null,null,new FsInfo(0, node1FSInfo), null,null,null,null,null, null), new NodeStats(new DiscoveryNode("node_2", DummyTransportAddress.INSTANCE, Version.CURRENT), 0, - null,null,null,null,null, new FsInfo(0, node2FSInfo), null,null,null,null,null), + null,null,null,null,null, new FsInfo(0, node2FSInfo), null,null,null,null,null, null), new NodeStats(new DiscoveryNode("node_3", DummyTransportAddress.INSTANCE, Version.CURRENT), 0, - null,null,null,null,null, new FsInfo(0, node3FSInfo), null,null,null,null,null) + null,null,null,null,null, new FsInfo(0, node3FSInfo), null,null,null,null,null, null) }; InternalClusterInfoService.fillDiskUsagePerNode(logger, nodeStats, newLeastAvailableUsages, newMostAvailableUsages); DiskUsage leastNode_1 = newLeastAvailableUsages.get("node_1"); diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java index e644df2a83a5..b84ba928be44 100644 --- 
a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.ingest.core.CompoundProcessor; import org.elasticsearch.ingest.core.IngestDocument; @@ -38,15 +39,16 @@ import org.mockito.ArgumentMatcher; import org.mockito.invocation.InvocationOnMock; import java.util.Collections; +import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.function.BiConsumer; import java.util.function.Consumer; import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Matchers.anyBoolean; import static org.mockito.Matchers.eq; import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyBoolean; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.argThat; import static org.mockito.Mockito.doAnswer; @@ -341,6 +343,43 @@ public class PipelineExecutionServiceTests extends ESTestCase { verify(completionHandler, times(1)).accept(null); } + public void testStats() throws Exception { + IngestStats ingestStats = executionService.stats(); + assertThat(ingestStats.getStatsPerPipeline().size(), equalTo(0)); + assertThat(ingestStats.getTotalStats().getIngestCount(), equalTo(0L)); + assertThat(ingestStats.getTotalStats().getIngestCurrent(), equalTo(0L)); + assertThat(ingestStats.getTotalStats().getIngestFailedCount(), equalTo(0L)); + assertThat(ingestStats.getTotalStats().getIngestTimeInMillis(), equalTo(0L)); + + when(store.get("_id1")).thenReturn(new Pipeline("_id1", null, new CompoundProcessor())); + when(store.get("_id2")).thenReturn(new 
Pipeline("_id2", null, new CompoundProcessor())); + + Map configurationMap = new HashMap<>(); + configurationMap.put("_id1", new PipelineConfiguration("_id1", new BytesArray("{}"))); + configurationMap.put("_id2", new PipelineConfiguration("_id2", new BytesArray("{}"))); + executionService.updatePipelineStats(new IngestMetadata(configurationMap)); + + Consumer failureHandler = mock(Consumer.class); + Consumer completionHandler = mock(Consumer.class); + + IndexRequest indexRequest = new IndexRequest("_index"); + indexRequest.setPipeline("_id1"); + executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler); + ingestStats = executionService.stats(); + assertThat(ingestStats.getStatsPerPipeline().size(), equalTo(2)); + assertThat(ingestStats.getStatsPerPipeline().get("_id1").getIngestCount(), equalTo(1L)); + assertThat(ingestStats.getStatsPerPipeline().get("_id2").getIngestCount(), equalTo(0L)); + assertThat(ingestStats.getTotalStats().getIngestCount(), equalTo(1L)); + + indexRequest.setPipeline("_id2"); + executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler); + ingestStats = executionService.stats(); + assertThat(ingestStats.getStatsPerPipeline().size(), equalTo(2)); + assertThat(ingestStats.getStatsPerPipeline().get("_id1").getIngestCount(), equalTo(1L)); + assertThat(ingestStats.getStatsPerPipeline().get("_id2").getIngestCount(), equalTo(1L)); + assertThat(ingestStats.getTotalStats().getIngestCount(), equalTo(2L)); + } + private IngestDocument eqID(String index, String type, String id, Map source) { return argThat(new IngestDocumentMatcher(index, type, id, source)); } diff --git a/docs/reference/cluster/nodes-stats.asciidoc b/docs/reference/cluster/nodes-stats.asciidoc index bcef61d4ef71..8a2f27112b54 100644 --- a/docs/reference/cluster/nodes-stats.asciidoc +++ b/docs/reference/cluster/nodes-stats.asciidoc @@ -60,6 +60,9 @@ of `indices`, `os`, `process`, `jvm`, `transport`, `http`, `discovery`:: Statistics 
about the discovery +`ingest`:: + Statistics about ingest preprocessing + [source,js] -------------------------------------------------- # return indices and os @@ -227,3 +230,23 @@ curl -XGET 'http://localhost:9200/_nodes/stats?pretty&groups=_all' # Some groups from just the indices stats curl -XGET 'http://localhost:9200/_nodes/stats/indices?pretty&groups=foo,bar' -------------------------------------------------- + +[float] +[[ingest-stats]] +=== Ingest statistics + +The `ingest` flag can be set to retrieve statistics that concern ingest: + +`ingest.total.count`:: + The total number of documents ingested during the lifetime of this node + +`ingest.total.time_in_millis`:: + The total time spent on ingest preprocessing documents during the lifetime of this node + +`ingest.total.current`:: + The total number of documents currently being ingested. + +`ingest.total.failed`:: + The total number of ingest preprocessing operations that failed during the lifetime of this node + +On top of these overall ingest statistics, these statistics are also provided on a per pipeline basis. \ No newline at end of file diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 0827baa6ea14..b314495b34ae 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -634,6 +634,9 @@ plugin.mandatory: ingest-attachment,ingest-geoip A node will not start if either of these plugins are not available. +The <> can be used to fetch ingest usage statistics, globally and on a per +pipeline basis. Useful to find out which pipelines are used the most or spent the most time on preprocessing. + [[append-procesesor]] === Append Processor Appends one or more values to an existing array if the field already exists and it is an array. 
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/70_bulk.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/70_bulk.yaml index b363f018667d..3177c678206f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/70_bulk.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/70_bulk.yaml @@ -68,6 +68,23 @@ setup: - is_false: _source.field1 - is_false: _source.field2 + - do: + cluster.state: {} + # Get master node id + - set: { master_node: master } + + - do: + nodes.stats: + metric: [ ingest ] + - gte: {nodes.$master.ingest.total.count: 1} + - gte: {nodes.$master.ingest.total.failed: 0} + - gte: {nodes.$master.ingest.total.time_in_millis: 0} + - match: {nodes.$master.ingest.total.current: 0} + - match: {nodes.$master.ingest.pipelines.pipeline1.count: 1} + - match: {nodes.$master.ingest.pipelines.pipeline1.failed: 0} + - gte: {nodes.$master.ingest.pipelines.pipeline1.time_in_millis: 0} + - match: {nodes.$master.ingest.pipelines.pipeline1.current: 0} + --- "Test bulk request with default pipeline": @@ -88,6 +105,23 @@ setup: - f1: v2 - gte: { ingest_took: 0 } + - do: + cluster.state: {} + # Get master node id + - set: { master_node: master } + + - do: + nodes.stats: + metric: [ ingest ] + - gte: {nodes.$master.ingest.total.count: 1} + - gte: {nodes.$master.ingest.total.failed: 0} + - gte: {nodes.$master.ingest.total.time_in_millis: 0} + - match: {nodes.$master.ingest.total.current: 0} + - match: {nodes.$master.ingest.pipelines.pipeline2.count: 1} + - match: {nodes.$master.ingest.pipelines.pipeline2.failed: 0} + - gte: {nodes.$master.ingest.pipelines.pipeline2.time_in_millis: 0} + - match: {nodes.$master.ingest.pipelines.pipeline2.current: 0} + - do: get: index: test_index diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java index 3e9b0c09cb22..c52c2d8cbf59 
100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java @@ -74,7 +74,7 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService { null, null, null, null, null, fsInfo, null, null, null, - null, null); + null, null, null); } @Inject diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 43483f171175..f8dc889a6b6d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -1850,7 +1850,7 @@ public final class InternalTestCluster extends TestCluster { } NodeService nodeService = getInstanceFromNode(NodeService.class, nodeAndClient.node); - NodeStats stats = nodeService.stats(CommonStatsFlags.ALL, false, false, false, false, false, false, false, false, false, false); + NodeStats stats = nodeService.stats(CommonStatsFlags.ALL, false, false, false, false, false, false, false, false, false, false, false); assertThat("Fielddata size must be 0 on node: " + stats.getNode(), stats.getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0L)); assertThat("Query cache size must be 0 on node: " + stats.getNode(), stats.getIndices().getQueryCache().getMemorySizeInBytes(), equalTo(0L)); assertThat("FixedBitSet cache size must be 0 on node: " + stats.getNode(), stats.getIndices().getSegments().getBitsetMemoryInBytes(), equalTo(0L)); From 4219f8e06273d764ab935537f5a0cdd1dbaccda4 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Thu, 10 Mar 2016 14:09:04 +0100 Subject: [PATCH 150/320] Updated Java API docs with version recommendations Closes #15188 --- docs/java-api/client.asciidoc | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git 
a/docs/java-api/client.asciidoc b/docs/java-api/client.asciidoc index 7eea693d4326..86b9b56cae75 100644 --- a/docs/java-api/client.asciidoc +++ b/docs/java-api/client.asciidoc @@ -12,12 +12,16 @@ Obtaining an elasticsearch `Client` is simple. The most common way to get a client is by creating a <> that connects to a cluster. -*Important:* -______________________________________________________________________________________________________________________________________________________________ -Please note that you are encouraged to use the same version on client -and cluster sides. You may hit some incompatibility issues when mixing -major versions. -______________________________________________________________________________________________________________________________________________________________ +[IMPORTANT] +============================== + +The client must have the same major version (e.g. `2.x`, or `5.x`) as the +nodes in the cluster. Clients may connect to clusters which have a different +minor version (e.g. `2.3.x`) but it is possible that new funcionality may not +be supported. Ideally, the client should have the same version as the +cluster. + +============================== [[transport-client]] From b8db32b7fdda20c9888e07d129a129504c987401 Mon Sep 17 00:00:00 2001 From: Andrew Cholakian Date: Wed, 2 Dec 2015 18:01:52 -0600 Subject: [PATCH 151/320] Improved transport sniffing docs Closes #15204 --- docs/java-api/client.asciidoc | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/docs/java-api/client.asciidoc b/docs/java-api/client.asciidoc index 86b9b56cae75..7af8c230269d 100644 --- a/docs/java-api/client.asciidoc +++ b/docs/java-api/client.asciidoc @@ -57,11 +57,23 @@ Client client = TransportClient.builder().settings(settings).build(); //Add transport addresses and do something with the client... 
-------------------------------------------------- -The client allows sniffing the rest of the cluster, which adds data nodes -into its list of machines to use. In this case, note that the IP addresses -used will be the ones that the other nodes were started with (the -"publish" address). In order to enable it, set the -`client.transport.sniff` to `true`: +The Transport client comes with a cluster sniffing feature which +allows it to dynamically add new hosts and remove old ones. +When sniffing is enabled the transport client will connect to the nodes in its +internal node list, which is built via calls to addTransportAddress. +After this, the client will call the internal cluster state API on those nodes +to discover available data nodes. The internal node list of the client will +be replaced with those data nodes only. This list is refreshed every five seconds by default. +Note that the IP addresses the sniffer connects to are the ones declared as the 'publish' +address in those node's elasticsearch config. + +Keep in mind that list might possibly not include the original node it connected to +if that node is not a data node. If, for instance, you initially connect to a +master node, after sniffing no further requests will go to that master node, +but rather to any data nodes instead. The reason the transport excludes non-data +nodes is to avoid sending search traffic to master only nodes. 
+ +In order to enable sniffing, set `client.transport.sniff` to `true`: [source,java] -------------------------------------------------- From f7a2dbfcaf12a9e6cd6547c909a6cd44fb45fb19 Mon Sep 17 00:00:00 2001 From: Nicholas Knize Date: Thu, 10 Mar 2016 07:28:13 -0600 Subject: [PATCH 152/320] fixing silly typo in docs --- docs/reference/migration/migrate_2_2.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/migration/migrate_2_2.asciidoc b/docs/reference/migration/migrate_2_2.asciidoc index 9611d86a2ac4..8772c1017b06 100644 --- a/docs/reference/migration/migrate_2_2.asciidoc +++ b/docs/reference/migration/migrate_2_2.asciidoc @@ -10,7 +10,7 @@ your application to Elasticsearch 2.2. ==== Geo Point Type The `geo_point` format has been changed to reduce index size and the time required to both index and query -geo point data. To make these performance improvements possible both `doc_values` are `coerce` are required +geo point data. To make these performance improvements possible both `doc_values` and `coerce` are required and therefore cannot be changed. For this reason the `doc_values` and `coerce` parameters have been removed from the <> field mapping. 
From 046212035ce598300175bec36fad70aafc209f93 Mon Sep 17 00:00:00 2001 From: Robin Clarke Date: Thu, 5 Nov 2015 14:40:06 +0100 Subject: [PATCH 153/320] Clarification about precedence of settings Closes #14559 --- docs/reference/cluster/update-settings.asciidoc | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docs/reference/cluster/update-settings.asciidoc b/docs/reference/cluster/update-settings.asciidoc index 8ec584247309..d0b127cd3522 100644 --- a/docs/reference/cluster/update-settings.asciidoc +++ b/docs/reference/cluster/update-settings.asciidoc @@ -83,6 +83,16 @@ Cluster wide settings can be returned using: curl -XGET localhost:9200/_cluster/settings -------------------------------------------------- +[float] +=== Precedence of settings + +Transient cluster settings take precedence over persistent cluster settings, +which take precedence over settings configured in the `elasticsearch.yml` +config file. + +For this reason it is preferable to use the `elasticsearch.yml` file only +for local configurations, and set all cluster-wide settings with the +`settings` API. A list of dynamically updatable settings can be found in the <> documentation. From 2fa573bc581e9b9676c3297dbd117dee83f400f8 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Thu, 10 Mar 2016 14:34:05 +0100 Subject: [PATCH 154/320] Missing word in docs --- docs/reference/mapping/params/store.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/mapping/params/store.asciidoc b/docs/reference/mapping/params/store.asciidoc index b81208aed776..46d57e9d8b5c 100644 --- a/docs/reference/mapping/params/store.asciidoc +++ b/docs/reference/mapping/params/store.asciidoc @@ -1,7 +1,7 @@ [[mapping-store]] === `store` -By default, field values <> to make them searchable, +By default, field values are <> to make them searchable, but they are not _stored_. This means that the field can be queried, but the original field value cannot be retrieved. 
From 0bbb84c19a0ff2ad974bb2bd8002871e4ee1ac73 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 10 Mar 2016 15:18:08 +0100 Subject: [PATCH 155/320] test: 'Test bulk request with default pipeline' may get run first and then the total ingest count for pipeline1 is 2. --- .../src/main/resources/rest-api-spec/test/ingest/70_bulk.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/70_bulk.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/70_bulk.yaml index 3177c678206f..63cb42f10352 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/70_bulk.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/70_bulk.yaml @@ -80,7 +80,7 @@ setup: - gte: {nodes.$master.ingest.total.failed: 0} - gte: {nodes.$master.ingest.total.time_in_millis: 0} - match: {nodes.$master.ingest.total.current: 0} - - match: {nodes.$master.ingest.pipelines.pipeline1.count: 1} + - gte: {nodes.$master.ingest.pipelines.pipeline1.count: 1} - match: {nodes.$master.ingest.pipelines.pipeline1.failed: 0} - gte: {nodes.$master.ingest.pipelines.pipeline1.time_in_millis: 0} - match: {nodes.$master.ingest.pipelines.pipeline1.current: 0} From 5a8d40e44a142eb8f6050914f75e1950ae12fe18 Mon Sep 17 00:00:00 2001 From: Ali Beyad Date: Thu, 10 Mar 2016 10:39:46 -0500 Subject: [PATCH 156/320] Fixes CompletionSuggesterBuilderTests to test for the correct expected error message. 
--- .../completion/CompletionSuggesterBuilderTests.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java index 2126e6d1c81d..9f854150ed42 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.suggest.completion; import com.carrotsearch.randomizedtesting.generators.RandomStrings; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.mapper.MappedFieldType; @@ -51,6 +52,10 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe @Override protected CompletionSuggestionBuilder randomSuggestionBuilder() { + return randomCompletionSuggestionBuilder(); + } + + public static CompletionSuggestionBuilder randomCompletionSuggestionBuilder() { return randomSuggestionBuilderWithContextInfo().builder; } @@ -60,7 +65,7 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe List geoContexts = new ArrayList<>(); } - private BuilderAndInfo randomSuggestionBuilderWithContextInfo() { + private static BuilderAndInfo randomSuggestionBuilderWithContextInfo() { final BuilderAndInfo builderAndInfo = new BuilderAndInfo(); CompletionSuggestionBuilder testBuilder = new CompletionSuggestionBuilder(randomAsciiOfLengthBetween(2, 20)); setCommonPropertiesOnRandomBuilder(testBuilder); @@ -190,8 +195,8 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe try { final SuggestBuilder suggestBuilder = SuggestBuilder.fromXContent(newParseContext(payload), 
suggesters); fail("Should not have been able to create SuggestBuilder from malformed JSON: " + suggestBuilder); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("parsing failed")); + } catch (ParsingException e) { + assertThat(e.getMessage(), containsString("failed to parse field [payload]")); } } } From 22e716551bb87b4b28da46b949b317c69f9b4a1b Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Thu, 3 Mar 2016 11:44:56 -0700 Subject: [PATCH 157/320] Add -XX+AlwaysPreTouch JVM flag Enables the touching of all memory pages used by the JVM heap spaces during initialization of the HotSpot VM, which commits all memory pages at initialization time. By default, pages are committed only as they are needed. --- distribution/src/main/resources/bin/elasticsearch.in.bat | 4 ++++ distribution/src/main/resources/bin/elasticsearch.in.sh | 4 ++++ docs/reference/migration/migrate_5_0.asciidoc | 9 +++++++++ 3 files changed, 17 insertions(+) diff --git a/distribution/src/main/resources/bin/elasticsearch.in.bat b/distribution/src/main/resources/bin/elasticsearch.in.bat index 7138cf5f5cac..b909a4649527 100644 --- a/distribution/src/main/resources/bin/elasticsearch.in.bat +++ b/distribution/src/main/resources/bin/elasticsearch.in.bat @@ -85,6 +85,10 @@ REM JAVA_OPTS=%JAVA_OPTS% -XX:HeapDumpPath=$ES_HOME/logs/heapdump.hprof REM Disables explicit GC set JAVA_OPTS=%JAVA_OPTS% -XX:+DisableExplicitGC +REM Enable pre-touching of memory pages used by the JVM during hotspot +REM initialization +set JAVA_OPTS=%JAVA_OPTS% -XX:+AlwaysPreTouch + REM Ensure UTF-8 encoding by default (e.g. 
filenames) set JAVA_OPTS=%JAVA_OPTS% -Dfile.encoding=UTF-8 diff --git a/distribution/src/main/resources/bin/elasticsearch.in.sh b/distribution/src/main/resources/bin/elasticsearch.in.sh index f859a06ffab9..69d2fc941126 100644 --- a/distribution/src/main/resources/bin/elasticsearch.in.sh +++ b/distribution/src/main/resources/bin/elasticsearch.in.sh @@ -81,6 +81,10 @@ JAVA_OPTS="$JAVA_OPTS -XX:+HeapDumpOnOutOfMemoryError" # Disables explicit GC JAVA_OPTS="$JAVA_OPTS -XX:+DisableExplicitGC" +# Enable pre-touching of memory pages used by the JVM during hotspot +# initialization +JAVA_OPTS="$JAVA_OPTS -XX:+AlwaysPreTouch" + # Ensure UTF-8 encoding by default (e.g. filenames) JAVA_OPTS="$JAVA_OPTS -Dfile.encoding=UTF-8" diff --git a/docs/reference/migration/migrate_5_0.asciidoc b/docs/reference/migration/migrate_5_0.asciidoc index 6fcf566fdcb3..37c73698779d 100644 --- a/docs/reference/migration/migrate_5_0.asciidoc +++ b/docs/reference/migration/migrate_5_0.asciidoc @@ -808,6 +808,15 @@ changed to now route standard output to the journal and standard error to inherit this setting (these are the defaults for systemd). These settings can be modified by editing the elasticsearch.service file. +==== Longer startup times + +In Elasticsearch 5.0.0 the `-XX:+AlwaysPreTouch` flag has been added to the JVM +startup options. This option touches all memory pages used by the JVM heap +during initialization of the HotSpot VM to reduce the chance of having to commit +a memory page during GC time. This will increase the startup time of +Elasticsearch as well as increasing the initial resident memory usage of the +Java process. 
+ [[breaking_50_scripting]] === Scripting From 266394c3ab5837ed0d2e30a4808a6d01e3e52193 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Tue, 8 Mar 2016 21:20:15 +0100 Subject: [PATCH 158/320] Fail closing or deleting indices during a full snapshot Closes #16321 --- .../cluster/SnapshotsInProgress.java | 18 +++- .../metadata/MetaDataDeleteIndexService.java | 10 ++- .../metadata/MetaDataIndexStateService.java | 30 ++----- .../snapshots/RestoreService.java | 26 ++++++ .../snapshots/SnapshotsService.java | 70 +++++++++++++++- .../cluster/ClusterStateDiffIT.java | 1 + .../SharedClusterSnapshotRestoreIT.java | 82 +++++++++++++++---- docs/reference/migration/migrate_5_0.asciidoc | 10 +++ 8 files changed, 198 insertions(+), 49 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java b/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java index 78eef3163320..c90edee0d50f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java +++ b/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java @@ -69,15 +69,17 @@ public class SnapshotsInProgress extends AbstractDiffable implements Cus private final State state; private final SnapshotId snapshotId; private final boolean includeGlobalState; + private final boolean partial; private final ImmutableOpenMap shards; private final List indices; private final ImmutableOpenMap> waitingIndices; private final long startTime; - public Entry(SnapshotId snapshotId, boolean includeGlobalState, State state, List indices, long startTime, ImmutableOpenMap shards) { + public Entry(SnapshotId snapshotId, boolean includeGlobalState, boolean partial, State state, List indices, long startTime, ImmutableOpenMap shards) { this.state = state; this.snapshotId = snapshotId; this.includeGlobalState = includeGlobalState; + this.partial = partial; this.indices = indices; this.startTime = startTime; if (shards == null) { @@ -90,7 +92,7 @@ public class 
SnapshotsInProgress extends AbstractDiffable implements Cus } public Entry(Entry entry, State state, ImmutableOpenMap shards) { - this(entry.snapshotId, entry.includeGlobalState, state, entry.indices, entry.startTime, shards); + this(entry.snapshotId, entry.includeGlobalState, entry.partial, state, entry.indices, entry.startTime, shards); } public Entry(Entry entry, ImmutableOpenMap shards) { @@ -121,6 +123,10 @@ public class SnapshotsInProgress extends AbstractDiffable implements Cus return includeGlobalState; } + public boolean partial() { + return partial; + } + public long startTime() { return startTime; } @@ -133,6 +139,7 @@ public class SnapshotsInProgress extends AbstractDiffable implements Cus Entry entry = (Entry) o; if (includeGlobalState != entry.includeGlobalState) return false; + if (partial != entry.partial) return false; if (startTime != entry.startTime) return false; if (!indices.equals(entry.indices)) return false; if (!shards.equals(entry.shards)) return false; @@ -148,6 +155,7 @@ public class SnapshotsInProgress extends AbstractDiffable implements Cus int result = state.hashCode(); result = 31 * result + snapshotId.hashCode(); result = 31 * result + (includeGlobalState ? 1 : 0); + result = 31 * result + (partial ? 
1 : 0); result = 31 * result + shards.hashCode(); result = 31 * result + indices.hashCode(); result = 31 * result + waitingIndices.hashCode(); @@ -360,6 +368,7 @@ public class SnapshotsInProgress extends AbstractDiffable implements Cus for (int i = 0; i < entries.length; i++) { SnapshotId snapshotId = SnapshotId.readSnapshotId(in); boolean includeGlobalState = in.readBoolean(); + boolean partial = in.readBoolean(); State state = State.fromValue(in.readByte()); int indices = in.readVInt(); List indexBuilder = new ArrayList<>(); @@ -375,7 +384,7 @@ public class SnapshotsInProgress extends AbstractDiffable implements Cus State shardState = State.fromValue(in.readByte()); builder.put(shardId, new ShardSnapshotStatus(nodeId, shardState)); } - entries[i] = new Entry(snapshotId, includeGlobalState, state, Collections.unmodifiableList(indexBuilder), startTime, builder.build()); + entries[i] = new Entry(snapshotId, includeGlobalState, partial, state, Collections.unmodifiableList(indexBuilder), startTime, builder.build()); } return new SnapshotsInProgress(entries); } @@ -386,6 +395,7 @@ public class SnapshotsInProgress extends AbstractDiffable implements Cus for (Entry entry : entries) { entry.snapshotId().writeTo(out); out.writeBoolean(entry.includeGlobalState()); + out.writeBoolean(entry.partial()); out.writeByte(entry.state().value()); out.writeVInt(entry.indices().size()); for (String index : entry.indices()) { @@ -406,6 +416,7 @@ public class SnapshotsInProgress extends AbstractDiffable implements Cus static final XContentBuilderString SNAPSHOTS = new XContentBuilderString("snapshots"); static final XContentBuilderString SNAPSHOT = new XContentBuilderString("snapshot"); static final XContentBuilderString INCLUDE_GLOBAL_STATE = new XContentBuilderString("include_global_state"); + static final XContentBuilderString PARTIAL = new XContentBuilderString("partial"); static final XContentBuilderString STATE = new XContentBuilderString("state"); static final 
XContentBuilderString INDICES = new XContentBuilderString("indices"); static final XContentBuilderString START_TIME_MILLIS = new XContentBuilderString("start_time_millis"); @@ -431,6 +442,7 @@ public class SnapshotsInProgress extends AbstractDiffable implements Cus builder.field(Fields.REPOSITORY, entry.snapshotId().getRepository()); builder.field(Fields.SNAPSHOT, entry.snapshotId().getSnapshot()); builder.field(Fields.INCLUDE_GLOBAL_STATE, entry.includeGlobalState()); + builder.field(Fields.PARTIAL, entry.partial()); builder.field(Fields.STATE, entry.state()); builder.startArray(Fields.INDICES); { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java index 54c014fb4edf..132e46b1e94f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java @@ -34,11 +34,12 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.FutureUtils; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.threadpool.ThreadPool; -import java.util.Arrays; -import java.util.Collection; +import java.util.Set; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -67,7 +68,7 @@ public class MetaDataDeleteIndexService extends AbstractComponent { } public void deleteIndices(final Request request, final Listener userListener) { - Collection indices = Arrays.asList(request.indices); + Set indices = Sets.newHashSet(request.indices); final DeleteIndexListener listener = new 
DeleteIndexListener(userListener); clusterService.submitStateUpdateTask("delete-index " + indices, new ClusterStateUpdateTask(Priority.URGENT) { @@ -84,6 +85,9 @@ public class MetaDataDeleteIndexService extends AbstractComponent { @Override public ClusterState execute(final ClusterState currentState) { + // Check if index deletion conflicts with any running snapshots + SnapshotsService.checkIndexDeletion(currentState, indices); + RoutingTable.Builder routingTableBuilder = RoutingTable.builder(currentState.routingTable()); MetaData.Builder metaDataBuilder = MetaData.builder(currentState.metaData()); ClusterBlocks.Builder clusterBlocksBuilder = ClusterBlocks.builder().blocks(currentState.blocks()); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java index 6639f9bdbd60..121065bc638e 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java @@ -19,14 +19,12 @@ package org.elasticsearch.cluster.metadata; -import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.close.CloseIndexClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexClusterStateUpdateRequest; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.RestoreInProgress; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -39,8 +37,9 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.snapshots.RestoreService; +import org.elasticsearch.snapshots.SnapshotsService; import java.util.ArrayList; import java.util.Arrays; @@ -99,27 +98,10 @@ public class MetaDataIndexStateService extends AbstractComponent { return currentState; } - // Check if any of the indices to be closed are currently being restored from a snapshot and fail closing if such an index - // is found as closing an index that is being restored makes the index unusable (it cannot be recovered). - RestoreInProgress restore = currentState.custom(RestoreInProgress.TYPE); - if (restore != null) { - Set indicesToFail = null; - for (RestoreInProgress.Entry entry : restore.entries()) { - for (ObjectObjectCursor shard : entry.shards()) { - if (!shard.value.state().completed()) { - if (indicesToClose.contains(shard.key.getIndexName())) { - if (indicesToFail == null) { - indicesToFail = new HashSet<>(); - } - indicesToFail.add(shard.key.getIndexName()); - } - } - } - } - if (indicesToFail != null) { - throw new IllegalArgumentException("Cannot close indices that are being restored: " + indicesToFail); - } - } + // Check if index closing conflicts with any running restores + RestoreService.checkIndexClosing(currentState, indicesToClose); + // Check if index closing conflicts with any running snapshots + SnapshotsService.checkIndexClosing(currentState, indicesToClose); logger.info("closing indices [{}]", indicesAsString); diff --git a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java index c6f189ea8a2d..65fb88d4b645 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -774,6 +774,32 @@ public class 
RestoreService extends AbstractComponent implements ClusterStateLis return false; } + /** + * Check if any of the indices to be closed are currently being restored from a snapshot and fail closing if such an index + * is found as closing an index that is being restored makes the index unusable (it cannot be recovered). + */ + public static void checkIndexClosing(ClusterState currentState, Set indices) { + RestoreInProgress restore = currentState.custom(RestoreInProgress.TYPE); + if (restore != null) { + Set indicesToFail = null; + for (RestoreInProgress.Entry entry : restore.entries()) { + for (ObjectObjectCursor shard : entry.shards()) { + if (!shard.value.state().completed()) { + if (indices.contains(shard.key.getIndexName())) { + if (indicesToFail == null) { + indicesToFail = new HashSet<>(); + } + indicesToFail.add(shard.key.getIndexName()); + } + } + } + } + if (indicesToFail != null) { + throw new IllegalArgumentException("Cannot close indices that are being restored: " + indicesToFail); + } + } + } + /** * Adds restore completion listener *

      diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 909fb4a15695..ae3fb9a39401 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -206,7 +206,7 @@ public class SnapshotsService extends AbstractLifecycleComponent indices = Arrays.asList(indexNameExpressionResolver.concreteIndices(currentState, request.indicesOptions(), request.indices())); logger.trace("[{}][{}] creating snapshot for indices [{}]", request.repository(), request.name(), indices); - newSnapshot = new SnapshotsInProgress.Entry(snapshotId, request.includeGlobalState(), State.INIT, indices, System.currentTimeMillis(), null); + newSnapshot = new SnapshotsInProgress.Entry(snapshotId, request.includeGlobalState(), request.partial(), State.INIT, indices, System.currentTimeMillis(), null); snapshots = new SnapshotsInProgress(newSnapshot); } else { // TODO: What should we do if a snapshot is already running? @@ -228,7 +228,7 @@ public class SnapshotsService extends AbstractLifecycleComponent indices) { + Set indicesToFail = indicesToFailForCloseOrDeletion(currentState, indices); + if (indicesToFail != null) { + throw new IllegalArgumentException("Cannot delete indices that are being snapshotted: " + indicesToFail + + ". Try again after snapshot finishes or cancel the currently running snapshot."); + } + } + + /** + * Check if any of the indices to be closed are currently being snapshotted. Fail as closing an index that is being + * snapshotted (with partial == false) makes the snapshot fail. + */ + public static void checkIndexClosing(ClusterState currentState, Set indices) { + Set indicesToFail = indicesToFailForCloseOrDeletion(currentState, indices); + if (indicesToFail != null) { + throw new IllegalArgumentException("Cannot close indices that are being snapshotted: " + indicesToFail + + ". 
Try again after snapshot finishes or cancel the currently running snapshot."); + } + } + + private static Set indicesToFailForCloseOrDeletion(ClusterState currentState, Set indices) { + SnapshotsInProgress snapshots = currentState.custom(SnapshotsInProgress.TYPE); + Set indicesToFail = null; + if (snapshots != null) { + for (final SnapshotsInProgress.Entry entry : snapshots.entries()) { + if (entry.partial() == false) { + if (entry.state() == State.INIT) { + for (String index : entry.indices()) { + if (indices.contains(index)) { + if (indicesToFail == null) { + indicesToFail = new HashSet<>(); + } + indicesToFail.add(index); + } + } + } else { + for (ObjectObjectCursor shard : entry.shards()) { + if (!shard.value.state().completed()) { + if (indices.contains(shard.key.getIndexName())) { + if (indicesToFail == null) { + indicesToFail = new HashSet<>(); + } + indicesToFail.add(shard.key.getIndexName()); + } + } + } + } + } + } + } + return indicesToFail; + } + /** * Adds snapshot completion listener * @@ -1302,6 +1359,15 @@ public class SnapshotsService extends AbstractLifecycleComponentemptyList(), Math.abs(randomLong()), diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 9fb2b0f99897..a8a45e6a42f4 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -1813,19 +1813,31 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas } } - public void testDeleteIndexDuringSnapshot() throws Exception { + public void testCloseOrDeleteIndexDuringSnapshot() throws Exception { Client client = client(); boolean allowPartial = randomBoolean(); - logger.info("--> creating repository"); - assertAcked(client.admin().cluster().preparePutRepository("test-repo") + + // only block on repo init if 
we have partial snapshot or we run into deadlock when acquiring shard locks for index deletion/closing + boolean initBlocking = allowPartial || randomBoolean(); + if (initBlocking) { + assertAcked(client.admin().cluster().preparePutRepository("test-repo") .setType("mock").setSettings(Settings.settingsBuilder() - .put("location", randomRepoPath()) - .put("compress", randomBoolean()) - .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES) - .put("block_on_init", true) + .put("location", randomRepoPath()) + .put("compress", randomBoolean()) + .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES) + .put("block_on_init", true) )); + } else { + assertAcked(client.admin().cluster().preparePutRepository("test-repo") + .setType("mock").setSettings(Settings.settingsBuilder() + .put("location", randomRepoPath()) + .put("compress", randomBoolean()) + .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES) + .put("block_on_data", true) + )); + } createIndex("test-idx-1", "test-idx-2", "test-idx-3"); ensureGreen(); @@ -1843,25 +1855,61 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas logger.info("--> snapshot allow partial {}", allowPartial); ListenableActionFuture future = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap") - .setIndices("test-idx-*").setWaitForCompletion(true).setPartial(allowPartial).execute(); + .setIndices("test-idx-*").setWaitForCompletion(true).setPartial(allowPartial).execute(); logger.info("--> wait for block to kick in"); - waitForBlock(internalCluster().getMasterName(), "test-repo", TimeValue.timeValueMinutes(1)); - logger.info("--> delete some indices while snapshot is running"); - client.admin().indices().prepareDelete("test-idx-1", "test-idx-2").get(); - logger.info("--> unblock running master node"); - unblockNode(internalCluster().getMasterName()); + if (initBlocking) { + waitForBlock(internalCluster().getMasterName(), "test-repo", 
TimeValue.timeValueMinutes(1)); + } else { + waitForBlockOnAnyDataNode("test-repo", TimeValue.timeValueMinutes(1)); + } + if (allowPartial) { + // partial snapshots allow close / delete operations + if (randomBoolean()) { + logger.info("--> delete index while partial snapshot is running"); + client.admin().indices().prepareDelete("test-idx-1").get(); + } else { + logger.info("--> close index while partial snapshot is running"); + client.admin().indices().prepareClose("test-idx-1").get(); + } + } else { + // non-partial snapshots do not allow close / delete operations on indices where snapshot has not been completed + if (randomBoolean()) { + try { + logger.info("--> delete index while non-partial snapshot is running"); + client.admin().indices().prepareDelete("test-idx-1").get(); + fail("Expected deleting index to fail during snapshot"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("Cannot delete indices that are being snapshotted: [test-idx-1]")); + } + } else { + try { + logger.info("--> close index while non-partial snapshot is running"); + client.admin().indices().prepareClose("test-idx-1").get(); + fail("Expected closing index to fail during snapshot"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("Cannot close indices that are being snapshotted: [test-idx-1]")); + } + } + } + if (initBlocking) { + logger.info("--> unblock running master node"); + unblockNode(internalCluster().getMasterName()); + } else { + logger.info("--> unblock all data nodes"); + unblockAllDataNodes("test-repo"); + } logger.info("--> waiting for snapshot to finish"); CreateSnapshotResponse createSnapshotResponse = future.get(); if (allowPartial) { - logger.info("Deleted index during snapshot, but allow partial"); + logger.info("Deleted/Closed index during snapshot, but allow partial"); assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo((SnapshotState.PARTIAL))); 
assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0)); assertThat(createSnapshotResponse.getSnapshotInfo().failedShards(), greaterThan(0)); assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), lessThan(createSnapshotResponse.getSnapshotInfo().totalShards())); } else { - logger.info("Deleted index during snapshot and doesn't allow partial"); - assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo((SnapshotState.FAILED))); + logger.info("Snapshot successfully completed"); + assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo((SnapshotState.SUCCESS))); } } @@ -1960,7 +2008,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas shards.put(new ShardId("test-idx", "_na_", 1), new ShardSnapshotStatus("unknown-node", State.ABORTED)); shards.put(new ShardId("test-idx", "_na_", 2), new ShardSnapshotStatus("unknown-node", State.ABORTED)); List entries = new ArrayList<>(); - entries.add(new Entry(new SnapshotId("test-repo", "test-snap"), true, State.ABORTED, Collections.singletonList("test-idx"), System.currentTimeMillis(), shards.build())); + entries.add(new Entry(new SnapshotId("test-repo", "test-snap"), true, false, State.ABORTED, Collections.singletonList("test-idx"), System.currentTimeMillis(), shards.build())); return ClusterState.builder(currentState).putCustom(SnapshotsInProgress.TYPE, new SnapshotsInProgress(Collections.unmodifiableList(entries))).build(); } diff --git a/docs/reference/migration/migrate_5_0.asciidoc b/docs/reference/migration/migrate_5_0.asciidoc index 6fcf566fdcb3..898b79ad487f 100644 --- a/docs/reference/migration/migrate_5_0.asciidoc +++ b/docs/reference/migration/migrate_5_0.asciidoc @@ -21,6 +21,7 @@ your application to Elasticsearch 5.0. * <> * <> * <> +* <> [[breaking_50_search_changes]] === Warmers @@ -844,3 +845,12 @@ distributed document frequencies anymore. 
The option to disable the security manager `--security.manager.enabled` has been removed. In order to grant special permissions to elasticsearch users must tweak the local Java Security Policy. + +[[breaking_50_snapshot_restore]] +=== Snapshot/Restore + +==== Closing / deleting indices while running snapshot + +In previous versions of Elasticsearch, closing or deleting an index during a full snapshot would make the snapshot fail. This is now changed +by failing the close/delete index request instead. The behavior for partial snapshots remains unchanged: Closing or deleting an index during +a partial snapshot is still possible. The snapshot result is then marked as partial. From b8d931d23cb87df2838efaec20751da1397fad64 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 4 Mar 2016 14:06:32 -0500 Subject: [PATCH 159/320] [reindex] Timeout if sub-requests timeout Sadly, it isn't easy to simulate a timeout during an integration test, you just have to cause one. Groovy's sleep should do the job. 
--- .../AbstractAsyncBulkByScrollAction.java | 32 ++++++++++++------- .../reindex/BulkIndexByScrollResponse.java | 14 +++++++- ...kIndexByScrollResponseContentListener.java | 3 ++ .../index/reindex/ReindexResponse.java | 6 ++-- .../reindex/RestUpdateByQueryAction.java | 5 ++- .../index/reindex/TransportReindexAction.java | 5 +-- .../reindex/TransportUpdateByQueryAction.java | 4 +-- .../reindex/AsyncBulkByScrollActionTests.java | 28 +++++++++++++--- .../index/reindex/RoundTripTests.java | 4 +-- .../rest-api-spec/test/reindex/10_basic.yaml | 1 + .../test/update-by-query/10_basic.yaml | 1 + .../test/reindex/30_timeout.yaml | 29 +++++++++++++++++ .../test/update-by-query/30_timeout.yaml | 26 +++++++++++++++ .../rest-api-spec/api/update-by-query.json | 4 +++ 14 files changed, 135 insertions(+), 27 deletions(-) create mode 100644 qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/reindex/30_timeout.yaml create mode 100644 qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/update-by-query/30_timeout.yaml diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java index 861c03cd7068..4de06c88b8d8 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java @@ -103,7 +103,8 @@ public abstract class AbstractAsyncBulkByScrollAction docs); - protected abstract Response buildResponse(TimeValue took, List indexingFailures, List searchFailures); + protected abstract Response buildResponse(TimeValue took, List indexingFailures, List searchFailures, + boolean timedOut); public void start() { initialSearch(); @@ -161,8 +162,13 @@ public abstract class AbstractAsyncBulkByScrollAction 0) { - startNormalTermination(emptyList(), 
unmodifiableList(Arrays.asList(searchResponse.getShardFailures()))); + if ( // If any of the shards failed that should abort the request. + (searchResponse.getShardFailures() != null && searchResponse.getShardFailures().length > 0) + // Timeouts aren't shard failures but we still need to pass them back to the user. + || searchResponse.isTimedOut() + ) { + startNormalTermination(emptyList(), unmodifiableList(Arrays.asList(searchResponse.getShardFailures())), + searchResponse.isTimedOut()); return; } long total = searchResponse.getHits().totalHits(); @@ -176,7 +182,7 @@ public abstract class AbstractAsyncBulkByScrollAction= mainRequest.getSize()) { // We've processed all the requested docs. - startNormalTermination(emptyList(), emptyList()); + startNormalTermination(emptyList(), emptyList(), false); return; } startNextScroll(); @@ -311,9 +317,9 @@ public abstract class AbstractAsyncBulkByScrollAction indexingFailures, List searchFailures) { + void startNormalTermination(List indexingFailures, List searchFailures, boolean timedOut) { if (false == mainRequest.isRefresh()) { - finishHim(null, indexingFailures, searchFailures); + finishHim(null, indexingFailures, searchFailures, timedOut); return; } RefreshRequest refresh = new RefreshRequest(); @@ -321,7 +327,7 @@ public abstract class AbstractAsyncBulkByScrollAction() { @Override public void onResponse(RefreshResponse response) { - finishHim(null, indexingFailures, searchFailures); + finishHim(null, indexingFailures, searchFailures, timedOut); } @Override @@ -337,7 +343,7 @@ public abstract class AbstractAsyncBulkByScrollAction indexingFailures, List searchFailures) { + void finishHim(Throwable failure, List indexingFailures, List searchFailures, boolean timedOut) { String scrollId = scroll.get(); if (Strings.hasLength(scrollId)) { /* @@ -369,7 +376,8 @@ public abstract class AbstractAsyncBulkByScrollAction indexingFailures; private List searchFailures; + private boolean timedOut; public BulkIndexByScrollResponse() { } 
public BulkIndexByScrollResponse(TimeValue took, BulkByScrollTask.Status status, List indexingFailures, - List searchFailures) { + List searchFailures, boolean timedOut) { this.took = took; this.status = requireNonNull(status, "Null status not supported"); this.indexingFailures = indexingFailures; this.searchFailures = searchFailures; + this.timedOut = timedOut; } public TimeValue getTook() { @@ -103,6 +105,13 @@ public class BulkIndexByScrollResponse extends ActionResponse implements ToXCont return searchFailures; } + /** + * Did any of the sub-requests that were part of this request timeout? + */ + public boolean isTimedOut() { + return timedOut; + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); @@ -116,6 +125,7 @@ public class BulkIndexByScrollResponse extends ActionResponse implements ToXCont for (ShardSearchFailure failure: searchFailures) { failure.writeTo(out); } + out.writeBoolean(timedOut); } @Override @@ -135,11 +145,13 @@ public class BulkIndexByScrollResponse extends ActionResponse implements ToXCont searchFailures.add(readShardSearchFailure(in)); } this.searchFailures = unmodifiableList(searchFailures); + this.timedOut = in.readBoolean(); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field("took", took.millis()); + builder.field("timed_out", timedOut); status.innerXContent(builder, params, false, false); builder.startArray("failures"); for (Failure failure: indexingFailures) { diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java index 24fdb16b3979..6a46a2c8e496 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java +++ 
b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java @@ -36,6 +36,9 @@ public class BulkIndexByScrollResponseContentListener status.getStatus()) { status = failure.getStatus(); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexResponse.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexResponse.java index a4aee0c00d36..7e74fe26ec2c 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexResponse.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexResponse.java @@ -35,8 +35,9 @@ public class ReindexResponse extends BulkIndexByScrollResponse { public ReindexResponse() { } - public ReindexResponse(TimeValue took, Status status, List indexingFailures, List searchFailures) { - super(took, status, indexingFailures, searchFailures); + public ReindexResponse(TimeValue took, Status status, List indexingFailures, List searchFailures, + boolean timedOut) { + super(took, status, indexingFailures, searchFailures, timedOut); } public long getCreated() { @@ -46,6 +47,7 @@ public class ReindexResponse extends BulkIndexByScrollResponse { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field("took", getTook()); + builder.field("timed_out", isTimedOut()); getStatus().innerXContent(builder, params, true, false); builder.startArray("failures"); for (Failure failure: getIndexingFailures()) { diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java index f4afd8c36e14..17214ad15c5c 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java @@ -107,7 +107,10 @@ public class 
RestUpdateByQueryAction extends internalRequest.setSize(internalRequest.getSearchRequest().source().size()); internalRequest.setPipeline(request.param("pipeline")); internalRequest.getSearchRequest().source().size(request.paramAsInt("scroll_size", scrollSize)); - + // Let the requester set search timeout. It is probably only going to be useful for testing but who knows. + if (request.hasParam("search_timeout")) { + internalRequest.getSearchRequest().source().timeout(request.paramAsTime("search_timeout", null)); + } execute(request, internalRequest, channel); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index 069ee032f8e3..dbe464e98b45 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -191,8 +191,9 @@ public class TransportReindexAction extends HandledTransportAction indexingFailures, List searchFailures) { - return new ReindexResponse(took, task.getStatus(), indexingFailures, searchFailures); + protected ReindexResponse buildResponse(TimeValue took, List indexingFailures, List searchFailures, + boolean timedOut) { + return new ReindexResponse(took, task.getStatus(), indexingFailures, searchFailures, timedOut); } /* diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java index 0e13c6718dd7..d004e86ac0c0 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java @@ -96,8 +96,8 @@ public class TransportUpdateByQueryAction extends HandledTransportAction indexingFailures, - 
List searchFailures) { - return new BulkIndexByScrollResponse(took, task.getStatus(), indexingFailures, searchFailures); + List searchFailures, boolean timedOut) { + return new BulkIndexByScrollResponse(took, task.getStatus(), indexingFailures, searchFailures, timedOut); } @Override diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index ae05f3270df9..2aedd603fbc3 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -248,15 +248,33 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { */ public void testShardFailuresAbortRequest() throws Exception { ShardSearchFailure shardFailure = new ShardSearchFailure(new RuntimeException("test")); - new DummyAbstractAsyncBulkByScrollAction() - .onScrollResponse(new SearchResponse(null, scrollId(), 5, 4, randomLong(), new ShardSearchFailure[] { shardFailure })); + InternalSearchResponse internalResponse = new InternalSearchResponse(null, null, null, null, false, null); + new DummyAbstractAsyncBulkByScrollAction().onScrollResponse( + new SearchResponse(internalResponse, scrollId(), 5, 4, randomLong(), new ShardSearchFailure[] { shardFailure })); BulkIndexByScrollResponse response = listener.get(); assertThat(response.getIndexingFailures(), emptyCollectionOf(Failure.class)); assertThat(response.getSearchFailures(), contains(shardFailure)); + assertFalse(response.isTimedOut()); assertNull(response.getReasonCancelled()); assertThat(client.scrollsCleared, contains(scrollId)); } + /** + * Mimicks search timeouts. 
+ */ + public void testSearchTimeoutsAbortRequest() throws Exception { + InternalSearchResponse internalResponse = new InternalSearchResponse(null, null, null, null, true, null); + new DummyAbstractAsyncBulkByScrollAction() + .onScrollResponse(new SearchResponse(internalResponse, scrollId(), 5, 4, randomLong(), new ShardSearchFailure[0])); + BulkIndexByScrollResponse response = listener.get(); + assertThat(response.getIndexingFailures(), emptyCollectionOf(Failure.class)); + assertThat(response.getSearchFailures(), emptyCollectionOf(ShardSearchFailure.class)); + assertTrue(response.isTimedOut()); + assertNull(response.getReasonCancelled()); + assertThat(client.scrollsCleared, contains(scrollId)); + } + + /** * Mimicks bulk indexing failures. */ @@ -396,7 +414,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { public void testCancelBeforeStartNormalTermination() throws Exception { // Refresh or not doesn't matter - we don't try to refresh. mainRequest.setRefresh(usually()); - cancelTaskCase((DummyAbstractAsyncBulkByScrollAction action) -> action.startNormalTermination(emptyList(), emptyList())); + cancelTaskCase((DummyAbstractAsyncBulkByScrollAction action) -> action.startNormalTermination(emptyList(), emptyList(), false)); // This wouldn't return if we called refresh - the action would hang waiting for the refresh that we haven't mocked. 
} @@ -430,8 +448,8 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { @Override protected BulkIndexByScrollResponse buildResponse(TimeValue took, List indexingFailures, - List searchFailures) { - return new BulkIndexByScrollResponse(took, task.getStatus(), indexingFailures, searchFailures); + List searchFailures, boolean timedOut) { + return new BulkIndexByScrollResponse(took, task.getStatus(), indexingFailures, searchFailures, timedOut); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java index f5c31fe8f420..6e1cbb59e862 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java @@ -102,7 +102,7 @@ public class RoundTripTests extends ESTestCase { public void testReindexResponse() throws IOException { ReindexResponse response = new ReindexResponse(timeValueMillis(randomPositiveLong()), randomStatus(), randomIndexingFailures(), - randomSearchFailures()); + randomSearchFailures(), randomBoolean()); ReindexResponse tripped = new ReindexResponse(); roundTrip(response, tripped); assertResponseEquals(response, tripped); @@ -110,7 +110,7 @@ public class RoundTripTests extends ESTestCase { public void testBulkIndexByScrollResponse() throws IOException { BulkIndexByScrollResponse response = new BulkIndexByScrollResponse(timeValueMillis(randomPositiveLong()), randomStatus(), - randomIndexingFailures(), randomSearchFailures()); + randomIndexingFailures(), randomSearchFailures(), randomBoolean()); BulkIndexByScrollResponse tripped = new BulkIndexByScrollResponse(); roundTrip(response, tripped); assertResponseEquals(response, tripped); diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml index 
7f84c1aac8b8..a00fefc444aa 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml @@ -75,6 +75,7 @@ index: source dest: index: dest + - is_false: timed_out - match: {task: '/.+:\d+/'} - set: {task: task} - is_false: updated diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml index 94ffa2349a9e..b4ebb93c3277 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml @@ -12,6 +12,7 @@ - do: update-by-query: index: test + - is_false: timed_out - match: {updated: 1} - match: {version_conflicts: 0} - match: {batches: 1} diff --git a/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/reindex/30_timeout.yaml b/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/reindex/30_timeout.yaml new file mode 100644 index 000000000000..533dbc3462b3 --- /dev/null +++ b/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/reindex/30_timeout.yaml @@ -0,0 +1,29 @@ +--- +"Timeout": + - do: + index: + index: twitter + type: tweet + id: 1 + body: { "user": "kimchy" } + - do: + indices.refresh: {} + + - do: + catch: request_timeout + reindex: + refresh: true + body: + source: + index: twitter + timeout: 10ms + query: + script: + # Sleep 100x longer than the timeout. That should cause a timeout! + # Return true causes the document to try to be collected which is what actually triggers the timeout. 
+ script: sleep(1000); return true + dest: + index: new_twitter + - is_true: timed_out + - match: {created: 0} + - match: {noops: 0} diff --git a/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/update-by-query/30_timeout.yaml b/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/update-by-query/30_timeout.yaml new file mode 100644 index 000000000000..2a291bf0541e --- /dev/null +++ b/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/update-by-query/30_timeout.yaml @@ -0,0 +1,26 @@ +--- +"Timeout": + - do: + index: + index: twitter + type: tweet + id: 1 + body: { "user": "kimchy" } + - do: + indices.refresh: {} + + - do: + catch: request_timeout + update-by-query: + index: twitter + refresh: true + search_timeout: 10ms + body: + query: + script: + # Sleep 100x longer than the timeout. That should cause a timeout! + # Return true causes the document to try to be collected which is what actually triggers the timeout. + script: sleep(1000); return true + - is_true: timed_out + - match: {updated: 0} + - match: {noops: 0} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update-by-query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update-by-query.json index 9d5183ee4f3f..dca49cbcc6a1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update-by-query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update-by-query.json @@ -105,6 +105,10 @@ "options" : ["query_then_fetch", "dfs_query_then_fetch"], "description" : "Search operation type" }, + "search_timeout": { + "type" : "time", + "description" : "Explicit timeout for each search request. Defaults to no timeout." 
+ }, "size": { "type" : "number", "description" : "Number of hits to return (default: 10)" From 378e0d2c0cdd08385a5218044037490cf1eb5ae1 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 8 Mar 2016 17:18:06 -0500 Subject: [PATCH 160/320] [reindex][test] Script changes destination index --- .../rest-api-spec/test/reindex/10_script.yaml | 48 +++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/reindex/10_script.yaml b/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/reindex/10_script.yaml index d37a94deea70..aa553a5c9dc8 100644 --- a/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/reindex/10_script.yaml +++ b/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/reindex/10_script.yaml @@ -395,3 +395,51 @@ match: user: otherkimchy - match: { hits.total: 1 } + +--- +"Change index to write to a different index": + - do: + index: + index: twitter + type: tweet + id: 1 + body: { "user": "kimchy" } + - do: + index: + index: twitter + type: tweet + id: 2 + body: { "user": "another" } + - do: + indices.refresh: {} + + - do: + reindex: + refresh: true + body: + source: + index: twitter + dest: + index: new_twitter + script: + inline: if (ctx._source.user == "kimchy") {ctx._index = 'other_new_twitter'} + - match: {created: 2} + - match: {noops: 0} + + - do: + search: + index: other_new_twitter + body: + query: + match: + user: kimchy + - match: { hits.total: 1 } + + - do: + search: + index: new_twitter + body: + query: + match: + user: another + - match: { hits.total: 1 } From 94798a367219fc9d2db12429de3064a6ca652f58 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 10 Mar 2016 13:12:29 -0500 Subject: [PATCH 161/320] [reindex][test] Fix timeout value --- .../test/resources/rest-api-spec/test/reindex/30_timeout.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/reindex/30_timeout.yaml b/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/reindex/30_timeout.yaml index 533dbc3462b3..ddd22246717a 100644 --- a/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/reindex/30_timeout.yaml +++ b/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/reindex/30_timeout.yaml @@ -16,7 +16,7 @@ body: source: index: twitter - timeout: 10ms + timeout: 10 query: script: # Sleep 100x longer than the timeout. That should cause a timeout! From 982a369efc7759f2062378468a5585770ca1edf9 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Thu, 10 Mar 2016 11:34:34 -0700 Subject: [PATCH 162/320] [DOCS] Document the modifiers for `field_value_factor` Resolves #13511 --- .../query-dsl/function-score-query.asciidoc | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/docs/reference/query-dsl/function-score-query.asciidoc b/docs/reference/query-dsl/function-score-query.asciidoc index 39fdae802421..4c8f93bcede8 100644 --- a/docs/reference/query-dsl/function-score-query.asciidoc +++ b/docs/reference/query-dsl/function-score-query.asciidoc @@ -215,6 +215,20 @@ There are a number of options for the `field_value_factor` function: `log1p`, `log2p`, `ln`, `ln1p`, `ln2p`, `square`, `sqrt`, or `reciprocal`. Defaults to `none`. 
+|==================================== +| Modifier | Meaning + +| `none` | Do not apply any multiplier to the field value +| `log` | Take the https://en.wikipedia.org/wiki/Logarithm[logarithm] of the field value +| `log1p` | Add 1 to the field value and take the logarithm +| `log2p` | Add 2 to the field value and take the logarithm +| `ln` | Take the https://en.wikipedia.org/wiki/Natural_logarithm[natural logarithm] of the field value +| `ln1p` | Add 1 to the field value and take the natural logarithm +| `ln2p` | Add 2 to the field value and take the natural logarithm +| `square` | Square the field value (multiply it by itself) +| `sqrt` | Take the https://en.wikipedia.org/wiki/Square_root[square root] of the field value +| `reciprocal` | https://en.wikipedia.org/wiki/Multiplicative_inverse[Reciprocate] the field value, same as `1/x` where `x` is the field's value + `missing`:: Value used if the document doesn't have that field. The modifier From b2eec960457dcfb8655efc1d4aa5768ca9a53a05 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 1 Mar 2016 13:48:50 -0500 Subject: [PATCH 163/320] [reindex] Make search failure cause rest failure Indexing failures have caused the reindex http request to fail for a while now. Both search and indexing failures cause it to abort. But search failures didn't cause a non-200 response code from the http api. This fixes that. Also slips in a fix to some infrequently failing rest tests. 
Closes #16037 --- ...kIndexByScrollResponseContentListener.java | 12 +++++++ .../rest-api-spec/test/reindex/10_basic.yaml | 3 +- .../test/update-by-query/10_basic.yaml | 3 +- .../test/reindex/40_search_failures.yaml | 34 +++++++++++++++++++ .../update-by-query/40_search_failure.yaml | 30 ++++++++++++++++ 5 files changed, 80 insertions(+), 2 deletions(-) create mode 100644 qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/reindex/40_search_failures.yaml create mode 100644 qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/update-by-query/40_search_failure.yaml diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java index 6a46a2c8e496..24612aa14dec 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java @@ -19,7 +19,9 @@ package org.elasticsearch.index.reindex; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; +import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestToXContentListener; @@ -35,6 +37,10 @@ public class BulkIndexByScrollResponseContentListener status.getStatus()) { + status = failureStatus; + } + } return status; } } diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml index a00fefc444aa..31e97967af0f 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml +++ 
b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml @@ -126,7 +126,8 @@ - match: {failures.0.id: "1"} - match: {failures.0.status: 409} - match: {failures.0.cause.type: version_conflict_engine_exception} - - match: {failures.0.cause.reason: "[foo][1]: version conflict, document already exists (current version [1])"} + # Use a regex so we don't mind if the version isn't always 1. Sometimes it comes out 2. + - match: {failures.0.cause.reason: "/\\[foo\\]\\[1\\]:.version.conflict,.document.already.exists.\\(current.version.\\[\\d+\\]\\)/"} - match: {failures.0.cause.shard: /\d+/} - match: {failures.0.cause.index: dest} - is_true: took diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml index b4ebb93c3277..65db8a5e66fd 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml @@ -87,7 +87,8 @@ - match: {failures.0.id: "1"} - match: {failures.0.status: 409} - match: {failures.0.cause.type: version_conflict_engine_exception} - - match: {failures.0.cause.reason: "[foo][1]: version conflict, current version [2] is different than the one provided [1]"} + # Use a regex so we don't mind if the current version isn't always 1. Sometimes it comes out 2. 
+ - match: {failures.0.cause.reason: "/\\[foo\\]\\[1\\]:.version.conflict,.current.version.\\[\\d+\\].is.different.than.the.one.provided.\\[\\d+\\]/"} - match: {failures.0.cause.shard: /\d+/} - match: {failures.0.cause.index: test} - is_true: took diff --git a/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/reindex/40_search_failures.yaml b/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/reindex/40_search_failures.yaml new file mode 100644 index 000000000000..50442c2d51be --- /dev/null +++ b/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/reindex/40_search_failures.yaml @@ -0,0 +1,34 @@ +--- +"Response format search failures": + - do: + index: + index: source + type: foo + id: 1 + body: { "text": "test" } + - do: + indices.refresh: {} + + - do: + catch: request + reindex: + body: + source: + index: source + query: + script: + script: 1/0 # Divide by 0 to cause a search time exception + dest: + index: dest + - match: {created: 0} + - match: {updated: 0} + - match: {version_conflicts: 0} + - match: {batches: 0} + - is_true: failures.0.shard + - match: {failures.0.index: source} + - is_true: failures.0.node + - match: {failures.0.reason.type: script_exception} + - match: {failures.0.reason.reason: "failed to run inline script [1/0] using lang [groovy]"} + - match: {failures.0.reason.caused_by.type: arithmetic_exception} + - match: {failures.0.reason.caused_by.reason: Division by zero} + - is_true: took diff --git a/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/update-by-query/40_search_failure.yaml b/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/update-by-query/40_search_failure.yaml new file mode 100644 index 000000000000..8f89409c586b --- /dev/null +++ b/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/update-by-query/40_search_failure.yaml @@ -0,0 +1,30 @@ +--- +"Response format search failures": + - do: + index: + 
index: source + type: foo + id: 1 + body: { "text": "test" } + - do: + indices.refresh: {} + + - do: + catch: request + update-by-query: + index: source + body: + query: + script: + script: 1/0 # Divide by 0 to cause a search time exception + - match: {updated: 0} + - match: {version_conflicts: 0} + - match: {batches: 0} + - is_true: failures.0.shard + - match: {failures.0.index: source} + - is_true: failures.0.node + - match: {failures.0.reason.type: script_exception} + - match: {failures.0.reason.reason: "failed to run inline script [1/0] using lang [groovy]"} + - match: {failures.0.reason.caused_by.type: arithmetic_exception} + - match: {failures.0.reason.caused_by.reason: Division by zero} + - is_true: took From 304cbbbf31db881a30ba5544d491baa5635034f1 Mon Sep 17 00:00:00 2001 From: thefourtheye Date: Fri, 11 Mar 2016 00:31:38 +0530 Subject: [PATCH 164/320] fix redundant stack in comments --- .../elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java index d355276ceaed..2b92c4fd8c12 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java @@ -47,7 +47,7 @@ public class GceDiscoveryPlugin extends Plugin { static { /* * GCE's http client changes access levels because its silly and we - * can't allow that on any old stack stack so we pull it here, up front, + * can't allow that on any old stack so we pull it here, up front, * so we can cleanly check the permissions for it. 
Without this changing * the permission can fail if any part of core is on the stack because * our plugin permissions don't allow core to "reach through" plugins to From 51d87d94dc5b565ae4ab964bf7f4bd64329e8331 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 10 Mar 2016 11:17:25 -0800 Subject: [PATCH 165/320] Add getClassLoader perm for tika in ingest --- .../main/java/org/elasticsearch/ingest/attachment/TikaImpl.java | 2 ++ .../src/main/plugin-metadata/plugin-security.policy | 2 ++ .../test/ingest_attachment/30_files_supported.yaml | 2 +- 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java index 2ea977b4dd1c..27a4cfebbcd2 100644 --- a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java +++ b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java @@ -137,6 +137,8 @@ final class TikaImpl { perms.add(new SecurityPermission("putProviderProperty.BC")); perms.add(new SecurityPermission("insertProvider")); perms.add(new ReflectPermission("suppressAccessChecks")); + // xmlbeans, use by POI, needs to get the context classloader + perms.add(new RuntimePermission("getClassLoader")); perms.setReadOnly(); return perms; } diff --git a/plugins/ingest-attachment/src/main/plugin-metadata/plugin-security.policy b/plugins/ingest-attachment/src/main/plugin-metadata/plugin-security.policy index e23e9f4d0cfe..adf76991b59a 100644 --- a/plugins/ingest-attachment/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/ingest-attachment/src/main/plugin-metadata/plugin-security.policy @@ -27,4 +27,6 @@ grant { permission java.security.SecurityPermission "insertProvider"; // TODO: fix POI XWPF to not do this: https://bz.apache.org/bugzilla/show_bug.cgi?id=58597 permission java.lang.reflect.ReflectPermission 
"suppressAccessChecks"; + // needed by xmlbeans, as part of POI for MS xml docs + permission java.lang.RuntimePermission "getClassLoader"; }; diff --git a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/30_files_supported.yaml b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/30_files_supported.yaml index 48fded555b6b..4f56603a6787 100644 --- a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/30_files_supported.yaml +++ b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/30_files_supported.yaml @@ -73,7 +73,7 @@ - match: { _source.attachment.content: "Test elasticsearch" } - match: { _source.attachment.language: "et" } - match: { _source.attachment.author: "David Pilato" } - - match: { _source.attachment.date: "2016-03-10T08:25:00Z" } + - match: { _source.attachment.date: "2016-03-10T08:24:00Z" } - match: { _source.attachment.content_length: "19" } - match: { _source.attachment.content_type: "application/vnd.openxmlformats-officedocument.wordprocessingml.document" } From 5e5bb04e2f0225502d59ba87db4b1b17b090b079 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Thu, 10 Mar 2016 20:18:34 +0100 Subject: [PATCH 166/320] In es_release_notes.pl, added non-issue and docs --- dev-tools/es_release_notes.pl | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/dev-tools/es_release_notes.pl b/dev-tools/es_release_notes.pl index c3e93f91076c..1a7565bfc56d 100755 --- a/dev-tools/es_release_notes.pl +++ b/dev-tools/es_release_notes.pl @@ -29,7 +29,7 @@ my $Issue_URL = "http://github.com/${User_Repo}issues/"; my @Groups = qw( breaking deprecation feature - enhancement bug regression upgrade build doc test + enhancement bug regression upgrade non-issue build docs test ); my %Group_Labels = ( breaking => 'Breaking changes', @@ -42,6 +42,7 @@ my %Group_Labels = ( regression => 'Regressions', test => 'Tests', upgrade => 'Upgrades', + 
"non-issue" => 'Non-issue', other => 'NOT CLASSIFIED', ); @@ -157,6 +158,8 @@ sub fetch_issues { ISSUE: for my $issue (@issues) { next if $seen{ $issue->{number} } && !$issue->{pull_request}; + # uncomment for including/excluding PRs already issued in other versions + # next if grep {$_->{name}=~/^v2/} @{$issue->{labels}}; my %labels = map { $_->{name} => 1 } @{ $issue->{labels} }; my ($header) = map { substr( $_, 1 ) } grep {/^:/} keys %labels; $header ||= 'NOT CLASSIFIED'; From 39a067aa3f453480940d2124b813c13593a87c06 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Fri, 13 Nov 2015 11:51:55 -0500 Subject: [PATCH 167/320] Fix NPE in Derivative Pipeline when current bucket is null Sequence of events that lead to the NPE: - avg metric returns NaN for buckets - Movavg skips NaN or null buckets, and simply re-uses the existing bucket (e.g. doesn't add a 'movavg' field) - Derivative references Movavg, the bucket resolution returns null because Movavg wasn't added to the bucket, NPE when trying to subtract null values --- .../DerivativePipelineAggregator.java | 2 +- .../aggregations/pipeline/DerivativeIT.java | 38 +++++++++++++++++-- 2 files changed, 35 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java index 1289da661c2f..cea99cf868e8 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java @@ -92,7 +92,7 @@ public class DerivativePipelineAggregator extends PipelineAggregator { for (InternalHistogram.Bucket bucket : buckets) { Long thisBucketKey = resolveBucketKeyAsLong(bucket); Double thisBucketValue = resolveBucketValue(histo, bucket, bucketsPaths()[0], gapPolicy); - if 
(lastBucketValue != null) { + if (lastBucketValue != null && thisBucketValue != null) { double gradient = thisBucketValue - lastBucketValue; double xDiff = -1; if (xAxisUnits != null) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java index 14e99f0d0379..8b7841f0fe48 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.search.aggregations.metrics.stats.Stats; import org.elasticsearch.search.aggregations.metrics.sum.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.derivative.Derivative; +import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel; import org.elasticsearch.search.aggregations.support.AggregationPath; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; @@ -43,11 +44,9 @@ import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.search.aggregations.AggregationBuilders.filters; -import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; -import static org.elasticsearch.search.aggregations.AggregationBuilders.stats; -import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; +import static org.elasticsearch.search.aggregations.AggregationBuilders.*; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.derivative; +import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.movingAvg; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; 
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.closeTo; @@ -614,6 +613,37 @@ public class DerivativeIT extends ESIntegTestCase { } } + public void testAvgMovavgDerivNPE() throws Exception { + createIndex("movavg_npe"); + ensureYellow("movavg_npe"); + + for (int i = 0; i < 10; i++) { + Integer value = i; + if (i == 1 || i == 3) { + value = null; + } + + XContentBuilder doc = jsonBuilder() + .startObject() + .field("tick", i) + .field("value", value) + .endObject(); + client().prepareIndex("movavg_npe", "type").setSource(doc).get(); + } + + refresh(); + + SearchResponse response = client() + .prepareSearch("movavg_npe") + .addAggregation( + histogram("histo").field("tick").interval(1) + .subAggregation(avg("avg").field("value")) + .subAggregation(movingAvg("movavg", "avg").modelBuilder(new SimpleModel.SimpleModelBuilder()).window(3)) + .subAggregation(derivative("deriv", "movavg"))).execute().actionGet(); + + assertSearchResponse(response); + } + private void checkBucketKeyAndDocCount(final String msg, final Histogram.Bucket bucket, final long expectedKey, final long expectedDocCount) { assertThat(msg, bucket, notNullValue()); From b3bd189cd72b04841615857e725630f4154792d5 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Thu, 10 Mar 2016 13:35:12 -0700 Subject: [PATCH 168/320] [DOCS] Fix missing table end in function-score-query.asciidoc --- docs/reference/query-dsl/function-score-query.asciidoc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/reference/query-dsl/function-score-query.asciidoc b/docs/reference/query-dsl/function-score-query.asciidoc index 4c8f93bcede8..1e736efa11d7 100644 --- a/docs/reference/query-dsl/function-score-query.asciidoc +++ b/docs/reference/query-dsl/function-score-query.asciidoc @@ -215,7 +215,8 @@ There are a number of options for the `field_value_factor` function: `log1p`, `log2p`, `ln`, `ln1p`, `ln2p`, `square`, `sqrt`, or 
`reciprocal`. Defaults to `none`. -|==================================== +[cols="<,<",options="header",] +|======================================================================= | Modifier | Meaning | `none` | Do not apply any multiplier to the field value @@ -228,6 +229,7 @@ There are a number of options for the `field_value_factor` function: | `square` | Square the field value (multiply it by itself) | `sqrt` | Take the https://en.wikipedia.org/wiki/Square_root[square root] of the field value | `reciprocal` | https://en.wikipedia.org/wiki/Multiplicative_inverse[Reciprocate] the field value, same as `1/x` where `x` is the field's value +|======================================================================= `missing`:: From 9d340e6b082b6c394d101496fc8c89ecd3ced716 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Thu, 10 Mar 2016 16:05:59 -0500 Subject: [PATCH 169/320] Fix import formatting - do not use wildcard --- .../search/aggregations/pipeline/DerivativeIT.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java index 8b7841f0fe48..393e1bbfdea2 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java @@ -44,7 +44,11 @@ import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.search.aggregations.AggregationBuilders.*; +import static org.elasticsearch.search.aggregations.AggregationBuilders.filters; +import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; +import static org.elasticsearch.search.aggregations.AggregationBuilders.stats; +import static 
org.elasticsearch.search.aggregations.AggregationBuilders.sum; +import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.derivative; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.movingAvg; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; From 2f3efc3fe129cace575d88c485bc1b5f652fb9e8 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 10 Mar 2016 13:28:19 -0800 Subject: [PATCH 170/320] Add doc and docx rest test to mapper attachment along with getClassLoader permission --- .../index/mapper/FieldMapper.java | 3 +- .../mapper/attachments/TikaImpl.java | 2 + .../plugin-metadata/plugin-security.policy | 2 + .../50_files_supported.yaml | 78 +++++++++++++++++++ 4 files changed, 84 insertions(+), 1 deletion(-) create mode 100644 plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/50_files_supported.yaml diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index dba1355a3952..28ebb2546613 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -374,7 +374,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable { // this can happen if this mapper represents a mapping update return this; } else if (fieldType.getClass() != newFieldType.getClass()) { - throw new IllegalStateException("Mixing up field types: " + fieldType.getClass() + " != " + newFieldType.getClass()); + throw new IllegalStateException("Mixing up field types: " + + fieldType.getClass() + " != " + newFieldType.getClass() + " on field " + fieldType.name()); } MultiFields updatedMultiFields = multiFields.updateFieldType(fullNameToFieldType); if (fieldType == newFieldType && multiFields == 
updatedMultiFields) { diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/TikaImpl.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/TikaImpl.java index fa9a2d06f8e9..2babda8ad00f 100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/TikaImpl.java +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/TikaImpl.java @@ -137,6 +137,8 @@ final class TikaImpl { perms.add(new SecurityPermission("putProviderProperty.BC")); perms.add(new SecurityPermission("insertProvider")); perms.add(new ReflectPermission("suppressAccessChecks")); + // xmlbeans, use by POI, needs to get the context classloader + perms.add(new RuntimePermission("getClassLoader")); perms.setReadOnly(); return perms; } diff --git a/plugins/mapper-attachments/src/main/plugin-metadata/plugin-security.policy b/plugins/mapper-attachments/src/main/plugin-metadata/plugin-security.policy index e23e9f4d0cfe..adf76991b59a 100644 --- a/plugins/mapper-attachments/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/mapper-attachments/src/main/plugin-metadata/plugin-security.policy @@ -27,4 +27,6 @@ grant { permission java.security.SecurityPermission "insertProvider"; // TODO: fix POI XWPF to not do this: https://bz.apache.org/bugzilla/show_bug.cgi?id=58597 permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; + // needed by xmlbeans, as part of POI for MS xml docs + permission java.lang.RuntimePermission "getClassLoader"; }; diff --git a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/50_files_supported.yaml b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/50_files_supported.yaml new file mode 100644 index 000000000000..69991b9d0c04 --- /dev/null +++ b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/50_files_supported.yaml @@ -0,0 +1,78 @@ +setup: 
+ - do: + indices.create: + index: test + body: + mappings: + test: + properties: + file: + type: attachment + fields: + content: + store: true + author: + store: true + date: + store: true + content_length: + store: true + content_type: + store: true + + - do: + cluster.health: + wait_for_status: yellow +--- +"Test mapper attachment processor with .doc file": + + - do: + index: + index: test + type: test + id: 1 + refresh: true + body: { file: "0M8R4KGxGuEAAAAAAAAAAAAAAAAAAAAAPgADAP7/CQAGAAAAAAAAAAAAAAAEAAAAjAEAAAAAAAAAEAAAjgEAAAEAAAD+////AAAAAIgBAACJAQAAigEAAIsBAAD////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////spcEAg+kMBAAA8BK/AAAAAAABEQABAAEACAAAEwgAAA4AYmpiaoI4gjgAAAAAAAAAAAAAAAAAAAAAAAAMBBYANA4AAOBSAADgUgAAEwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//w8AAAAAAAAAAAD//w8AAAAAAAAAAAD//w8AAAAAAAAAAAAAAAAAAAAAALcAAAAAAFAHAAAAAAAAUAcAAMcUAAAAAAAAxxQAAAAAAADHFAAAAAAAAMcUAAAAAAAAxxQAABQAAAAAAAAAAAAAAP////8AAAAA2xQAAAAAAADbFAAAAAAAANsUAAAAAAAA2xQAAAwAAADnFAAADAAAANsUAAAAAAAA3hUAADABAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAAVRUAAAIAAABXFQAAAAAAAFcVAAAAAAAAVxUAAAAAAABXFQAAAAAAAFcVAAAAAAAAVxUAACwAAAAOFwAAtgIAAMQZAABaAAAAgxUAABUAAAAAAAAAAAAAAAAAAAAAAAAAxxQAAAAAAADzFAAAAAAAAAAAAAAAAAAAAAAAAAAAAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAADzFAAAAAAAAIMVAAAAAAAAGRUAAAAAAADHFAAAAAAAAMcUAAAAAAAA8xQAAAAAAAAAAAAAAAAAAPMUAAAAAAAAmBUAABYAAAAZFQAAAAAAABkVAAAAAAAAGRUAAAAAAADzFAAAFgAAAMcUAAAAAAAA8xQAAAAAAADHFAAAAAAAAPMUAAAAAAAAVRUAAAAAAAAAAAAAAAAAAB
kVAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8xQAAAAAAABVFQAAAAAAAAAAAAAAAAAAGRUAAAAAAAAAAAAAAAAAABkVAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGRUAAAAAAAAAAAAAAAAAAP////8AAAAAgI6XYKZ60QEAAAAAAAAAAP////8AAAAACRUAABAAAAAZFQAAAAAAAAAAAAAAAAAAQRUAABQAAACuFQAAMAAAAN4VAAAAAAAAGRUAAAAAAAAeGgAAAAAAABkVAAAAAAAAHhoAAAAAAAAZFQAAAAAAABkVAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADHFAAAAAAAABkVAAAoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAAgxUAAAAAAACDFQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGRUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPMUAAAAAAAA8xQAAAAAAADzFAAAAAAAAN4VAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAAAAAAAAAAAAAP////8AAAAA/////wAAAAD/////AAAAAAAAAAAAAAAA/////wAAAAD/////AAAAAP////8AAAAA/////wAAAAD/////AAAAAP////8AAAAA/////wAAAAD/////AAAAAP////8AAAAA/////wAAAAD/////AAAAAP////8AAAAA/////wAAAAD/////AAAAAB4aAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAABQBwAAPQwAAI0TAAA6AQAABwAMAQ8ADQEAAAwEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFRlc3QgZWxhc3RpY3NlYXJjaA0AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAABIIAAATCAAA/PgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYWaJVGuQAABhZo3wiGAAIACAAAEwgAAP0AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAATIAMZBoATpwpBeqAB+wfC4gsMhBIbCJBSKwiQUjkIkFJJCJBSWwAAAXsMQCGLDEAgyQxAIAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALgYPABIAAQB8AQ8ACAADAAMAAwAAAAQACAAAAJgAAACeAAAAngAAAJ4AAACeAAAAngAAAJ4AAACeAAAAngAAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAAHYCAAB2AgAAdgIAAHYCAAB2AgAAdgIAAHYCAAB2AgAAdgIAADYGAAA2BgAANgYAADYGAAA2BgAANgYAAD4CAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAACoAAAANgYAADYGAAAWAAAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAC4AAAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAAaAEAAEgBAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAAHACAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAAMgYAABgAAADGAwAA1gMAAOYDAAD2AwAABgQAABYEAAAmBAAANgQAAEYEAABWBAAAZgQAAHYEAACGBAAAlgQAAMYDAADWAwAA5gMAAPYDAAAGBAAAFgQAADIGAAAoAgAA2AEAAOgBAAAmBAAANgQAAEYEAABWBAAAZgQAAHYEAACGBAAAlgQAAMYDAADWAwAA5gMAAPYDAAAGBAAAFgQAACYEAAA2BAAARgQAAFYEAABmBAAAdgQAAIYEAACWBAAAxgMAANYDAADmAwAA9gMAAAYEAAAWBAAAJgQAADYEAABGBAAAVgQAAGYEAAB2BAAAhgQAAJYEAADGAwAA1gMAAOYDAAD2AwAABgQAABYEAAAmBAAANgQAAEYEAABWBAAAZgQAAHYEAACGBAAAlgQAAMYDAADWAwAA5gMAAPYDAAAGBAAAFgQAACYEAAA2BAAARgQAAFYEAABmBAAAdgQAAIYEAACWBAAAxgMAANYDAADmAw
AA9gMAAAYEAAAWBAAAJgQAADYEAABGBAAAVgQAAGYEAAB2BAAAhgQAAJYEAAA4AQAAWAEAAPgBAAAIAgAAGAIAAFYCAAB+AgAAkAIAAKACAACwAgAAwAIAANACAACAAgAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAADgAgAA8AIAAAADAAAQAwAAIAMAADADAABAAwAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAADgAgAA8AIAAAADAAAQAwAAIAMAADADAABAAwAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAADgAgAA8AIAAAADAAAQAwAAIAMAADADAABAAwAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAADgAgAA8AIAAAADAAAQAwAAIAMAADADAABAAwAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAAAgAAAAT0oDAFBKAwBRSgMAX0gBBG1IDARuSAwEc0gMBHRIDAQAAAAAQAAAYPH/AgBAAAwQAAAAAAAAAAAGAE4AbwByAG0AYQBsAAAAAgAAABgAQ0oYAF9IAQRhShgAbUgMBHNIDAR0SAkEAAAAAAAAAAAAAAAAAAAAAAAAOgBBIPL/oQA6AAwNAAAAAAAAEAARAFAAbwBsAGkAYwBlACAAcABhAHIAIABkAOkAZgBhAHUAdAAAAAAAVgBpAPP/swBWAAwNAAAAAAAAMAYOAFQAYQBiAGwAZQBhAHUAIABOAG8AcgBtAGEAbAAAABwAF/YDAAA01gYAAQoDbAA01gYAAQUDAABh9gMAAAIACwAAADIAayD0/8EAMgAADQAAAAAAADAGDABBAHUAYwB1AG4AZQAgAGwAaQBzAHQAZQAAAAIADAAAAAAAUEsDBBQABgAIAAAAIQCb6HBP/AAAABwCAAATAAAAW0NvbnRlbnRfVHlwZXNdLnhtbKyRy2rDMBBF94X+g9C22HK6KKXYzqKPXR+L9AMGeWyL2CMhTULy9x07LpQSAoVuBNLMvffMqFwfxkHtMSbnqdKrvNAKyfrGUVfpz81Ldq9VYqAGBk9Y6SMmva6vr8rNMWBSoqZU6Z45PBiTbI8jpNwHJKm0Po7Aco2dCWC30KG5LYo7Yz0xEmc8eei6fMIWdgOr54M8n0hErtXjqW+KqjSEMDgLLKBmqpqzuohDuiDcU/OLLlvIclHO5ql3Id0sCe+ymugaVB8Q+Q1G4TAsQ+LP8xVIRov5ZeYz0b5tncXG290o68hn48XsTwCr/4n+zjTz39ZfAAAA//8DAFBLAwQUAAYACAAAACEApdan58AAAAA2AQAACwAAAF9yZWxzLy5yZWxzhI/PasMwDIfvhb2D0X1R0sMYJXYvpZBDL6N9AOEof2giG9sb69tPxwYKuwiEpO/3qT3+rov54ZTnIBaaqgbD4kM/y2jhdj2/f4LJhaSnJQhbeHCGo3vbtV+8UNGjPM0xG6VItjCVEg+I2U+8Uq5CZNHJENJKRds0YiR/p5FxX9cfmJ4Z4DZM0/UWUtc3YK6PqMn/s8MwzJ5PwX+vLOVFBG43lExp5GKhqC/jU72QqGWq1B7Qtbj51v0BAAD//wMAUEsDBBQABgAIAAAAIQBreZYWgwAAAIoAAAAcAAAAdGhlbWUvdGhlbWUvdGhlbWVNYW5hZ2VyLnhtbAzMTQrDIBBA4X2hd5DZN2O7KEVissuuu/YAQ5waQceg0p/b1+XjgzfO3xTVm0sNWSycBw2KZc0uiLfwfCynG6jaSBzFLGzhxxXm6XgYybSNE99JyHNRfSPVkI
Wttd0g1rUr1SHvLN1euSRqPYtHV+jT9yniResrJgoCOP0BAAD//wMAUEsDBBQABgAIAAAAIQBtTVmryAYAAI4aAAAWAAAAdGhlbWUvdGhlbWUvdGhlbWUxLnhtbOxZ3YrbRhS+L/QdhO4d/0n+WeINtmxv2uwmIXbS5nJWHkuTHWmMZrwbEwJ9gkIhLb0p9K6F3gTaN+i7pLTpQ/TMSJZn7HH2hy2E0jUs8vg7Z7455+g7I83dey8T6pzjjBOW9tz6nZrr4DRkM5JGPffpdFzpuA4XKJ0hylLcc1eYu/cOP/3kLjoQMU6wA/YpP0A9NxZicVCt8hCGEb/DFjiF3+YsS5CAr1lUnWXoAvwmtNqo1VrVBJHUdVKUgNtp/PvP4OzRfE5C7B6uvY8oTJEKLgdCmk2kb1yYDJYZRkuFnZ3VJYKveEAz5xzRngsTzdjFFL8UrkMRF/BDz62pP7d6eLeKDgojKvbYanZj9VfYFQazs4aaM4tOy0k9z/da/dK/AlCxixu1R61Rq/SnACgMYaU5F92nP+gOhn6B1UD5pcX3sD1s1g285r+5w7nvy4+BV6Dcv7eDH48DiKKBV6Ac7+/gPa/dCDwDr0A5vrWDb9f6Q69t4BUopiQ920HX/FYzWK+2hMwZvW+Fd31v3G4UzjcoqIayuuQUc5aKfbWWoBcsGwNAAikSJHXEaoHnKIQyDhAlpxlxjkkUQ+EtUMo4DNcatXGtCf/lx1NXKiLoACPNWvICJnxnSPJxeJiRhei5n4NXV4M8XzpHTMQkLGZVTgyL+yiNdIv3P33z9w9fOX/9+uP7N9/mk27juY4f4jT6kqD0QxPAajdhePfd2z9+e/vu+6///OWNxX8/Q6c6fEoSzJ2H+MJ5whJYnGUF+DS7nsU0RkS36KcRRymSs1j8jyB+OvrhClFkwQ0gEjruWQYyYwMeLV8YhCdxthTE4vFBnBjAE8bogGXWKDyQc2lhni7TyD55ttRxTxA6t80doNTI82i5AH0lNpdBjA2ajylKBYpwioUjf2NnGFtW95wQI64nJMwYZ3PhPCfOABFrSKbk1KimjdF9kkBeVjaCkG8jNifPnAGjtlUP8bmJhLsDUQv5KaZGGI/QUqDE5nKKEqoH/BiJ2EZysspCHTfiAjIdYcqc0QxzbrN5lMF6taQ/AImxp/2ErhITmQlyZvN5jBjTkUN2FsQoWdiwE5LGOvYzfgYlipzHTNjgJ8y8Q+R3yAOIx750PyPYSPflavAU1FWntCkQ+csys+TyCDOjficrOkdYSQ2Iv6HpCUkvFfgtaff/PWk/IWkYM8uKbkvU7a6NjFxTzvsZsd5P97dEfB9uW7oDls3Ix6/cQ7RMH2O4WXbb1//C/b9wu/954d53P9++XG8UGsRbbl3zzbrauid7d+5zQulErCg+5mrzzqEvzcYwKO3UYysun+QWMVzKOxkmMHBRhpSNkzHxBRHxJEYL2OHXXekk4oXriDsLxmHjr4atviWeLpMTNssfWOt1+XCaiwdHYjNe88txeNgQObrV3jyEle4V20g9LK8JSNvrkNAmM0k0LSTa60EZJPVoDkGzkFAruxUWXQuLjnS/TtUOC6BWZgU2Tg5st3qu74EJGMEzFaJ4JvOUp3qdXZXM28z0vmAaFQC7iHUFbDLdlVz3Lk+uLi+1K2TaIKGVm0lCRUb1MB6jGS6qU45ehcZ1c93dpNSgJ0Oh5oPS2tBodz7E4qa5BrttbaCprhQ0dS56bqvpQ8mEaNFz5/DgD5fJAmqHyw0vohG8PgtFlt/wN1GWRcbFEPE4D7gSnVwNEiJw5lCS9Fy5/DINNFUaorjVGyAIHy25LsjKx0YOkm4mGc/nOBR62rURGen8Kyh8rhXWX5X5zcHSki0h3ZN4duGc0mX2BEGJ+e26DOCMcHj/U8+jOSPwQrMUsk39bTWmQnb1N4qqhvJxRBcxKjqKLuY5XEl5SUd9K2OgfSvWDAHVQlI0wtNINlg9qEY3LbtGzm
Fv173cSEZOE81NzzRURXZNu4oZM6zbwFYsb9bkNVbrEIOm6R0+l+5tye2utW5rn1B2CQh4GT9L171CQ9CobSYzqEnGuzIsNbsYNXvHeoGXULtKk9BUv7V2uxW3skdYp4PBG3V+sNuuWhiar/eVKtLq6EM/nGCnL0A8hvAaeEkFV6mEo4cMwYZoovYkuWzALfJSFLcGXDnLjPTcVzW/7wUNP6jUOv6o4jW9WqXj95uVvu836yO/XhsOGq+hsYg4qfv5scsYXkTRVXH4osZ3DmCS9bu2OyFLqkydrFQVcXUAU28YBzD5yYszlQcsrkNAdF61GuNusztoVbrN/rjiDQedSjdoDSrDVtAejoeB3+mOX7vOuQJ7/WbgtUadSqseBBWvVZP0O91K22s0+l673xl5/dfFNgZWnstHEQsIr+J1+A8AAAD//wMAUEsDBBQABgAIAAAAIQAN0ZCftgAAABsBAAAnAAAAdGhlbWUvdGhlbWUvX3JlbHMvdGhlbWVNYW5hZ2VyLnhtbC5yZWxzhI9NCsIwFIT3gncIb2/TuhCRJt2I0K3UA4TkNQ02PyRR7O0NriwILodhvplpu5edyRNjMt4xaKoaCDrplXGawW247I5AUhZOidk7ZLBggo5vN+0VZ5FLKE0mJFIoLjGYcg4nSpOc0IpU+YCuOKOPVuQio6ZByLvQSPd1faDxmwF8xSS9YhB71QAZllCa/7P9OBqJZy8fFl3+UUFz2YUFKKLGzOAjm6pMBMpburrE3wAAAP//AwBQSwECLQAUAAYACAAAACEAm+hwT/wAAAAcAgAAEwAAAAAAAAAAAAAAAAAAAAAAW0NvbnRlbnRfVHlwZXNdLnhtbFBLAQItABQABgAIAAAAIQCl1qfnwAAAADYBAAALAAAAAAAAAAAAAAAAAC0BAABfcmVscy8ucmVsc1BLAQItABQABgAIAAAAIQBreZYWgwAAAIoAAAAcAAAAAAAAAAAAAAAAABYCAAB0aGVtZS90aGVtZS90aGVtZU1hbmFnZXIueG1sUEsBAi0AFAAGAAgAAAAhAG1NWavIBgAAjhoAABYAAAAAAAAAAAAAAAAA0wIAAHRoZW1lL3RoZW1lL3RoZW1lMS54bWxQSwECLQAUAAYACAAAACEADdGQn7YAAAAbAQAAJwAAAAAAAAAAAAAAAADPCQAAdGhlbWUvdGhlbWUvX3JlbHMvdGhlbWVNYW5hZ2VyLnhtbC5yZWxzUEsFBgAAAAAFAAUAXQEAAMoKAAAAADw/eG1sIHZlcnNpb249IjEuMCIgZW5jb2Rpbmc9IlVURi04IiBzdGFuZGFsb25lPSJ5ZXMiPz4NCjxhOmNsck1hcCB4bWxuczphPSJodHRwOi8vc2NoZW1hcy5vcGVueG1sZm9ybWF0cy5vcmcvZHJhd2luZ21sLzIwMDYvbWFpbiIgYmcxPSJsdDEiIHR4MT0iZGsxIiBiZzI9Imx0MiIgdHgyPSJkazIiIGFjY2VudDE9ImFjY2VudDEiIGFjY2VudDI9ImFjY2VudDIiIGFjY2VudDM9ImFjY2VudDMiIGFjY2VudDQ9ImFjY2VudDQiIGFjY2VudDU9ImFjY2VudDUiIGFjY2VudDY9ImFjY2VudDYiIGhsaW5rPSJobGluayIgZm9sSGxpbms9ImZvbEhsaW5rIi8+AAAAABMAAAAUAAAOAAAIAP////8ACAAAEwgAAAUAAAAACAAAEwgAAAYAAAAAAAAABQAAABIAAAAVAAAABwAEAAcAAAAAABIAAAAVAAAABAAHAAQAAAAEAAAACAAAAOUAAAAAAAAAAwAAAN8IhgCkF6oAlUa5AH419AAAAAAAEwAAABUAAAAAAAAAAQAAAP9AAIABABIAAAASAAAAAEBDewEAAQASAAAAAAAAABIAAAAAAAAAAAAAAAAAAAACEAAAAAAAAAATAAAAoAAAEABAAAD//wEAAAAHAFUAbgBrAG4AbwB3AG4A//8BAA
gAAAAAAAAAAAAAAP//AQAAAAAA//8AAAIA//8AAAAA//8AAAIA//8AAAAABQAAAEcOkAEAAAICBgMFBAUCAwTvKgDgQXgAwAkAAAAAAAAA/wEAAAAAAABUAGkAbQBlAHMAIABOAGUAdwAgAFIAbwBtAGEAbgAAADUOkAECAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAgAAAAABTAHkAbQBiAG8AbAAAADMOkAEAAAILBgQCAgICAgT/KgDgQ3gAwAkAAAAAAAAA/wEAAAAAAABBAHIAaQBhAGwAAAA3DpABAAACDwUCAgIEAwIE/wIA4P+sAEABAAAAAAAAAJ8BAAAAAAAAQwBhAGwAaQBiAHIAaQAAAEESkAEBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABDAGEAbQBiAHIAaQBhACAATQBhAHQAaAAAACAABADxCIgIAPDEAgAAqQEAAAAAWVJDh1lSQ4cAAAAAAgABAAAAAgAAABEAAAABAAEAAAAEAAOQAQAAAAIAAAARAAAAAQABAAAAAQAAAAAAAAAhAwDwEAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAClBsAHtAC0AIGBcjAAAAAAAAAAAAAAAAAAABIAAAASAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAAAAABAAAAA8BAACAD8/QEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACSFAAAAAACfH/DwAAJFAAABAnAAD///9/////f////3////9/////f////3////9/3wiGAAAEAAAyAAAAAAAAAAAAAAAAAAAAAAAAAAAAIQQAAAAAAAAAAAAAAAAAAAAAAAAQHAAABAAAAAAAAAAAAHgAAAB4AAAAAAAAAAAAAACgBQAAGkjOCAsAAAAAAAAA3AAAAAEAAAD//xIAAAAAAAAAAAAAAAAAAAAMAEQAYQB2AGkAZAAgAFAAaQBsAGEAdABvAAwARABhAHYAaQBkACAAUABpAGwAYQB0AG8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP7/AAADCgEAAAAAAAAAAAAAAAAAAAAAAAEAAADghZ/y+U9oEKuRCAArJ7PZMAAAANzSAgASAAAAAQAAAJgAAAACAAAAoAAAAAMAAACsAAAABAAAALgAAAAFAAAA0AAAAAYAAADcAAAABwAAAOgAAAAIAAAA/AAAAAkAAAAUAQAAEgAAACABAAAKAAAARAEAAAwAAABQAQAADQAAAFwBAAAOAAAAaAEAAA8AAABwAQAAEAAAAHgBAAATAAAAgAEAABEAAACIAQAAAgAAABAnAAAeAAAABAAAAAAAAAAeAAAABAAAAAAAAAAeAAAAEAAAAERhdmlkIFBpbGF0bwAAAAAeAAAABAAAAAAAAAAeAAAABAAAAAAAAAAeAAAADAAAAE5vcm1hbC5kb3RtAB4AAAAQAAAARGF2aWQgUGlsYXRvAAAAAB4AAAAEAAAAMgAAAB4AAAAcAAAATWljcm9zb2Z0IE1hY2ludG9zaCBXb3JkAAAAAEAAAAAARsMjAAAAAEAAAAAAFjZWpnrRAUAAAAAAFjZWpnrRAQMAAAABAAAAAwAAAAIAAAADAAAAEQAAAAMAAAAAAAAARwAAAEzRAgD/////DgAAAAEAAABsAAAAAAAAAAAAAAD/AAAAswAAAAAAAAAAAAAAZhkAANsRAAAgRU1GAAABAETRAgAIAAAAAQAAAAAAAAAAAAAAAAAAAOwEAACxAwAAQAEAAPAAAAAAAAAAAAAAAAAAAAAA4gQAgKkDABEAAAAMAAAACAAAAAoAAAAQAAAAAAAAAAAAAAAJAAAAEAAAAAABAAC0AAAADAAAABAAAAAAAAAAAAAAAAsAAAAQAAAAAAEAALQAAABRAAAAeNACAAAAAAAAAAAA/wAAALMAAAAAAAAAAAAAAAAAAAAAAAAAAAEAALQAAABQAAAAKAAAAHgAAAAA0AIAAAAAACAAzAAAAQAAtAAAACgAAAAAAQAAtAAAAAEAIAAAAAAAANACAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////vr6+/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5O
T/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/76+vv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////7vf//+rz7v/Yzc3/0NLY/+DX2f/N4PL/3tXI/8jV4v/Q0cX/1tDI/9ve2f/U0tX/0NLQ/83I0P/I2N7/4tnI/9LZ4v/v6tz/5eXl////9////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////83g9//e3M3/vrG3/8TCxv/Xwrz/vdfu/8W/rv/K1tX/x8bB/8LJxv/Oxb7/yMTE/8vCwv+3scH/zd7Z/9DNyP/BwcT/z97X/82xq/////v////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////u9/v/+/Lu//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////++vr7/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5O
T/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/vr6+//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////8OAAAAFAAAAAAAAAAQAAAAFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD+/wAAAwoBAAAAAAAAAAAAAAAAAAAAAAABAAAAAtXN1ZwuGxCTlwgAKyz5rjAAAADUAAAACwAAAAEAAABgAAAABQAAAGgAAAAGAAAAcAAAABEAAAB4AAAAFwAAAIAAAAALAAAAiAAAABAAAACQAAAAEwAAAJgAAAAWAAAAoAAAAA0AAACoAAAADAAAALUAAAACAAAAECcAAAMAAAABAAAAAwAAAAEAAAADAAAAEgAAAAMAAAAAAA8ACwAAAAAAAAALAAAAAAAAAAsAAAAAAAAACwAAAAAAAAAeEAAAAQAAAAEAAAAADBAAAAIAAAAeAAAABgAAAFRpdHJlAAMAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAIAAAADAAAABAAAAAUAAAAGAAAABwAAAP7///8JAAAACgAAAAsAAAAMAAAADQAAAA4AAAAPAAAAEAAAABEAAAASAAAAEwAAABQAAAAVAAAA/v///xcAAAAYAAAAGQAAABoAAAAbAAAAHAAAAB0AAAAeAAAAHwAAACAAAAAhAAAAIgAAACMAAAAkAAAAJQAAACYAAAAnAAAAKAAAACkAAAAqAAAAKwAAACwAAAAtAAAALgAAAC8AAAAwAAAAMQAAADIAAAAzAAAANAAAADUAAAA2AAAANwAAADgAAAA5AAAAOgAAADsAAAA8AAAAPQAAAD4AAAA/AAAAQAAAAEEAAABCAAAAQwAAAEQAAABFAAAARgAAAEcAAABIAAAASQAAAEoAAABLAAAATAAAAE0AAABOAAAATwAAAFAAAABRAAAAUgAAAFMAAABUAAAAVQAAAFYAAABXAAAAWAAAAFkAAABaAAAAWwAAAFwAAABdAAAAXgAAAF8AAABgAAAAYQAAAGIAAABjAAAAZAAAAGUAAABmAAAAZwAAAGgAAABpAAAAagAAAGsAAABsAAAAbQAAAG4AAABvAAAAcAAAAHEAAAByAAAAcwAAAHQAAAB1AAAAdgAAAHcAAAB4AAAAeQAAAHoAAAB7AAAAfAAAAH0AAAB+AAAAfwAAAIAAAACBAAAAggAAAIMAAACEAAAAhQAAAIYAAACHAAAAiAAAAIkAAACKAAAAiwAAAIwAAACNAAAAjgAAAI8AAACQAAAAkQAAAJIAAACTAAAAlAAAAJUAAACWAAAAlwAAAJgAAACZAAAAmgAAAJsAAACcAAAAnQAAAJ4AAACfAAAAoAAAAKEAAACiAAAAowAAAKQAAAClAAAApgAAAKcAAACoAAAAqQAAAKoAAACrAAAArAAAAK0AAACuAAAArwAAALAAAACxAAAAsgAAALMAAAC0AAAAtQAAALYAAAC3AAAAuAAAALkAAAC6AAAAuwAAALwAAAC9AAAAvgAAAL8AAADAAAAAwQAAAMIAAADDAAAAxAAAAMUAAADGAAAAxwAAAMgAAADJAAAAygAAAMsAAADMAAAAzQAAAM4AAADPAAAA0AAAANEAAADSAAAA0wAAANQAAADVAAAA1gAAANcAAADYAAAA2QAAANoAAADbAAAA3AAAAN0AAADeAAAA3wAAAOAAAADhAAAA4gAAAOMAAADkAAAA5QAAAOYAAADnAAAA6AAAAOkAAADqAAAA6wAAAOwAAADtAA
AA7gAAAO8AAADwAAAA8QAAAPIAAADzAAAA9AAAAPUAAAD2AAAA9wAAAPgAAAD5AAAA+gAAAPsAAAD8AAAA/QAAAP4AAAD/AAAAAAEAAAEBAAACAQAAAwEAAAQBAAAFAQAABgEAAAcBAAAIAQAACQEAAAoBAAALAQAADAEAAA0BAAAOAQAADwEAABABAAARAQAAEgEAABMBAAAUAQAAFQEAABYBAAAXAQAAGAEAABkBAAAaAQAAGwEAABwBAAAdAQAAHgEAAB8BAAAgAQAAIQEAACIBAAAjAQAAJAEAACUBAAAmAQAAJwEAACgBAAApAQAAKgEAACsBAAAsAQAALQEAAC4BAAAvAQAAMAEAADEBAAAyAQAAMwEAADQBAAA1AQAANgEAADcBAAA4AQAAOQEAADoBAAA7AQAAPAEAAD0BAAA+AQAAPwEAAEABAABBAQAAQgEAAEMBAABEAQAARQEAAEYBAABHAQAASAEAAEkBAABKAQAASwEAAEwBAABNAQAATgEAAE8BAABQAQAAUQEAAFIBAABTAQAAVAEAAFUBAABWAQAAVwEAAFgBAABZAQAAWgEAAFsBAABcAQAAXQEAAF4BAABfAQAAYAEAAGEBAABiAQAAYwEAAGQBAABlAQAAZgEAAGcBAABoAQAAaQEAAGoBAABrAQAAbAEAAG0BAABuAQAAbwEAAHABAABxAQAAcgEAAHMBAAB0AQAAdQEAAHYBAAB3AQAAeAEAAHkBAAB6AQAAewEAAHwBAAB9AQAAfgEAAH8BAAD+////gQEAAIIBAACDAQAAhAEAAIUBAACGAQAAhwEAAP7////9/////f////3////9////jQEAAP7////+/////v////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////9SAG8AbwB0ACAARQBuAHQAcgB5AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFgAFAf//////////AwAAAAYJAgAAAAAAwAAAAAAAAEYAAAAAAAAAAAAAAAAgFZlgpnrRAY8BAACAAAAAAAAAADEAVABhAGIAbABlAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOAAIB/////wUAAAD/////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAB4aAAAAAAAAVwBvAHIAZABEAG8AYwB1AG0AZQBuAHQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABoAAgEBAAAA//////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAFAFMAdQBtAG0AYQByAH
kASQBuAGYAbwByAG0AYQB0AGkAbwBuAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKAACAQIAAAAEAAAA/////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABYAAAAM0wIAAAAAAAUARABvAGMAdQBtAGUAbgB0AFMAdQBtAG0AYQByAHkASQBuAGYAbwByAG0AYQB0AGkAbwBuAAAAAAAAAAAAAAA4AAIB////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAEAAAAQAAAAAAAAAQBDAG8AbQBwAE8AYgBqAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABIAAgD///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAcgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAP7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////8BAP7/AwoAAP////8GCQIAAAAAAMAAAAAAAABGIAAAAERvY3VtZW50IE1pY3Jvc29mdCBXb3JkIDk3LTIwMDQACgAAAE1TV29yZERvYwAQAAAAV29yZC5Eb2N1bWVudC44APQ5snEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA==" } + + - do: + search: + index: test + body: + fields: [file.content, file.author, file.date, file.content_length, file.content_type] + - match: { hits.total: 1 } + - match: { hits.hits.0.fields: { + file.content: ["Test elasticsearch\n"], + file.author: ["David Pilato"], + file.date: ["2016-03-10T08:25:00Z"], + file.content_length: ["205312"], + file.content_type: ["application/msword"] + } + } + + +--- +"Test mapper attachment processor with .docx file": + + - do: + index: + index: test + type: test + id: 1 + refresh: true + body: { file: "UEsDBBQABgAIAAAAIQBtiidLZgEAAFQFAAATAAgCW0NvbnRlbnRfVHlwZXNdLnhtbCCiBAIooAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAC0lMtugzAQRfeV+g/I2wqcdFFVVUgWfSzbSE0/wLEH4tYv2c7r7ztAgqooAalJNkgwc+89A3hGk41WyQp8kNbkZJgNSAKGWyFNmZOv2Vv6SJIQmRFMWQM52UIgk/HtzWi2dRASVJuQk0WM7onSwBegWcisA4OVwnrNIt76kjrGf1gJ9H4weKDcmggmprHyIOPRCxRsqWLyusHHDQnKSfLc9FVROWHOKclZxDKtqvSozoMKHcKVEQd06Y4sQ2XdExbShbvTCd8OyoMEqavR6gJqPvB1eikgmTIf35nGBrq2XlBh+VKjKOse7gijLQrJodVXbs5bDiHgd9IqayuaSbNnP8kR4lZBuDxF49sfDzGi4BoAO+dehDXMP69G8ce8F6TA3BmbK7g8RmvdCxHx1EJzHZ7NUdt0RWLn1FsXcAv4f4y9P66VOsWBHfgou/+6NhGtz54Pqk0gQBzJpvVOHP8CAAD//wMAUEsDBBQABgAIA
AAAIQDHwie8/wAAAN8CAAALAAgCX3JlbHMvLnJlbHMgogQCKKAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAArJLNSgMxEIDvgu8Q5t7NtoqINNuLCL2JrA8wJtPd6OaHZKrt2xtF1IVlEexx/j6+SWa9ObhBvFLKNngFy6oGQV4HY32n4LG9W1yDyIze4BA8KThShk1zfrZ+oAG5DOXexiwKxWcFPXO8kTLrnhzmKkTypbILySGXMHUyon7BjuSqrq9k+s2AZsQUW6Mgbc0FiPYY6X9s6YjRIKPUIdEipjKd2JZdRIupI1Zggr4v6fzZURUyyGmhy78Lhd3OaroNeu/I85QXHZi8ITOvhDHOGS1PaTTu+JF5C8lI85Wes1md9sO437snj3aYeJfvWvUcqfsQkqOzbN4BAAD//wMAUEsDBBQABgAIAAAAIQATqj6H9gAAADEDAAAcAAgBd29yZC9fcmVscy9kb2N1bWVudC54bWwucmVscyCiBAEooAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKySy2rDMBBF94X+g5h9LTt9UELkbEoh29b9AEUeP6gsCc304b+vaEjr0GC68PJeMfeeQbPZfg5WvGOk3jsFRZaDQGd83btWwUv1eHUPgli7WlvvUMGIBNvy8mLzhFZzGqKuDyRSiiMFHXNYS0mmw0FT5gO69NL4OGhOMrYyaPOqW5SrPL+TcZoB5Umm2NUK4q6+BlGNAf+T7ZumN/jgzduAjs9UyA/cPyNzWo5SrI4tsoKJmaVEkOdBbpYEabzjSu8t/mL8WHMQt0tCcJqdAHzLg1nMMRRLMhCPFiefcdBz9atF6/9cw9E5IsiTQy+/AAAA//8DAFBLAwQUAAYACAAAACEA9WKOYGUCAAAOBwAAEQAAAHdvcmQvZG9jdW1lbnQueG1spFXfb9owEH6ftP8h8jtNwijQiFDR0qI+TKpK9zwZx0ksYp9lGyj763dOIGSbVtGSh9j367vv7mJncvsmq2DLjRWgUhJfRSTgikEmVJGSH
6+PvTEJrKMqoxUonpI9t+R2+vXLZJdkwDaSKxcghLLJTrOUlM7pJAwtK7mk9koKZsBC7q4YyBDyXDAe7sBkYT+Ko3qnDTBuLea7p2pLLTnASTgPTVJ23PajaIyyUC3Gv4xAc4XGHIykDkVTYIRZb3QPMTV1YiUq4fYea9jCbFOyMSo5YPRaHj4mQQLJVlZHZ3jPtyF6WI4R5hySTcj80PKaXmh4hYRB2VLoU98+i4bG8gjybsGdYnc6Hlw29LmhO1xOgOfQz5ogWTXM30eMozMm4iHaiHMo/JnzyKT78e0+15pOc+PrjwH0/wbQxWXDWRjY6BOauAztSa1bLH+VfADrMORuafYyMsuSajyBkiVPhQJDVxUywpEF2PXAf9ZkilfcCrK9XzWqB4mmhj5lKRmNhg/X9/GI1FrH31yjbR7UJnidZi8piaK7m8Hw5rpVzXlON5XzlvEwGs8f6yzGv9z0lVsX4JG2TjDLqWHlJPR6/65dVgBrf1ktHTUOIQVmjTy2ohLZ/1zAHWVrEnZ9H1TWeoY1lPZmy5l7Nv9nukS7185m8WjW9EIXy19oxdMRxzdRnbfE/XA8qJG9w3fqIR3gIY4HdX8SI4rSncQVOAfyJFc871hLTjOO1+EoGnsxB3Adsdi4WjykY1BZ1FpNGW98ajX+lRZG+KIrofizcAxZfhseq28Kr7fNcMPTj2z6GwAA//8DAFBLAwQUAAYACAAAACEAbU1ZqyEGAACOGgAAFQAAAHdvcmQvdGhlbWUvdGhlbWUxLnhtbOxZy47bNhTdF+g/ENo7lm3Jj0E8gS3bSZuZJMg4abOkJVpihhINkpoZIwjQLyhQIC26KdBdC3QToP2D/kuKNv2IUpRlkzbdQToOEBSxAYuPcy8P7yUPJev2nauUgAvEOKZZ32ncch2AspBGOIv7zpPppNZ1ABcwiyChGeo7S8SdO8effnIbHokEpQhI+4wfwb6TCLE4qtd5KJshv0UXKJN9c8pSKGSVxfWIwUvpNyX1puu26ynEmQMymEq30+T3n6Wzh/M5DpFzXHkfE/mTCV40hISdFb7RymSYMwRzhY3OG8WFL3lAGLiApO/IgSJ6OUVXwgEEciE7+o6rPk79+HZ9bUTEHlvNbqI+K7uVQXTeVHYsnq0NPc/32oO1fwUgYhc37ozb4/banwLAMJQzLbnoWH/YG478FVYDlUWL71Fn1GoYeM1/awc/8IuvgVegsujt4CeTYBNDDVQWfUtMOs3AM/AKVBbbO/iOOxh5HQOvQAnB2fkO2vXbraCa7Royp+SeFd7zvUmnuYJvUHVtdZX2mdi31lL4nLKJBKjkQoEzIJYLNIehxAWQ4BnD4ATHiVx4C5hRLpvdpjtxW/K3+HqqpCICjxDUrMumkO80FXwADxleiL7zufTqaJBnObhLRYLD1ai7FvdgFusWb3/65u8fvgJ//frj21ff2vFcx49QFn+JYfZvAwjd4M13r//47fWb77/+85dXFviAwZkOn+IUcfAAXYLHNJWTswyAZuzdLKYJxLrFIIs5zGBhY0GPZfx09IMlJNCCGyIzkk+ZlAob8G7+3CB8lrBcYAvwfpIawFNKyZAy65zuF2PpUciz2D44y3XcYwgvbGMHW3ke5wu55rHNZZAgg+YjIlMOY5QhAYo+eo6QxewZxkZcT3HIKKdzAZ5hMITYGpIpnhmraWN0D6cyL0sbQZlvIzanT8GQEpv7EbowkXJ3QGJziYgRxrswFzC1MoYp0ZEnUCQ2kmdLFhoB50JmOkaEgnGEOLfZPGRLg+59KTH2tJ+SZWoimcDnNuQJpNTY4PQ8SGC6sHLGWaJjP+PncolC8IgKKwlq7pCiLvMgxWNfup9iZKT7+r39RMqQfYEUPTmzbQlEzf24JHOIlPP6lqanOLtW4Lek3X9/0n6KszChds09iKjboTeR8wHD1v20LeL7cNvSHVAW4Q9fuUcwzx4huVks0I/C/VG4//fCvW8/H16uNwqtbuOrm
3XlJt175z7HhJyJJUEnXGk7l9OLJrJRVZTR+kFhkcjiajgDFzOoyoBR8QUWyVkCF3KYhhoh5ivXMQcLyuXpoJqtvosOkqenNCpbG43q2VQaQLFpl6dL1S7PIlG2tjubh7C1e1WL1cNyRaCwfRcS2mAmiZaFRKdqvIaEmtlBWPQsLLqF+70s1GWVFbn/ACz+1/C9kpFcb5CgqMhTaV9l9+CZ3hdMc9pNy/R6BdfDZNogoS03k4S2DBMYoe3mA+e6t0mpQa8IxS6NTvd95LoQkS1tIJlZA5dyz7V86SaEi74zl/eFspgupD9e6CYkcdZ3QrEK9H9RlgXjYgR5UsJUVzn/FAvEAMGpXOt6Gki24dZodoo5fqDkeu6HFzl10ZOM5nMUij0tm6rsK51Ye28ILio0l6TPkugSzEjOHkMZKL/TKAIYYS7W0Yww0xb3JopbcrXaisZ/ZpstCskigasTRRfzEq7KazraPBTT7VmZ9dVkZnGRpBufutcbFR2aaO45QIpT064f7++Q11htdN9gVUr3ttb1Kq3bd0rc/EDQqG0GM6gVjC3UNq0mtQPeEGjDrZfmvjPi0KfB9qotDojqvlLVdl5O0NlzufJH8nY1J4IrquhKPiME1d/KpRKo1kpdrgTIGe47L1x/4AVNP6i5XX9c81qeW+v6g1Zt4PutxthvuKNh86UMikjShl+OPZHPM2S5evmi2ndewKTVbfatkKZ1qt6s1JWxegHTaBovYMo3L2Ba9DsAy8i8aDcnvVZv2K71WoNJzRsNu7Ve0B7WRu2gM5qMAr/bm7x0wIUCe4NW4LXH3Vq7EQQ1r+0W9Lu9WsdrNgdeZ9Ade4OXq1jLmVfXKryK1/E/AAAA//8DAFBLAwQKAAAAAAAAACEAvOgH/fQnAAD0JwAAFwAAAGRvY1Byb3BzL3RodW1ibmFpbC5qcGVn/9j/4AAQSkZJRgABAQAASABIAAD/4QCARXhpZgAATU0AKgAAAAgABAEaAAUAAAABAAAAPgEbAAUAAAABAAAARgEoAAMAAAABAAIAAIdpAAQAAAABAAAATgAAAAAAAABIAAAAAQAAAEgAAAABAAOgAQADAAAAAQABAACgAgAEAAAAAQAAAWmgAwAEAAAAAQAAAgAAAAAA/+0AOFBob3Rvc2hvcCAzLjAAOEJJTQQEAAAAAAAAOEJJTQQlAAAAAAAQ1B2M2Y8AsgTpgAmY7PhCfv/AABEIAgABaQMBEQACEQEDEQH/xAAfAAABBQEBAQEBAQAAAAAAAAAAAQIDBAUGBwgJCgv/xAC1EAACAQMDAgQDBQUEBAAAAX0BAgMABBEFEiExQQYTUWEHInEUMoGRoQgjQrHBFVLR8CQzYnKCCQoWFxgZGiUmJygpKjQ1Njc4OTpDREVGR0hJSlNUVVZXWFlaY2RlZmdoaWpzdHV2d3h5eoOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4eLj5OXm5+jp6vHy8/T19vf4+fr/xAAfAQADAQEBAQEBAQEBAAAAAAAAAQIDBAUGBwgJCgv/xAC1EQACAQIEBAMEBwUEBAABAncAAQIDEQQFITEGEkFRB2FxEyIygQgUQpGhscEJIzNS8BVictEKFiQ04SXxFxgZGiYnKCkqNTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqCg4SFhoeIiYqSk5SVlpeYmZqio6Slpqeoqaqys7S1tre4ubrCw8TFxsfIycrS09TV1tfY2dri4+Tl5ufo6ery8/T19vf4+fr/2wBDAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/2wBDAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBA
QEBAQH/3QAEAC7/2gAMAwEAAhEDEQA/AP7Yfgx8GPg9N8HvhRLL8KPhrLLL8NfAskkkngTws8kkj+F9LZ3d200s7uxLMzHczEk5JNAHpX/ClPg3/wBEl+GX/hBeFf8A5W0AH/ClPg3/ANEl+GX/AIQXhX/5W0AH/ClPg3/0SX4Zf+EF4V/+VtAB/wAKU+Df/RJfhl/4QXhX/wCVtAB/wpT4N/8ARJfhl/4QXhX/AOVtAB/wpT4N/wDRJfhl/wCEF4V/+VtAB/wpT4N/9El+GX/hBeFf/lbQAf8AClPg3/0SX4Zf+EF4V/8AlbQAf8KU+Df/AESX4Zf+EF4V/wDlbQAf8KU+Df8A0SX4Zf8AhBeFf/lbQAf8KU+Df/RJfhl/4QXhX/5W0AH/AApT4N/9El+GX/hBeFf/AJW0AH/ClPg3/wBEl+GX/hBeFf8A5W0AH/ClPg3/ANEl+GX/AIQXhX/5W0AH/ClPg3/0SX4Zf+EF4V/+VtAB/wAKU+Df/RJfhl/4QXhX/wCVtAB/wpT4N/8ARJfhl/4QXhX/AOVtAB/wpT4N/wDRJfhl/wCEF4V/+VtAB/wpT4N/9El+GX/hBeFf/lbQAf8AClPg3/0SX4Zf+EF4V/8AlbQAf8KU+Df/AESX4Zf+EF4V/wDlbQAf8KU+Df8A0SX4Zf8AhBeFf/lbQAf8KU+Df/RJfhl/4QXhX/5W0AH/AApT4N/9El+GX/hBeFf/AJW0AH/ClPg3/wBEl+GX/hBeFf8A5W0AH/ClPg3/ANEl+GX/AIQXhX/5W0AH/ClPg3/0SX4Zf+EF4V/+VtAB/wAKU+Df/RJfhl/4QXhX/wCVtAB/wpT4N/8ARJfhl/4QXhX/AOVtAB/wpT4N/wDRJfhl/wCEF4V/+VtAB/wpT4N/9El+GX/hBeFf/lbQAf8AClPg3/0SX4Zf+EF4V/8AlbQAf8KU+Df/AESX4Zf+EF4V/wDlbQAf8KU+Df8A0SX4Zf8AhBeFf/lbQAf8KU+Df/RJfhl/4QXhX/5W0AH/AApT4N/9El+GX/hBeFf/AJW0AH/ClPg3/wBEl+GX/hBeFf8A5W0AH/ClPg3/ANEl+GX/AIQXhX/5W0AH/ClPg3/0SX4Zf+EF4V/+VtAB/wAKU+Df/RJfhl/4QXhX/wCVtAB/wpT4N/8ARJfhl/4QXhX/AOVtAB/wpT4N/wDRJfhl/wCEF4V/+VtAB/wpT4N/9El+GX/hBeFf/lbQAf8AClPg3/0SX4Zf+EF4V/8AlbQB/Nd/wrT4c/8ARP8AwT/4Sug//INAH//Q/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9H+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0v7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/T/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9T+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAeDfEX4/+Hvhf8QfD3gbxN4W8Vx6ZrfgHxz8TNQ+JS33w9sPh34O8G/DOXRk8fav4uvfEHj3RPE1jD4Xt/Evhu/v307wrq0M1hrUU+ny3Z03Xk0oAx
f8Ahrr9n+W68P2WneN7nXLrxJ4x0D4f2sHh7wj411x9L8aeI9T13RrPwx4qGmeHrr/hD9esNV8Na1YeJNG8Uto+q+Eriz2+KLLRxNbvKAXNd/am+C2h+MbHwIPFDa34kn8V6n4Q1Ox8OWU+sSaBf6R8P/iz8Q7+7v4YALrVNPt7T4K+PPCs58JW/ia/tfH2nf8ACHXun22rW2qRaWAZN7+2L+z3bRaLNY+N5/ECa/4p0XwRYyeG/DPinWLW38XeIPh9r/xR07w7rupW+jHSvC2qQ+B/Dt5r2vweJ77SB4Ns7zRZvGjeHoNc0qa6ANXSP2r/ANn7WUia2+JWhxg6X4r1q7mlNxLpWl6R4Asbe7+IWsaj4lsobvwrD4f+H91dQeHvGvimLXZ/C3hzxhJH4O1LWovE8sWkOANvf2s/2eNN06x1bUviZpmnadqdvey6fdahpXiSyS7v9N8cH4a6l4YgW50aGR/HenePlfwnf/D0L/wnNlrUctnc+HonikKgGaP2wfgC/wARfCHwzh8arNrHje68eaVoWsLYXkXhaTxJ8O/Gfw8+Hmu+GJtcuY4Ihqs/jj4m+HPB+l3MME+g3njFbzwS+tW/jP8As/QL8A+nKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoA/mXoA//1f7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQB5b8Sfgr8MPi/bahZ/EfwrB4otNV+HPxJ+Emo2t1qGsWltefDz4vW2gWfxF8OTw6ZqNlG0XiO18MaJDJf7Rq2mrZE6Pf6c11etcAHk2k/sUfs26Fq/hbXtJ8DavYaz4JstA07wrqNt8R/ifHc6LY+G/Gcfj/TbS0ZfGKqLZ/FKTX2p28ivBrFpqGs6LqsV3omu65p2oAEGtfsOfsueILvx9fal8MAbj4neIfEXivxo1j4y8f6Qmp+IvF3hL4ieCPE+rW0Ok+KLKDRbjXfDvxY+IUOprocenQXWpeJbrxBJEfEFtp+p2gBQ0H9gr9lfwv8AYz4f+HWraW2nW/hSx06S2+J3xZ86w0/wXbeLLLQ9MtLiTx01xBpf2Dx54107WdMSQWHiTT/FWvWXiKDVLbVLtJQB17+wd+y1qWhWfhXU/h9rOqeEdPvfH9/p3g7VPij8W9S8H6dc/FLRPEnh/wCIT6f4WvvHU2g2K+LNM8Y+K01aO10+FJbrxFrGoIqX17PcMAV9M/YB/ZM0jxR4H8Z2PwtuF8S/Di38NWvgzUZ/iF8T7saNF4R+Is3xY0DFjc+M5tNv5LP4gTvr8s2p2l5JfYTS79rnRkTT6AO9sf2UfgVp97a30PhPWJ5dP8S3vizR4NR+IPxI1XTvD+rah8V/AHxwuYvDek6n4vvNL8OaGPin8MPBPiu28LaJZ2Hhmxk0iXSbHSbfQNX1rS9SAPoqgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKAP5l6AP/W/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9f+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0P7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/R/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQA
UAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9L+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0/7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/U/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9X+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/1v7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/X/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9D+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0f7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/S/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9P+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/1P7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/V/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9b+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/1/7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAB
QAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/Q/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9H+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0v7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/T/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9T+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/1f7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/W/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9f+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0P7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/R/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9L+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0/7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/U/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9X+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/1v7q/gp/yRr4S
f8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/X/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9D+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0f7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/S/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9P+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/1P7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/V/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9b+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/1/7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/Q/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9H+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0v7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/T/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9T+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQA
UAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/1f7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/W/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9f+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0P7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/R/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9L+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/0/7q/gp/yRr4Sf8AZMvAf/qK6VQB6bQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAfzL0Af/U/ur+Cn/JGvhJ/wBky8B/+orpVAHptABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQB/MvQB/9X+6v4Kf8ka+En/AGTLwH/6iulUAem0AFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFABQAUAFAH8y9AH/1v7Yfgx8Z/g9D8HvhRFL8V/hrFLF8NfAsckcnjvwskkcieF9LV0dG1IMjowKsrDcrAg4INAHpX/C6/g3/wBFa+GX/he+Ff8A5ZUAH/C6/g3/ANFa+GX/AIXvhX/5ZUAH/C6/g3/0Vr4Zf+F74V/+WVAB/wALr+Df/RWvhl/4XvhX/wCWVAB/wuv4N/8ARWvhl/4XvhX/AOWVAB/wuv4N/wDRWvhl/wCF74V/+WVAB/wuv4N/9Fa+GX/he+Ff/llQAf8AC6/g3/0Vr4Zf+F74V/8AllQAf8Lr+Df/AEVr4Zf+F74V/wDllQAf8Lr+Df8A0Vr4Zf8Ahe+Ff/llQAf8Lr+Df/RWvhl/4XvhX/5ZUAH/AAuv4N/9Fa+GX/he+Ff/AJZUAH/C6/g3/wBFa+GX/he+Ff8A5ZUAH/C6/g3/ANFa+GX/AIXvhX/5ZUAH/C6/g3/0Vr4Zf+F74V/+WVAB/wALr+Df/RWvhl/4XvhX/wCWVAB/wuv4N/8ARWvhl/4XvhX/AOWVAB/wuv4N/wDRWvhl/wCF74V/+WVAB/wuv4N/9Fa+GX/he+Ff/llQAf8AC6/g3/0Vr4Zf+F74V/8AllQAf8Lr+Df/AEVr4Zf+F74V/wDllQAf8Lr+Df8A0Vr4Zf8Ahe+Ff/llQAf8Lr+Df/RWvhl/4XvhX/5ZUAH/AAuv4
N/9Fa+GX/he+Ff/AJZUAH/C6/g3/wBFa+GX/he+Ff8A5ZUAH/C6/g3/ANFa+GX/AIXvhX/5ZUAH/C6/g3/0Vr4Zf+F74V/+WVAB/wALr+Df/RWvhl/4XvhX/wCWVAB/wuv4N/8ARWvhl/4XvhX/AOWVAB/wuv4N/wDRWvhl/wCF74V/+WVAB/wuv4N/9Fa+GX/he+Ff/llQAf8AC6/g3/0Vr4Zf+F74V/8AllQAf8Lr+Df/AEVr4Zf+F74V/wDllQAf8Lr+Df8A0Vr4Zf8Ahe+Ff/llQAf8Lr+Df/RWvhl/4XvhX/5ZUAH/AAuv4N/9Fa+GX/he+Ff/AJZUAH/C6/g3/wBFa+GX/he+Ff8A5ZUAH/C6/g3/ANFa+GX/AIXvhX/5ZUAH/C6/g3/0Vr4Zf+F74V/+WVAB/wALr+Df/RWvhl/4XvhX/wCWVAB/wuv4N/8ARWvhl/4XvhX/AOWVAB/wuv4N/wDRWvhl/wCF74V/+WVAB/wuv4N/9Fa+GX/he+Ff/llQAf8AC6/g3/0Vr4Zf+F74V/8AllQB/Nd/wsv4c/8ARQPBP/hVaD/8nUAf/9kAAFBLAwQUAAYACAAAACEAuN5y8JsDAACACQAAEQAAAHdvcmQvc2V0dGluZ3MueG1stFZLj9s2EL4X6H8wdK5Wj8iOV403sL1xs8E6WazcS2+URNnE8iEMKatO0f/eESWunGYRuA3ii8n55s1vxn7z9k/BJ0cKmim58KKr0JtQWaiSyf3C+3238efeRBsiS8KVpAvvRLX39ubnn960qabGoJqeoAupU1EsvIMxdRoEujhQQfSVqqlEsFIgiMEr7ANB4Kmp/UKJmhiWM87MKYjDcOYNbtTCa0CmgwtfsAKUVpXpTFJVVaygw5ezgEvi9ia3qmgElcZGDIByzEFJfWC1dt7E//WG4ME5OX6riKPgTq+NwgvKbRWUzxaXpNcZ1KAKqjU+kOAuQSbHwMlXjp5jX2HsoUTrCs2j0J7OM5/+NwfxvxxofkklPXTPciDQ82QoQxTp3V4qIDlHVmI5E8zIu0FaflZKTNq0plDg2yCnw9ALOgA7oqrMEEMR1jXl3JK84JSgwzbdAxFITyexNiWtSMPNjuSZUTUqHQnm/Tqc9/DhVB+otCT6A8fD4Uk87fHiQIAUhkJWkwKjrZU0oLjTK9VHZdY4CoAv1VtocqQPQI+Mtg+sMA3Q3pGdl/GU9bOHjiQR2IAv5mmrStoV1AC7/I06A5tU5HJ/MZDCXQGspLuu8Zk5cbrBmjL2mS5l+aHRhqFH25DvyOBbCWC7MfInpMruVNMNJV2P9A8KZh9ow1m9ZQAK7mSJlPphwVhVUcAADCm6RdYxUK3t83tKSlzR3xk3OKcRLvxSu8OjUsaphuF8Fs5vN32mHXoJslxGr5fJS8jqOpldW0oFz1FF2i3LB3CnjkIT0VusiciBkcm2W6dBp5HD04pJh+cU9wM9R7Imd6Dv94AWhPMNjp4D7AoQacl0fUsre+ZbAvvR76ABL0pxDXx49tWtFQq/gWrqHm2B1D01nEqUJIMlk+aeCSfXTZ45K4kb7QxqZPnpCLZPY3va1OAT2xG7J5YqVrcCf/M4UIlD1tGAbkld92zK99HC42x/MFFHAIO3En917SXfxwMWWyzuMXshRVcZag+HURY72ZneKyd7NcoSJ0tG2dTJpqNs5mSzToZLlALu4icktjt28kpxrlpavh/xr0RuSxcMXzw7iXxcrr/0GGcaJ63GPWwUOOxXi0VJWqriDsmKp/655+t3yTxa9vDU7m+zQx49YWsfabUimpYD5kynvelfm+4zj1f+MrqN/WQ2XfnzeP3OX23iZbReXs+m6/jvYQ7cX6ebfwAAAP//AwBQSwMEFAAGAAgAAAAhAPC8NQHcAQAA8QUAABIAAAB3b3JkL2ZvbnRUYWJsZS54bWy8k9tq4zAQhu8LfQej+8ay4vRg6pQ0bWBh6cXSfQBFkW2xOhhJiTdvvyPZcQMhb
JallUHI/4x+jT40j0+/lUx23DphdImyCUYJ18xshK5L9PN9dXOPEuep3lBpNC/Rnjv0NL++euyKymjvEtivXaFYiRrv2yJNHWu4om5iWq4hWBmrqIdfW6eK2l/b9oYZ1VIv1kIKv08JxrdosLGXuJiqEoy/GLZVXPu4P7VcgqPRrhGtO7h1l7h1xm5aaxh3Du6sZO+nqNCjTZafGCnBrHGm8hO4zFBRtILtGY4rJT8MZv9mQEYDxYpvtTaWriXAh0oSMEPzgX7SFZoqCCypFGsrYqCl2jieQWxHZYkwwSs8gzl8OZ6GGaUhkTXUOh5M+kTcyxVVQu4PKt160+ut8Kw5yDtqRaipDzlRQ2Dr1rhErxgGWa1Qr2QlykFYLEeFhKPiyAZlOio4KCz69BkPcReLPmMOnJn2AE5AvAvFXfLGu+SHUVSfAULwLYCYAY4AZvr5QMji9QjIEpS7+/xw/Q8gD38H0mO8HMgCypJnMDwDhnx4GfF1fD6G43cxYJh+BYahQZLvom782TYJzfFFbbIIFZPjVxHahOC75xMc8fL/2SbDws3/AAAA//8DAFBLAwQUAAYACAAAACEA4IvKVR8BAAARAgAAFAAAAHdvcmQvd2ViU2V0dGluZ3MueG1slNFRS8MwEAfwd8HvUPK+pRs6tKwbgkz2MgbVD5Cl1zWY5EIua7dv71nnRHyZbzku9+P+3Hx5dDbrIJJBX4rJOBcZeI218ftSvL2uRg8io6R8rSx6KMUJSCwXtzfzvuhhV0FK/JMyVjwVTpeiTSkUUpJuwSkaYwDPzQajU4nLuJdOxfdDGGl0QSWzM9akk5zm+UycmXiNgk1jNDyjPjjwaZiXESyL6Kk1gb61/hqtx1iHiBqIOI+zX55Txl+Yyd0fyBkdkbBJYw5z3migeHySDy9nf4D7/wHTC+B0sd57jGpn+QS8ScaYWPANlLXYbzcv8rOocYOpUh08UcUpLKyMhaETzBEsbSGuvW6zvuiULcXjTHBT/jrk4gMAAP//AwBQSwMEFAAGAAgAAAAhABZNBGBtAQAA7wIAABEACAFkb2NQcm9wcy9jb3JlLnhtbCCiBAEooAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJySUW+CMBSF35fsP5C+Q4suxhDAZDM+zcRkLlv21rVX7YS2aavIv18BxbH5tLd7e757uJw2nZ3KIjiCsULJDMURQQFIpriQ2wy9rhfhFAXWUclpoSRkqAaLZvn9Xcp0wpSBlVEajBNgA+8kbcJ0hnbO6QRjy3ZQUht5Qnpxo0xJnW/NFmvK9nQLeETIBJfgKKeO4sYw1L0jOlty1lvqgylaA84wFFCCdBbHUYyvrANT2psDrfKDLIWrNdxEL2JPn6zowaqqomrcon7/GL8vn1/aXw2FbLJigPKUs8QJV0Ce4mvpK3v4/ALmuuO+8TUzQJ0y+ZweBQ9WovBdC12EJvI91JUy3PrxQecxDpYZoZ2/yM58cODpglq39De7EcAf61/f+as3IwaOonkZedwSfZueY+52Ax74eJIuzIvyNn6arxcoH5F4EpJxGJM1mSajh4SQj2a9wfzVsDwv8G/Hi0GX0PCJ5t8AAAD//wMAUEsDBBQABgAIAAAAIQCBlv05MgsAAGRyAAAPAAAAd29yZC9zdHlsZXMueG1svJ3bctu6FYbvO9N34
OiqvXB8jJ14trPHduLaUzvbO3Kaa4iEJNQgofLgQ5++IEhJkBdBcQGrvrIlan0A8eMHsEBS+u33l1RGTzwvhMrORvsf9kYRz2KViGx2Nvr5cLXzaRQVJcsSJlXGz0avvBj9/uWvf/nt+bQoXyUvIg3IitM0PhvNy3JxurtbxHOesuKDWvBMH5yqPGWlfpnPdlOWP1aLnVilC1aKiZCifN092Ns7HrWYfAhFTaci5l9VXKU8K038bs6lJqqsmItFsaQ9D6E9qzxZ5CrmRaFPOpUNL2UiW2H2jwAoFXGuCjUtP+iTaWtkUDp8f8/8l8o14CMOcLACpPHpzSxTOZtI3fq6JpGGjb7o5k9U/JVPWSXLon6Z3+fty/aV+XOlsrKInk9ZEQvxoEvWkFRo3vV5VoiRPsJZUZ4XgnUenNf/dB6Ji9J6+0IkYrRbl1j8Vx98YvJsdHC0fOeyrsHGe5Jls+V703zn6oddk7MRz3Z+juu3Jpp7NmL5zvi8DtxtT6z5a53uYvWq+dSbttFdQ3eUcdNf9VE+vVXxI0/GpT5wNtqri9Jv/ry5z4XKdZ88G33+3L455qm4FknCM+uD2Vwk/NecZz8Lnqzf//PK9Kv2jVhVmf7/8NOe0UsWybeXmC/qXqqPZqxuve91gKw/XYl14Sb8P0vYfttmXfFzzmqrRvtvEab6KMRBHVFYZ9vNrN6cu/kUqqDD9yro6L0K+vheBR2/V0En71XQp/cqyGD+nwWJLOEvjRFhMYC6jeNwI5rjMBua4/ASmuOwCprjcAKa4+joaI6jH6M5jm6K4JQqdvVCq7MfOnp7P3f7HOHH3T4l+HG3zwB+3O0Dvh93+/jux90+nPtxt4/eftztgzWe2yy1ohtts6wMdtlUqTJTJY9K/hJOY5lmmfyFhldPejwnOUkCTDOytRNxMC1m5vX2HmJM6j+fl3XKFalpNBWzKtdpb2jFefbEpU5AI5YkmkcIzHlZ5Y4W8enTOZ/ynGcxp+zYdFApMh5lVToh6JsLNiNj8Swhbr4lkWRQWHVoVpXz2iSCoFOnLM5VeNUUIxsfbkUR3lY1JLqopORErO80XcywwnMDgwlPDQwmPDMwmPDEwNKMqolaGlFLtTSiBmtpRO3W9E+qdmtpRO3W0ojaraWFt9uDKKUZ4u1Vx/7wvbtLqeod5+B6jMUsY3oBED7dtHum0T3L2Sxni3lU7x93Y+1zxpZzoZLX6IFiTluRqNb1potc6rMWWRXeoBs0KnOteET2WvGIDLbihVvsTi+T6wXaNU0+M64mZadpDWmQacdMVs2CNtxtrAzvYWsDXIm8ILNBN5agB3+vl7O1nBQj37qW4RVbs8Jt9XZUIq1eiySopVTxI80wfP264LlOyx6DSVdKSvXMEzriuMxV09dsyx8YSQZZ/lu6mLNCmFxpAzF8ql9eq47u2CL4hO4lExmNbt92UiZkRLeCuH64u40e1KJOM+uGoQFeqLJUKRmz3Qn82y8++TtNBc91Epy9Ep3tOdH2kIFdCoJJpiGphIikl5kiEyRzqOH9k79OFMsTGtp9zpvbQ0pORByzdNEsOgi8pcfFZz3+EKyGDO9fLBf1vhCVqR5IYNa2YVFN/s3j8KHuu4pIdob+qEqz/2iWuiaaDhe+TNjAhS8RjJp6eqj7L8HJbuDCT3YDR3Wyl5IVhXBeQvXmUZ3ukkd9vuHJX8tTUuXTStI14BJI1oJLIFkTKlmlWUF5xoZHeMKGR32+hF3G8Ai25AzvH7lIyMQwMColDIxKBgOj0sDASAUIv0PHgoXfpmPBwu/VaWBESwALRtXPSKd/oqs8FoyqnxkYVT8zMKp+ZmBU/ezwa8SnU70IpptiLCRVn7OQdBNNVvJ0oXKWvxIhv0k+YwQbpA3tPlfT+rkBlTU3cRMg6z1qSbjYbnBUIv/iE7Kq1SzKehHsiDIplSLaW1tPOCZy8961bWHmmYvgKpjN9lv+xClW4xaM6DJAAwuXzYKFT1MWLHyas
mDh05QFC5+mLFj4NGXBwu9fvpcs5nMlE547jNhXkWi8YHF7bQlcox60V38rZvMyGs9Xl6hszPHe1sjlLtNG2PYCuwaK44OesDueiCpdVhQ+AXR8ODzYGHojePmgVk/wevm7EflxYCQs83h75Dq124g8GRgJy/w0MNKMUhuRfYP4V5Y/dnaEk77+s9qYcHS+k75etAruLLavI60iu7rgSV8v2rBKdB7H9SUuqM4wz7jjh5nHHY9xkZuCsZObMthXbkSfwX7wJ1EvRzGDpilvdcvP2+IOzZQ6aOT8s1LNxaaNq6TDn0S80av9rOBRJ+dw+NXWjVHG3Y6Dhxs3YvC440YMHoDciEEjkTMcNSS5KYPHJjdi8CDlRqBHKzgj4EYrGI8brWC8z2gFKT6jVcAqwI0YvBxwI9BGhQi0UQNWCm4Eyqgg3MuokII2KkSgjQoRaKPCBRjOqDAeZ1QY72NUSPExKqSgjQoRaKNCBNqoEIE2KkSgjeq5tneGexkVUtBGhQi0USECbVSzXgwwKozHGRXG+xgVUnyMCiloo0IE2qgQgTYqRKCNChFoo0IEyqgg3MuokII2KkSgjQoRaKM2z8f6GxXG44wK432MCik+RoUUtFEhAm1UiEAbFSLQRoUItFEhAmVUEO5lVEhBGxUi0EaFCLRRzaWDAKPCeJxRYbyPUSHFx6iQgjYqRKCNChFoo0IE2qgQgTYqRKCMCsK9jAopaKNCBNqoENHXP9vr6q5nQ/bxu57Ox0yGX7pqK/XD/v4BG3U4HLWslZs1/AGaC6Ueo86nZQ9NvjEMIiZSKLNF7bgXxOaaC6Soq/V/XPY/lmbTA78prH2Ax1zoB/CjoZFgT+Wor8vbkSDJO+rr6XYkWHUe9Y2+diSYBo/6Bl3jy+WdVHo6AsF9w4wVvO8I7xutrXDYxH1jtBUIW7hvZLYCYQP3jcdW4MeoHpzfRn8c2E7Hq5uiAaGvO1qEEzehr1tCrZbDMTTGUNHchKHquQlDZXQTUHo6MXhh3Si0wm6Un9TQZlip/Y3qJmClhgQvqQHGX2qI8pYaovykhgMjVmpIwErtPzi7CV5SA4y/1BDlLTVE+UkNpzKs1JCAlRoSsFIHTshOjL/UEOUtNUT5SQ0Xd1ipIQErNSRgpYYEL6kBxl9qiPKWGqL8pAZZMlpqSMBKDQlYqSHBS2qA8Zcaorylhqg+qc0uyobUKIWtcNwizArETchWIG5wtgI9siUr2jNbsgie2RLUaqk5LluyRXMThqrnJgyV0U1A6enE4IV1o9AKu1F+UuOypS6p/Y3qJmClxmVLTqlx2VKv1LhsqVdqXLbklhqXLXVJjcuWuqT2H5zdBC+pcdlSr9S4bKlXaly25JYaly11SY3LlrqkxmVLXVIHTshOjL/UuGypV2pctuSWGpctdUmNy5a6pMZlS11S47Ilp9S4bKlXaly21Cs1LltyS43LlrqkxmVLXVLjsqUuqXHZklNqXLbUKzUuW+qV2pEt7T5v/GpYzTa/d6c/XL4ueP3F8dYDM0nzxbntRUDzwZtk9etedXBdk6j9xbP2bVPh9oJhU6IJhEXFc11W3H7ll6OoeyWFPm+WJ/pwCYp0fLOvqcL65JefbhtzfRG0+dzGBc/eGpd1Y/fU1ojBqt72aRRzVfFz2wW31VHXaCKbH8PT/9xkiQY8t7+w1tQ1eWENSh+/5FLesebTauH+qOTTsjm6v2cen31zfNJ8YaEzPjeDhBOwu1mZ5mX7w3eOFm9+wqC9eu1o9fMqrjIutRt4R5ub+ylCm3tdweV/xZf/AQAA//8DAFBLAwQUAAYACAAAACEAQP7QLGkBAAC3AgAAEAAIAWRvY1Byb3BzL2FwcC54bWwgogQBKKAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACcUk1LxTAQvAv+h9K7L32CH8i+iCjiQUV4Vc8h2bbBNAnJKr5/78ZqrXgzp92ZZHZmCZy/j656w5Rt8Jt6vWrqCr0Oxvp+Uz+21wendZVJeaNc8Lipd5jrc7m/Bw8pRExkMVcs4fOmHojimRBZDziqvGLaM9OFNCriNvUidJ3VeBX064iexGHTHAt8J/QGzUGcBetJ8eyN/itqgi7+8lO7i6wnocUxOkUo78tLtzKBRhAzCm0g5Vo7omwYnht4UD1muQYxFfAcksnyEMRUwOWgktLE+5PrExCLFi5idFYr4sXKO6tTyKGj6k5p6ynkoSoKIJa3gENsUb8mS7viY9nCrfWTk6lgZ0n1ScXhy97cwVYrh5ccX3bKZQTxAxSVl/wY23BVYn/xv8FFpmdLwzYqXQafLtMtCNgyioa9zuNmAG54/ckVeX7rezTfd/4SZV9P0z+U66NVw+dzO98YZ5w/iPwAAAD//wMAUEsBAi0AFAAGAAgAAAAhAG2KJ0tmAQAAVAUAABMAAAAAAAAAAAAAAAAAAAAAAFtDb250ZW50X1R5cGVzXS54bWxQSwECLQAUAAYACAAAACEAx8InvP8AAADfAgAACwAAAAAAAAAAAAAAAACfAwAAX3JlbHMvLnJlbHNQSwECLQAUAAYACAAAACEAE6o+h/YAAAAxAwAAHAAAAAAAAAAAAAAAAADPBgAAd29yZC9fcmVscy9kb2N1bWVudC54bWwucmVsc1BLAQItABQABgAIAAAAIQD1Yo5gZQIAAA4HAAARAAAAAAAAAAAAAAAAAAcJAAB3b3JkL2RvY3VtZW50LnhtbFBLAQItABQABgAIAAAAIQBtTVmrIQYAAI4aAAAVAAAAAAAAAAAAAAAAAJsLAAB3b3JkL3RoZW1lL3RoZW1lMS54bWxQSwECLQAKAAAAAAAAACEAvOgH/fQnAAD0JwAAFwAAAAAAAAAAAAAAAADvEQAAZG9jUHJvcHMvdGh1bWJuYWlsLmpwZWdQSwECLQAUAAYACAAAACEAuN5y8JsDAACACQAAEQAAAAAAAAAAAAAAAAAYOgAAd29yZC9zZXR0aW5ncy54bWxQSwECLQAUAAYACAAAACEA8Lw1AdwBAADxBQAAEgAAAAAAAAAAAAAAAADiPQAAd29yZC9mb250VGFibGUueG1sUEsBAi0AFAAGAAgAAAAhAOCLylUfAQAAEQIAABQAAAAAAAAAAAAAAAAA7j8AAHdvcmQvd2ViU2V0dGluZ3MueG1sUEsBAi0AFAAGAAgAAAAhABZNBGBtAQAA7wIAABEAAAAAAAAAAAAAAAAAP0EAAGRvY1Byb3BzL2NvcmUueG1sUEsBAi0AFAAGAAgAAAAhAIGW/TkyCwAAZHIAAA8AAAAAAAAAAAAAAAAA40MAAHdvcmQvc3R5bGVzLnhtbFBLAQItABQABgAIAAAAIQBA/tAsaQEAALcCAAAQAAAAAAAAAAAAAAAAAEJPAABkb2NQcm9wcy9hcHAueG1sUEsFBgAAAAAMAAwABgMAAOFRAAAAAA==" } + + - do: + search: + index: test + body: + fields: [file.content, file.author, file.date, file.content_length, file.content_type] + - match: { hits.total: 1 } + - match: { hits.hits.0.fields: { + file.content: ["Test 
elasticsearch\n"], + file.author: ["David Pilato"], + file.date: ["2016-03-10T08:24:00Z"], + file.content_length: ["21757"], + file.content_type: ["application/vnd.openxmlformats-officedocument.wordprocessingml.document"] + } + } + From ebc12690bc24a8a35daa3a7864ece43ff75a4357 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 10 Mar 2016 15:22:35 -0500 Subject: [PATCH 171/320] [reindex] Move refresh tests to unit test The refresh tests were failing rarely due to refreshes happening automatically on indexes with -1 refresh intervals. This commit moves the refresh test into a unit test where we can check if it was attempted so we never get false failures from background refreshes. It also stopped refresh from being run if the reindex request was canceled. --- .../AbstractAsyncBulkByScrollAction.java | 15 +++++-- .../reindex/AsyncBulkByScrollActionTests.java | 37 +++++++++++++++- .../index/reindex/ReindexBasicTests.java | 38 ---------------- .../reindex/UpdateByQueryBasicTests.java | 43 ------------------- 4 files changed, 48 insertions(+), 85 deletions(-) diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java index 4de06c88b8d8..2eb0cc5ba78e 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java @@ -47,6 +47,7 @@ import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; @@ -267,9 +268,9 @@ public abstract class AbstractAsyncBulkByScrollAction indexingFailures, List searchFailures, boolean timedOut) { - if (false == mainRequest.isRefresh()) { + if (task.isCancelled() || false == 
mainRequest.isRefresh()) { finishHim(null, indexingFailures, searchFailures, timedOut); return; } @@ -390,6 +391,14 @@ public abstract class AbstractAsyncBulkByScrollAction indices) { + destinationIndices.addAll(indices); + } + /** * Wraps a backoffPolicy in another policy that counts the number of backoffs acquired. */ diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index 2aedd603fbc3..a4e9c42a33ed 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; @@ -74,10 +75,12 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; +import static java.util.Collections.singleton; import static org.apache.lucene.util.TestUtil.randomSimpleString; import static org.elasticsearch.action.bulk.BackoffPolicy.constantBackoff; import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; @@ -388,6 +391,32 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { assertEquals(defaultBackoffBeforeFailing, 
millis); } + public void testRefreshIsFalseByDefault() throws Exception { + refreshTestCase(null, false); + } + + public void testRefreshFalseDoesntMakeVisible() throws Exception { + refreshTestCase(false, false); + } + + public void testRefreshTrueMakesVisible() throws Exception { + refreshTestCase(true, true); + } + + private void refreshTestCase(Boolean refresh, boolean shouldRefresh) { + if (refresh != null) { + mainRequest.setRefresh(refresh); + } + DummyAbstractAsyncBulkByScrollAction action = new DummyAbstractAsyncBulkByScrollAction(); + action.addDestinationIndices(singleton("foo")); + action.startNormalTermination(emptyList(), emptyList(), false); + if (shouldRefresh) { + assertArrayEquals(new String[] {"foo"}, client.lastRefreshRequest.get().indices()); + } else { + assertNull("No refresh was attempted", client.lastRefreshRequest.get()); + } + } + public void testCancelBeforeInitialSearch() throws Exception { cancelTaskCase((DummyAbstractAsyncBulkByScrollAction action) -> action.initialSearch()); } @@ -415,7 +444,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { // Refresh or not doesn't matter - we don't try to refresh. mainRequest.setRefresh(usually()); cancelTaskCase((DummyAbstractAsyncBulkByScrollAction action) -> action.startNormalTermination(emptyList(), emptyList(), false)); - // This wouldn't return if we called refresh - the action would hang waiting for the refresh that we haven't mocked. 
+ assertNull("No refresh was attempted", client.lastRefreshRequest.get()); } private void cancelTaskCase(Consumer testMe) throws Exception { @@ -463,6 +492,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { private static class MyMockClient extends FilterClient { private final List scrollsCleared = new ArrayList<>(); private final AtomicInteger bulksAttempts = new AtomicInteger(); + private final AtomicReference lastRefreshRequest = new AtomicReference<>(); private int bulksToReject = 0; @@ -475,6 +505,11 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { protected , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute( Action action, Request request, ActionListener listener) { + if (request instanceof RefreshRequest) { + lastRefreshRequest.set((RefreshRequest) request); + listener.onResponse(null); + return; + } if (request instanceof ClearScrollRequest) { ClearScrollRequest clearScroll = (ClearScrollRequest) request; scrollsCleared.addAll(clearScroll.getScrollIds()); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexBasicTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexBasicTests.java index 83dcd1483c14..c169f6819ea9 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexBasicTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexBasicTests.java @@ -19,14 +19,12 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import java.util.ArrayList; import java.util.List; import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; public class ReindexBasicTests extends 
ReindexTestCase { @@ -84,40 +82,4 @@ public class ReindexBasicTests extends ReindexTestCase { assertThat(copy.get(), responseMatcher().created(half).batches(half, 5)); assertHitCount(client().prepareSearch("dest").setTypes("half").setSize(0).get(), half); } - - public void testRefreshIsFalseByDefault() throws Exception { - refreshTestCase(null, false); - } - - public void testRefreshFalseDoesntMakeVisible() throws Exception { - refreshTestCase(false, false); - } - - public void testRefreshTrueMakesVisible() throws Exception { - refreshTestCase(true, true); - } - - /** - * Executes a reindex into an index with -1 refresh_interval and checks that - * the documents are visible properly. - */ - private void refreshTestCase(Boolean refresh, boolean visible) throws Exception { - CreateIndexRequestBuilder create = client().admin().indices().prepareCreate("dest").setSettings("refresh_interval", -1); - assertAcked(create); - ensureYellow(); - indexRandom(true, client().prepareIndex("source", "test", "1").setSource("foo", "a"), - client().prepareIndex("source", "test", "2").setSource("foo", "a"), - client().prepareIndex("source", "test", "3").setSource("foo", "b"), - client().prepareIndex("source", "test", "4").setSource("foo", "c")); - assertHitCount(client().prepareSearch("source").setSize(0).get(), 4); - - // Copy all the docs - ReindexRequestBuilder copy = reindex().source("source").destination("dest", "all"); - if (refresh != null) { - copy.refresh(refresh); - } - assertThat(copy.get(), responseMatcher().created(4)); - - assertHitCount(client().prepareSearch("dest").setTypes("all").setSize(0).get(), visible ? 
4 : 0); - } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryBasicTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryBasicTests.java index e49afafca469..096967149fbc 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryBasicTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryBasicTests.java @@ -19,12 +19,9 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.search.sort.SortOrder; -import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; public class UpdateByQueryBasicTests extends UpdateByQueryTestCase { @@ -64,44 +61,4 @@ public class UpdateByQueryBasicTests extends UpdateByQueryTestCase { assertEquals(3, client().prepareGet("test", "test", "3").get().getVersion()); assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion()); } - - public void testRefreshIsFalseByDefault() throws Exception { - refreshTestCase(null, false); - } - - public void testRefreshFalseDoesntMakeVisible() throws Exception { - refreshTestCase(false, false); - } - - public void testRefreshTrueMakesVisible() throws Exception { - refreshTestCase(true, true); - } - - /** - * Executes an update_by_query on an index with -1 refresh_interval and - * checks that the documents are visible properly. 
- */ - private void refreshTestCase(Boolean refresh, boolean visible) throws Exception { - CreateIndexRequestBuilder create = client().admin().indices().prepareCreate("test").setSettings("refresh_interval", -1); - create.addMapping("test", "{\"dynamic\": \"false\"}"); - assertAcked(create); - ensureYellow(); - indexRandom(true, client().prepareIndex("test", "test", "1").setSource("foo", "a"), - client().prepareIndex("test", "test", "2").setSource("foo", "a"), - client().prepareIndex("test", "test", "3").setSource("foo", "b"), - client().prepareIndex("test", "test", "4").setSource("foo", "c")); - assertHitCount(client().prepareSearch("test").setQuery(matchQuery("foo", "a")).setSize(0).get(), 0); - - // Now make foo searchable - assertAcked(client().admin().indices().preparePutMapping("test").setType("test") - .setSource("{\"test\": {\"properties\":{\"foo\": {\"type\": \"text\"}}}}")); - UpdateByQueryRequestBuilder update = request().source("test"); - if (refresh != null) { - update.refresh(refresh); - } - assertThat(update.get(), responseMatcher().updated(4)); - - assertHitCount(client().prepareSearch("test").setQuery(matchQuery("foo", "a")).setSize(0).get(), visible ? 2 : 0); - } - } From c4934f5250c2a7e422c8236e2dd2f9a6d009f7d4 Mon Sep 17 00:00:00 2001 From: Ed Winn Date: Thu, 10 Mar 2016 15:53:03 -0600 Subject: [PATCH 172/320] Current link returns 404. Updated --- docs/plugins/mapper-attachments.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugins/mapper-attachments.asciidoc b/docs/plugins/mapper-attachments.asciidoc index ed992623a503..63742b518b59 100644 --- a/docs/plugins/mapper-attachments.asciidoc +++ b/docs/plugins/mapper-attachments.asciidoc @@ -176,7 +176,7 @@ need to specify the `type` (like `string` or `date`) since it is already known. 
[[mapper-attachments-copy-to]] ==== Copy To feature -If you want to use http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/mapping-core-types.html#copy-to[copy_to] +If you want to use https://www.elastic.co/guide/en/elasticsearch/reference/current/copy-to.html[copy_to] feature, you need to define it on each sub-field you want to copy to another field: [source,js] From 96ec48afcde8b8689cbce09e6f8afe5c3153d9ae Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 10 Mar 2016 17:28:13 -0800 Subject: [PATCH 173/320] Fix dynamic mapper when its parent already has an update The change to move dynamic mapping handling to the end of document parsing has an edge case which can cause dynamic mappings to fail document parsing. If field a.b is added as part of the root update, followed by a.c.d, then we need to expand the mappers on the stack, since a is hidden inside the root update which exists on the stack. This change adds a test for this case, as well as tries to better document how the logic works for building up the stack before adding a dynamic mapper.
--- .../index/mapper/DocumentParser.java | 88 +++++++++++++------ .../index/mapper/DocumentParserTests.java | 66 ++++++++------ 2 files changed, 104 insertions(+), 50 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 36c1cf106f4a..8c8ded9b543b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -23,18 +23,14 @@ import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; -import java.util.HashSet; import java.util.Iterator; import java.util.List; -import java.util.Set; import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.CloseableThreadLocal; import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.FormatDateTimeFormatter; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.IndexSettings; @@ -240,28 +236,26 @@ final class DocumentParser implements Closeable { } previousMapper = newMapper; String[] nameParts = newMapper.name().split("\\."); - // find common elements with the previously processed dynamic mapper - int keepBefore = 1; - while (keepBefore < parentMappers.size() && - parentMappers.get(keepBefore).simpleName().equals(nameParts[keepBefore - 1])) { - ++keepBefore; - } - popMappers(parentMappers, keepBefore, true); - if (keepBefore < nameParts.length) { - String updateParentName = nameParts[keepBefore - 1]; - final ObjectMapper lastParent = parentMappers.get(parentMappers.size() - 1); - Mapper updateParent = lastParent.getMapper(updateParentName); - if (updateParent == null) { - // the parent we need is not 
on the stack, so look it up in the full mappings - if (keepBefore > 1) { - // only prefix with parent mapper if the parent mapper isn't the root (which has a fake name) - updateParentName = lastParent.name() + '.' + updateParentName; - } - updateParent = docMapper.objectMappers().get(updateParentName); - } - assert updateParent instanceof ObjectMapper; - newMapper = createUpdate((ObjectMapper)updateParent, nameParts, keepBefore, newMapper); + // We first need the stack to only contain mappers in common with the previously processed mapper + // For example, if the first mapper processed was a.b.c, and we now have a.d, the stack will contain + // a.b, and we want to merge b back into the stack so it just contains a + int i = removeUncommonMappers(parentMappers, nameParts); + + // Then we need to add back mappers that may already exist within the stack, but are not on it. + // For example, if we processed a.b, followed by an object mapper a.c.d, and now are adding a.c.d.e + // then the stack will only have a on it because we will have already merged a.c.d into the stack. + // So we need to pull a.c, followed by a.c.d, onto the stack so e can be added to the end. + i = expandCommonMappers(parentMappers, nameParts, i); + + // If there are still parents of the new mapper which are not on the stack, we need to pull them + // from the existing mappings. In order to maintain the invariant that the stack only contains + // fields which are updated, we cannot simply add the existing mappers to the stack, since they + // may have other subfields which will not be updated. Instead, we pull the mapper from the existing + // mappings, and build an update with only the new mapper and its parents. This then becomes our + // "new mapper", and can be added to the stack. 
+ if (i < nameParts.length - 1) { + newMapper = createExistingMapperUpdate(parentMappers, nameParts, i, docMapper, newMapper); } if (newMapper instanceof ObjectMapper) { @@ -299,12 +293,56 @@ final class DocumentParser implements Closeable { parentMappers.set(lastIndex, withNewMapper); } + /** + * Removes mappers that exist on the stack, but are not part of the path of the current nameParts, + * Returns the next unprocessed index from nameParts. + */ + private static int removeUncommonMappers(List parentMappers, String[] nameParts) { + int keepBefore = 1; + while (keepBefore < parentMappers.size() && + parentMappers.get(keepBefore).simpleName().equals(nameParts[keepBefore - 1])) { + ++keepBefore; + } + popMappers(parentMappers, keepBefore, true); + return keepBefore - 1; + } + + /** + * Adds mappers from the end of the stack that exist as updates within those mappers. + * Returns the next unprocessed index from nameParts. + */ + private static int expandCommonMappers(List parentMappers, String[] nameParts, int i) { + ObjectMapper last = parentMappers.get(parentMappers.size() - 1); + while (i < nameParts.length - 1 && last.getMapper(nameParts[i]) != null) { + Mapper newLast = last.getMapper(nameParts[i]); + assert newLast instanceof ObjectMapper; + parentMappers.add((ObjectMapper)newLast); + ++i; + } + return i; + } + + /** Creates an update for intermediate object mappers that are not on the stack, but parents of newMapper. */ + private static ObjectMapper createExistingMapperUpdate(List parentMappers, String[] nameParts, int i, + DocumentMapper docMapper, Mapper newMapper) { + String updateParentName = nameParts[i]; + final ObjectMapper lastParent = parentMappers.get(parentMappers.size() - 1); + if (parentMappers.size() > 1) { + // only prefix with parent mapper if the parent mapper isn't the root (which has a fake name) + updateParentName = lastParent.name() + '.' 
+ nameParts[i]; + } + ObjectMapper updateParent = docMapper.objectMappers().get(updateParentName); + assert updateParent != null : updateParentName + " doesn't exist"; + return createUpdate(updateParent, nameParts, i + 1, newMapper); + } + /** Build an update for the parent which will contain the given mapper and any intermediate fields. */ private static ObjectMapper createUpdate(ObjectMapper parent, String[] nameParts, int i, Mapper mapper) { List parentMappers = new ArrayList<>(); ObjectMapper previousIntermediate = parent; for (; i < nameParts.length - 1; ++i) { Mapper intermediate = previousIntermediate.getMapper(nameParts[i]); + assert intermediate != null : "Field " + previousIntermediate.name() + " does not have a subfield " + nameParts[i]; assert intermediate instanceof ObjectMapper; parentMappers.add((ObjectMapper)intermediate); previousIntermediate = (ObjectMapper)intermediate; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java index cbc858b642db..e4d1e306af3f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -72,9 +72,10 @@ public class DocumentParserTests extends ESSingleNodeTestCase { DocumentMapper createDummyMapping(MapperService mapperService) throws Exception { String mapping = jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("a").startObject("properties") - .startObject("b").field("type", "object").startObject("properties") - .startObject("c").field("type", "object") + .startObject("y").field("type", "object").endObject() + .startObject("x").startObject("properties") + .startObject("subx").field("type", "object").startObject("properties") + .startObject("subsubx").field("type", "object") 
.endObject().endObject().endObject().endObject().endObject().endObject().endObject().endObject().string(); DocumentMapper defaultMapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -109,40 +110,55 @@ public class DocumentParserTests extends ESSingleNodeTestCase { public void testSubfieldMappingUpdate() throws Exception { DocumentMapper docMapper = createDummyMapping(createIndex("test").mapperService()); - List updates = Collections.singletonList(new MockFieldMapper("a.foo")); + List updates = Collections.singletonList(new MockFieldMapper("x.foo")); Mapping mapping = DocumentParser.createDynamicUpdate(docMapper.mapping(), docMapper, updates); - Mapper aMapper = mapping.root().getMapper("a"); - assertNotNull(aMapper); - assertTrue(aMapper instanceof ObjectMapper); - assertNotNull(((ObjectMapper)aMapper).getMapper("foo")); - assertNull(((ObjectMapper)aMapper).getMapper("b")); + Mapper xMapper = mapping.root().getMapper("x"); + assertNotNull(xMapper); + assertTrue(xMapper instanceof ObjectMapper); + assertNotNull(((ObjectMapper)xMapper).getMapper("foo")); + assertNull(((ObjectMapper)xMapper).getMapper("subx")); } public void testMultipleSubfieldMappingUpdate() throws Exception { DocumentMapper docMapper = createDummyMapping(createIndex("test").mapperService()); List updates = new ArrayList<>(); - updates.add(new MockFieldMapper("a.foo")); - updates.add(new MockFieldMapper("a.bar")); + updates.add(new MockFieldMapper("x.foo")); + updates.add(new MockFieldMapper("x.bar")); Mapping mapping = DocumentParser.createDynamicUpdate(docMapper.mapping(), docMapper, updates); - Mapper aMapper = mapping.root().getMapper("a"); - assertNotNull(aMapper); - assertTrue(aMapper instanceof ObjectMapper); - assertNotNull(((ObjectMapper)aMapper).getMapper("foo")); - assertNotNull(((ObjectMapper)aMapper).getMapper("bar")); - assertNull(((ObjectMapper)aMapper).getMapper("b")); + Mapper xMapper = mapping.root().getMapper("x"); + 
assertNotNull(xMapper); + assertTrue(xMapper instanceof ObjectMapper); + assertNotNull(((ObjectMapper)xMapper).getMapper("foo")); + assertNotNull(((ObjectMapper)xMapper).getMapper("bar")); + assertNull(((ObjectMapper)xMapper).getMapper("subx")); } public void testDeepSubfieldMappingUpdate() throws Exception { DocumentMapper docMapper = createDummyMapping(createIndex("test").mapperService()); - List updates = Collections.singletonList(new MockFieldMapper("a.b.foo")); + List updates = Collections.singletonList(new MockFieldMapper("x.subx.foo")); Mapping mapping = DocumentParser.createDynamicUpdate(docMapper.mapping(), docMapper, updates); - Mapper aMapper = mapping.root().getMapper("a"); - assertNotNull(aMapper); - assertTrue(aMapper instanceof ObjectMapper); - Mapper bMapper = ((ObjectMapper)aMapper).getMapper("b"); - assertTrue(bMapper instanceof ObjectMapper); - assertNotNull(((ObjectMapper)bMapper).getMapper("foo")); - assertNull(((ObjectMapper)bMapper).getMapper("c")); + Mapper xMapper = mapping.root().getMapper("x"); + assertNotNull(xMapper); + assertTrue(xMapper instanceof ObjectMapper); + Mapper subxMapper = ((ObjectMapper)xMapper).getMapper("subx"); + assertTrue(subxMapper instanceof ObjectMapper); + assertNotNull(((ObjectMapper)subxMapper).getMapper("foo")); + assertNull(((ObjectMapper)subxMapper).getMapper("subsubx")); + } + + public void testDeepSubfieldAfterSubfieldMappingUpdate() throws Exception { + DocumentMapper docMapper = createDummyMapping(createIndex("test").mapperService()); + List updates = new ArrayList<>(); + updates.add(new MockFieldMapper("x.a")); + updates.add(new MockFieldMapper("x.subx.b")); + Mapping mapping = DocumentParser.createDynamicUpdate(docMapper.mapping(), docMapper, updates); + Mapper xMapper = mapping.root().getMapper("x"); + assertNotNull(xMapper); + assertTrue(xMapper instanceof ObjectMapper); + assertNotNull(((ObjectMapper)xMapper).getMapper("a")); + Mapper subxMapper = ((ObjectMapper)xMapper).getMapper("subx"); + 
assertTrue(subxMapper instanceof ObjectMapper); + assertNotNull(((ObjectMapper)subxMapper).getMapper("b")); } public void testObjectMappingUpdate() throws Exception { From f6ae9ec4f608eeffbb4ecbcea98c0916ee523d12 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 7 Mar 2016 11:04:24 +0100 Subject: [PATCH 174/320] Remove ShardsAllocators and merge allocateUnassigned, moveShards and rebalance to improve performance --- .../routing/allocation/AllocationService.java | 26 +++-- .../allocator/BalancedShardsAllocator.java | 39 ++++--- .../allocation/allocator/ShardsAllocator.java | 36 +------ .../allocator/ShardsAllocators.java | 100 ------------------ .../cluster/ClusterModuleTests.java | 14 +-- .../allocation/BalanceConfigurationTests.java | 30 +----- .../NodeVersionAllocationDeciderTests.java | 6 +- .../RandomAllocationDeciderTests.java | 4 +- .../decider/DiskThresholdDeciderTests.java | 30 +++--- .../zen/NodeJoinControllerTests.java | 2 +- .../test/ESAllocationTestCase.java | 16 +-- 11 files changed, 66 insertions(+), 237 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocators.java diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java index eeeb6e3389cb..5c383bcae836 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster.routing.allocation; import com.carrotsearch.hppc.cursors.ObjectCursor; -import org.apache.lucene.util.ArrayUtil; import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; @@ -36,13 +35,13 @@ import org.elasticsearch.cluster.routing.RoutingNodes; import 
org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; -import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators; +import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator; import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; -import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.gateway.GatewayAllocator; import java.util.ArrayList; import java.util.Collections; @@ -63,14 +62,17 @@ import java.util.stream.Collectors; public class AllocationService extends AbstractComponent { private final AllocationDeciders allocationDeciders; + private final GatewayAllocator gatewayAllocator; + private final ShardsAllocator shardsAllocator; private final ClusterInfoService clusterInfoService; - private final ShardsAllocators shardsAllocators; @Inject - public AllocationService(Settings settings, AllocationDeciders allocationDeciders, ShardsAllocators shardsAllocators, ClusterInfoService clusterInfoService) { + public AllocationService(Settings settings, AllocationDeciders allocationDeciders, GatewayAllocator gatewayAllocator, + ShardsAllocator shardsAllocator, ClusterInfoService clusterInfoService) { super(settings); this.allocationDeciders = allocationDeciders; - this.shardsAllocators = shardsAllocators; + this.gatewayAllocator = gatewayAllocator; + this.shardsAllocator = shardsAllocator; this.clusterInfoService = clusterInfoService; } @@ -92,7 +94,7 @@ public class AllocationService extends AbstractComponent { if (!changed) { return new RoutingAllocation.Result(false, clusterState.routingTable(), clusterState.metaData()); } - 
shardsAllocators.applyStartedShards(allocation); + gatewayAllocator.applyStartedShards(allocation); if (withReroute) { reroute(allocation); } @@ -192,7 +194,7 @@ public class AllocationService extends AbstractComponent { if (!changed) { return new RoutingAllocation.Result(false, clusterState.routingTable(), clusterState.metaData()); } - shardsAllocators.applyFailedShards(allocation); + gatewayAllocator.applyFailedShards(allocation); reroute(allocation); final RoutingAllocation.Result result = buildChangedResult(clusterState.metaData(), routingNodes); String failedShardsAsString = firstListElementsToCommaDelimitedString(failedShards, s -> s.shard.shardId().toString()); @@ -306,14 +308,10 @@ public class AllocationService extends AbstractComponent { if (allocation.routingNodes().unassigned().size() > 0) { updateLeftDelayOfUnassignedShards(allocation, settings); - changed |= shardsAllocators.allocateUnassigned(allocation); + changed |= gatewayAllocator.allocateUnassigned(allocation); } - // move shards that no longer can be allocated - changed |= shardsAllocators.moveShards(allocation); - - // rebalance - changed |= shardsAllocators.rebalance(allocation); + changed |= shardsAllocator.allocate(allocation); assert RoutingNodes.assertShardStats(allocation.routingNodes()); return changed; } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 0c40b26ca67b..3e5b0847b0ad 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -103,27 +103,26 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } @Override - public void applyStartedShards(StartedRerouteAllocation allocation) { /* ONLY FOR GATEWAYS */ } - 
- @Override - public void applyFailedShards(FailedRerouteAllocation allocation) { /* ONLY FOR GATEWAYS */ } - - @Override - public boolean allocateUnassigned(RoutingAllocation allocation) { + public boolean allocate(RoutingAllocation allocation) { final Balancer balancer = new Balancer(logger, allocation, weightFunction, threshold); - return balancer.allocateUnassigned(); - } - - @Override - public boolean rebalance(RoutingAllocation allocation) { - final Balancer balancer = new Balancer(logger, allocation, weightFunction, threshold); - return balancer.balance(); - } - - @Override - public boolean moveShards(RoutingAllocation allocation) { - final Balancer balancer = new Balancer(logger, allocation, weightFunction, threshold); - return balancer.moveShards(); + boolean changed = false; + if (allocation.routingNodes().unassigned().size() > 0) { + changed |= balancer.allocateUnassigned(); + } + changed |= balancer.moveShards(); + if (allocation.hasPendingAsyncFetch() == false) { + /* + * see https://github.com/elastic/elasticsearch/issues/14387 + * if we allow rebalance operations while we are still fetching shard store data + * we might end up with unnecessary rebalance operations which can be super confusion/frustrating + * since once the fetches come back we might just move all the shards back again. + * Therefore we only do a rebalance if we have fetched all information. 
+ */ + changed |= balancer.balance(); + } else { + logger.debug("skipping rebalance due to in-flight shard/store fetches"); + } + return changed; } /** diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocator.java index 4d9c05527d3e..2656e2e31673 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocator.java @@ -19,11 +19,7 @@ package org.elasticsearch.cluster.routing.allocation.allocator; -import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.cluster.routing.ShardRoutingState; -import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.routing.allocation.StartedRerouteAllocation; /** *

      @@ -34,41 +30,13 @@ import org.elasticsearch.cluster.routing.allocation.StartedRerouteAllocation; */ public interface ShardsAllocator { - /** - * Applies changes on started nodes based on the implemented algorithm. For example if a - * shard has changed to {@link ShardRoutingState#STARTED} from {@link ShardRoutingState#RELOCATING} - * this allocator might apply some cleanups on the node that used to hold the shard. - * @param allocation all started {@link ShardRouting shards} - */ - void applyStartedShards(StartedRerouteAllocation allocation); - - /** - * Applies changes on failed nodes based on the implemented algorithm. - * @param allocation all failed {@link ShardRouting shards} - */ - void applyFailedShards(FailedRerouteAllocation allocation); - /** * Assign all unassigned shards to nodes - * - * @param allocation current node allocation - * @return true if the allocation has changed, otherwise false - */ - boolean allocateUnassigned(RoutingAllocation allocation); - - /** + * Move started shards that can not be allocated to a node anymore * Rebalancing number of shards on all nodes * * @param allocation current node allocation * @return true if the allocation has changed, otherwise false */ - boolean rebalance(RoutingAllocation allocation); - - /** - * Move started shards that can not be allocated to a node anymore - * - * @param allocation current node allocation - * @return true if the allocation has changed, otherwise false - */ - boolean moveShards(RoutingAllocation allocation); + boolean allocate(RoutingAllocation allocation); } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocators.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocators.java deleted file mode 100644 index f3eb1ebbf143..000000000000 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocators.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to 
Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.cluster.routing.allocation.allocator; - -import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation; -import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.routing.allocation.StartedRerouteAllocation; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.gateway.GatewayAllocator; - -/** - * The {@link ShardsAllocator} class offers methods for allocating shard within a cluster. - * These methods include moving shards and re-balancing the cluster. It also allows management - * of shards by their state. 
- */ -public class ShardsAllocators extends AbstractComponent implements ShardsAllocator { - - private final GatewayAllocator gatewayAllocator; - private final ShardsAllocator allocator; - - public ShardsAllocators(GatewayAllocator allocator) { - this(Settings.Builder.EMPTY_SETTINGS, allocator); - } - - public ShardsAllocators(Settings settings, GatewayAllocator allocator) { - this(settings, allocator, new BalancedShardsAllocator(settings)); - } - - @Inject - public ShardsAllocators(Settings settings, GatewayAllocator gatewayAllocator, ShardsAllocator allocator) { - super(settings); - this.gatewayAllocator = gatewayAllocator; - this.allocator = allocator; - } - - @Override - public void applyStartedShards(StartedRerouteAllocation allocation) { - gatewayAllocator.applyStartedShards(allocation); - allocator.applyStartedShards(allocation); - } - - @Override - public void applyFailedShards(FailedRerouteAllocation allocation) { - gatewayAllocator.applyFailedShards(allocation); - allocator.applyFailedShards(allocation); - } - - @Override - public boolean allocateUnassigned(RoutingAllocation allocation) { - boolean changed = false; - changed |= gatewayAllocator.allocateUnassigned(allocation); - changed |= allocator.allocateUnassigned(allocation); - return changed; - } - - protected long nanoTime() { - return System.nanoTime(); - } - - @Override - public boolean rebalance(RoutingAllocation allocation) { - if (allocation.hasPendingAsyncFetch() == false) { - /* - * see https://github.com/elastic/elasticsearch/issues/14387 - * if we allow rebalance operations while we are still fetching shard store data - * we might end up with unnecessary rebalance operations which can be super confusion/frustrating - * since once the fetches come back we might just move all the shards back again. - * Therefore we only do a rebalance if we have fetched all information. 
- */ - return allocator.rebalance(allocation); - } else { - logger.debug("skipping rebalance due to in-flight shard/store fetches"); - return false; - } - } - - @Override - public boolean moveShards(RoutingAllocation allocation) { - return allocator.moveShards(allocation); - } -} diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java index 24635a980a7b..42f0e3a06014 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java @@ -48,19 +48,7 @@ public class ClusterModuleTests extends ModuleTestCase { static class FakeShardsAllocator implements ShardsAllocator { @Override - public void applyStartedShards(StartedRerouteAllocation allocation) {} - @Override - public void applyFailedShards(FailedRerouteAllocation allocation) {} - @Override - public boolean allocateUnassigned(RoutingAllocation allocation) { - return false; - } - @Override - public boolean rebalance(RoutingAllocation allocation) { - return false; - } - @Override - public boolean moveShards(RoutingAllocation allocation) { + public boolean allocate(RoutingAllocation allocation) { return false; } } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java index 707129578c95..56a66b52d6f4 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; import 
org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator; -import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; @@ -311,29 +310,9 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { public void testNoRebalanceOnPrimaryOverload() { Settings.Builder settings = settingsBuilder(); AllocationService strategy = new AllocationService(settings.build(), randomAllocationDeciders(settings.build(), - new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), getRandom()), new ShardsAllocators(settings.build(), + new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), getRandom()), NoopGatewayAllocator.INSTANCE, new ShardsAllocator() { - @Override - public boolean rebalance(RoutingAllocation allocation) { - return false; - } - - @Override - public boolean moveShards(RoutingAllocation allocation) { - return false; - } - - @Override - public void applyStartedShards(StartedRerouteAllocation allocation) { - - - } - - @Override - public void applyFailedShards(FailedRerouteAllocation allocation) { - } - /* * // this allocator tries to rebuild this scenario where a rebalance is * // triggered solely by the primary overload on node [1] where a shard @@ -354,9 +333,8 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { --------[test][2], node[3], [P], s[STARTED] --------[test][3], node[3], [P], s[STARTED] ---- unassigned - */ - @Override - public boolean allocateUnassigned(RoutingAllocation allocation) { + */ + public boolean allocate(RoutingAllocation allocation) { RoutingNodes.UnassignedShards unassigned = allocation.routingNodes().unassigned(); boolean changed = !unassigned.isEmpty(); ShardRouting[] drain = unassigned.drain(); @@ 
-403,7 +381,7 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { } return changed; } - }), EmptyClusterInfoService.INSTANCE); + }, EmptyClusterInfoService.INSTANCE); MetaData.Builder metaDataBuilder = MetaData.builder(); RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); IndexMetaData.Builder indexMeta = IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(5).numberOfReplicas(1); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index 4e5be0f26b77..813bee8f80eb 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -36,7 +36,7 @@ import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; -import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators; +import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; @@ -333,7 +333,7 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { AllocationDeciders allocationDeciders = new AllocationDeciders(Settings.EMPTY, new AllocationDecider[] {new NodeVersionAllocationDecider(Settings.EMPTY)}); AllocationService strategy = new MockAllocationService(Settings.EMPTY, allocationDeciders, - new ShardsAllocators(Settings.EMPTY, NoopGatewayAllocator.INSTANCE), 
EmptyClusterInfoService.INSTANCE); + NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); RoutingAllocation.Result result = strategy.reroute(state, new AllocationCommands(), true); // the two indices must stay as is, the replicas cannot move to oldNode2 because versions don't match state = ClusterState.builder(state).routingResult(result).build(); @@ -363,7 +363,7 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { new NodeVersionAllocationDecider(Settings.EMPTY)}); AllocationService strategy = new MockAllocationService(Settings.EMPTY, allocationDeciders, - new ShardsAllocators(Settings.EMPTY, NoopGatewayAllocator.INSTANCE), EmptyClusterInfoService.INSTANCE); + NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); RoutingAllocation.Result result = strategy.reroute(state, new AllocationCommands(), true); // Make sure that primary shards are only allocated on the new node diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java index abc561a0916f..0bdab7a11588 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java @@ -30,7 +30,7 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; -import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators; +import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import 
org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; @@ -59,7 +59,7 @@ public class RandomAllocationDeciderTests extends ESAllocationTestCase { RandomAllocationDecider randomAllocationDecider = new RandomAllocationDecider(getRandom()); AllocationService strategy = new AllocationService(settingsBuilder().build(), new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList(new SameShardAllocationDecider(Settings.EMPTY), new ReplicaAfterPrimaryActiveAllocationDecider(Settings.EMPTY), - randomAllocationDecider))), new ShardsAllocators(NoopGatewayAllocator.INSTANCE), EmptyClusterInfoService.INSTANCE); + randomAllocationDecider))), NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); int indices = scaledRandomIntBetween(1, 20); Builder metaBuilder = MetaData.builder(); int maxNumReplicas = 1; diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index 0855263dd065..928756fec015 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -39,7 +39,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators; +import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; import org.elasticsearch.cluster.routing.allocation.command.AllocationCommand; import 
org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; @@ -65,10 +65,6 @@ import static org.hamcrest.Matchers.nullValue; public class DiskThresholdDeciderTests extends ESAllocationTestCase { - private static ShardsAllocators makeShardsAllocators() { - return new ShardsAllocators(NoopGatewayAllocator.INSTANCE); - } - public void testDiskThreshold() { Settings diskSettings = settingsBuilder() .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), true) @@ -109,7 +105,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, makeShardsAllocators(), cis); + .build(), deciders, NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), cis); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) @@ -194,7 +190,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, makeShardsAllocators(), cis); + .build(), deciders, NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), cis); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); @@ -225,7 +221,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { 
.put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, makeShardsAllocators(), cis); + .build(), deciders, NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), cis); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); @@ -305,7 +301,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, makeShardsAllocators(), cis); + .build(), deciders, NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), cis); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(2)) @@ -362,7 +358,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, makeShardsAllocators(), cis); + .build(), deciders, NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), cis); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); @@ -429,7 +425,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put("cluster.routing.allocation.node_concurrent_recoveries", 
10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, makeShardsAllocators(), cis); + .build(), deciders, NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), cis); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); @@ -460,7 +456,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, makeShardsAllocators(), cis); + .build(), deciders, NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), cis); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); @@ -569,7 +565,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, makeShardsAllocators(), cis); + .build(), deciders, NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), cis); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0)) @@ -637,7 +633,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put("cluster.routing.allocation.node_concurrent_recoveries", 10) 
.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, makeShardsAllocators(), cis); + .build(), deciders, NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), cis); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0)) @@ -740,7 +736,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, makeShardsAllocators(), cis); + .build(), deciders, NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), cis); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) @@ -902,7 +898,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, makeShardsAllocators(), cis); + .build(), deciders, NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), cis); // Ensure that the reroute call doesn't alter the routing table, since the first primary is relocating away // and therefor we will have sufficient disk space on node1. 
RoutingAllocation.Result result = strategy.reroute(clusterState, "reroute"); @@ -1003,7 +999,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, makeShardsAllocators(), cis); + .build(), deciders, NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), cis); RoutingAllocation.Result result = strategy.reroute(clusterState, "reroute"); assertThat(result.routingTable().index("test").getShards().get(0).primaryShard().state(), equalTo(STARTED)); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java index 0ca261cbf658..67501d55a956 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java @@ -492,7 +492,7 @@ public class NodeJoinControllerTests extends ESTestCase { static class NoopAllocationService extends AllocationService { public NoopAllocationService(Settings settings) { - super(settings, null, null, null); + super(settings, null, null, null, null); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java index f653819c1402..1a38e32cf1af 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java @@ -33,7 +33,8 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import 
org.elasticsearch.cluster.routing.allocation.StartedRerouteAllocation; -import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators; +import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; +import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; @@ -79,19 +80,19 @@ public abstract class ESAllocationTestCase extends ESTestCase { public static MockAllocationService createAllocationService(Settings settings, ClusterSettings clusterSettings, Random random) { return new MockAllocationService(settings, randomAllocationDeciders(settings, clusterSettings, random), - new ShardsAllocators(settings, NoopGatewayAllocator.INSTANCE), EmptyClusterInfoService.INSTANCE); + NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(settings), EmptyClusterInfoService.INSTANCE); } public static MockAllocationService createAllocationService(Settings settings, ClusterInfoService clusterInfoService) { return new MockAllocationService(settings, randomAllocationDeciders(settings, new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), getRandom()), - new ShardsAllocators(settings, NoopGatewayAllocator.INSTANCE), clusterInfoService); + NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(settings), clusterInfoService); } - public static MockAllocationService createAllocationService(Settings settings, GatewayAllocator allocator) { + public static MockAllocationService createAllocationService(Settings settings, GatewayAllocator gatewayAllocator) { return new MockAllocationService(settings, randomAllocationDeciders(settings, new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), getRandom()), - new 
ShardsAllocators(settings, allocator), EmptyClusterInfoService.INSTANCE); + gatewayAllocator, new BalancedShardsAllocator(settings), EmptyClusterInfoService.INSTANCE); } @@ -193,8 +194,9 @@ public abstract class ESAllocationTestCase extends ESTestCase { private Long nanoTimeOverride = null; - public MockAllocationService(Settings settings, AllocationDeciders allocationDeciders, ShardsAllocators shardsAllocators, ClusterInfoService clusterInfoService) { - super(settings, allocationDeciders, shardsAllocators, clusterInfoService); + public MockAllocationService(Settings settings, AllocationDeciders allocationDeciders, GatewayAllocator gatewayAllocator, + ShardsAllocator shardsAllocator, ClusterInfoService clusterInfoService) { + super(settings, allocationDeciders, gatewayAllocator, shardsAllocator, clusterInfoService); } public void setNanoTimeOverride(long nanoTime) { From 64e84dcc76660f3d62c0901137fb01980891f0f5 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 7 Mar 2016 17:21:41 +0100 Subject: [PATCH 175/320] Reuse shard model across 3 phases in BalancedShardsAllocator --- .../cluster/routing/RoutingNodes.java | 7 + .../allocator/BalancedShardsAllocator.java | 486 ++++++++---------- .../allocation/allocator/ShardsAllocator.java | 9 +- 3 files changed, 228 insertions(+), 274 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java index 4f2f9d06097c..a6ef564904c3 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java @@ -597,6 +597,13 @@ public class RoutingNodes implements Iterable { } + /** + * Returns the number of routing nodes + */ + public int size() { + return nodesToShards.size(); + } + public static final class UnassignedShards implements Iterable { private final RoutingNodes nodes; diff --git 
a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 3e5b0847b0ad..6377e06e245a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.routing.allocation.allocator; -import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.IntroSorter; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -28,9 +27,7 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; -import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.routing.allocation.StartedRerouteAllocation; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; @@ -42,18 +39,14 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.PriorityComparator; -import java.util.ArrayList; -import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.IdentityHashMap; import java.util.Iterator; -import java.util.List; import java.util.Map; import java.util.Set; -import java.util.function.Predicate; import static 
org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; @@ -104,24 +97,14 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards @Override public boolean allocate(RoutingAllocation allocation) { + if (allocation.routingNodes().size() == 0) { + /* with no nodes this is pointless */ + return false; + } final Balancer balancer = new Balancer(logger, allocation, weightFunction, threshold); - boolean changed = false; - if (allocation.routingNodes().unassigned().size() > 0) { - changed |= balancer.allocateUnassigned(); - } + boolean changed = balancer.allocateUnassigned(); changed |= balancer.moveShards(); - if (allocation.hasPendingAsyncFetch() == false) { - /* - * see https://github.com/elastic/elasticsearch/issues/14387 - * if we allow rebalance operations while we are still fetching shard store data - * we might end up with unnecessary rebalance operations which can be super confusion/frustrating - * since once the fetches come back we might just move all the shards back again. - * Therefore we only do a rebalance if we have fetched all information. 
- */ - changed |= balancer.balance(); - } else { - logger.debug("skipping rebalance due to in-flight shard/store fetches"); - } + changed |= balancer.balance(); return changed; } @@ -202,8 +185,8 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } private float weight(Balancer balancer, ModelNode node, String index, int numAdditionalShards) { - final float weightShard = (node.numShards() + numAdditionalShards - balancer.avgShardsPerNode()); - final float weightIndex = (node.numShards(index) + numAdditionalShards - balancer.avgShardsPerNode(index)); + final float weightShard = node.numShards() + numAdditionalShards - balancer.avgShardsPerNode(); + final float weightIndex = node.numShards(index) + numAdditionalShards - balancer.avgShardsPerNode(index); return theta0 * weightShard + theta1 * weightIndex; } @@ -215,7 +198,6 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards public static class Balancer { private final ESLogger logger; private final Map nodes = new HashMap<>(); - private final HashSet indices = new HashSet<>(); private final RoutingAllocation allocation; private final RoutingNodes routingNodes; private final WeightFunction weight; @@ -224,19 +206,15 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards private final MetaData metaData; private final float avgShardsPerNode; - private final Predicate assignedFilter = shard -> shard.assignedToNode(); - public Balancer(ESLogger logger, RoutingAllocation allocation, WeightFunction weight, float threshold) { this.logger = logger; this.allocation = allocation; this.weight = weight; this.threshold = threshold; this.routingNodes = allocation.routingNodes(); - for (RoutingNode node : routingNodes) { - nodes.put(node.nodeId(), new ModelNode(node.nodeId())); - } metaData = routingNodes.metaData(); - avgShardsPerNode = ((float) metaData.totalNumberOfShards()) / nodes.size(); + avgShardsPerNode = ((float) 
metaData.totalNumberOfShards()) / routingNodes.size(); + buildModelFromAssigned(); } /** @@ -270,17 +248,6 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards return new NodeSorter(nodesArray(), weight, this); } - private boolean initialize(RoutingNodes routing, RoutingNodes.UnassignedShards unassigned) { - if (logger.isTraceEnabled()) { - logger.trace("Start distributing Shards"); - } - for (ObjectCursor index : allocation.routingTable().indicesRouting().keys()) { - indices.add(index.value); - } - buildModelFromAssigned(routing.shards(assignedFilter)); - return allocateUnassigned(unassigned); - } - private static float absDelta(float lower, float higher) { assert higher >= lower : higher + " lt " + lower +" but was expected to be gte"; return Math.abs(higher - lower); @@ -294,12 +261,36 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } /** - * Allocates all possible unassigned shards + * Balances the nodes on the cluster model according to the weight function. + * The actual balancing is delegated to {@link #balanceByWeights()} + * * @return true if the current configuration has been * changed, otherwise false */ - final boolean allocateUnassigned() { - return balance(true); + private boolean balance() { + if (logger.isTraceEnabled()) { + logger.trace("Start balancing cluster"); + } + if (allocation.hasPendingAsyncFetch()) { + /* + * see https://github.com/elastic/elasticsearch/issues/14387 + * if we allow rebalance operations while we are still fetching shard store data + * we might end up with unnecessary rebalance operations which can be super confusion/frustrating + * since once the fetches come back we might just move all the shards back again. + * Therefore we only do a rebalance if we have fetched all information. 
+ */ + logger.debug("skipping rebalance due to in-flight shard/store fetches"); + return false; + } + if (allocation.deciders().canRebalance(allocation).type() != Type.YES) { + logger.trace("skipping rebalance as it is disabled"); + return false; + } + if (nodes.size() < 2) { /* skip if we only have one node */ + logger.trace("skipping rebalance as single node only"); + return false; + } + return balanceByWeights(); } /** @@ -316,120 +307,100 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards * @return true if the current configuration has been * changed, otherwise false */ - public boolean balance() { - return balance(false); - } + private boolean balanceByWeights() { + boolean changed = false; + final NodeSorter sorter = newNodeSorter(); + final AllocationDeciders deciders = allocation.deciders(); + final ModelNode[] modelNodes = sorter.modelNodes; + final float[] weights = sorter.weights; + for (String index : buildWeightOrderedIndices(sorter)) { + IndexMetaData indexMetaData = metaData.index(index); - private boolean balance(boolean onlyAssign) { - if (this.nodes.isEmpty()) { - /* with no nodes this is pointless */ - return false; - } - if (logger.isTraceEnabled()) { - if (onlyAssign) { - logger.trace("Start balancing cluster"); - } else { - logger.trace("Start assigning unassigned shards"); + // find nodes that have a shard of this index or where shards of this index are allowed to stay + // move these nodes to the front of modelNodes so that we can only balance based on these nodes + int relevantNodes = 0; + for (int i = 0; i < modelNodes.length; i++) { + ModelNode modelNode = modelNodes[i]; + if (modelNode.getIndex(index) != null + || deciders.canAllocate(indexMetaData, modelNode.getRoutingNode(), allocation).type() != Type.NO) { + // swap nodes at position i and relevantNodes + modelNodes[i] = modelNodes[relevantNodes]; + modelNodes[relevantNodes] = modelNode; + relevantNodes++; + } } - } - final 
RoutingNodes.UnassignedShards unassigned = routingNodes.unassigned(); - boolean changed = initialize(routingNodes, unassigned); - if (onlyAssign == false && changed == false && allocation.deciders().canRebalance(allocation).type() == Type.YES) { - NodeSorter sorter = newNodeSorter(); - if (nodes.size() > 1) { /* skip if we only have one node */ - AllocationDeciders deciders = allocation.deciders(); - final ModelNode[] modelNodes = sorter.modelNodes; - final float[] weights = sorter.weights; - for (String index : buildWeightOrderedIndices(sorter)) { - IndexMetaData indexMetaData = metaData.index(index); - // find nodes that have a shard of this index or where shards of this index are allowed to stay - // move these nodes to the front of modelNodes so that we can only balance based on these nodes - int relevantNodes = 0; - for (int i = 0; i < modelNodes.length; i++) { - ModelNode modelNode = modelNodes[i]; - if (modelNode.getIndex(index) != null - || deciders.canAllocate(indexMetaData, modelNode.getRoutingNode(routingNodes), allocation).type() != Type.NO) { - // swap nodes at position i and relevantNodes - modelNodes[i] = modelNodes[relevantNodes]; - modelNodes[relevantNodes] = modelNode; - relevantNodes++; + if (relevantNodes < 2) { + continue; + } + + sorter.reset(index, 0, relevantNodes); + int lowIdx = 0; + int highIdx = relevantNodes - 1; + while (true) { + final ModelNode minNode = modelNodes[lowIdx]; + final ModelNode maxNode = modelNodes[highIdx]; + advance_range: + if (maxNode.numShards(index) > 0) { + final float delta = absDelta(weights[lowIdx], weights[highIdx]); + if (lessThan(delta, threshold)) { + if (lowIdx > 0 && highIdx-1 > 0 // is there a chance for a higher delta? + && (absDelta(weights[0], weights[highIdx-1]) > threshold) // check if we need to break at all + ) { + /* This is a special case if allocations from the "heaviest" to the "lighter" nodes is not possible + * due to some allocation decider restrictions like zone awareness. 
if one zone has for instance + * less nodes than another zone. so one zone is horribly overloaded from a balanced perspective but we + * can't move to the "lighter" shards since otherwise the zone would go over capacity. + * + * This break jumps straight to the condition below were we start moving from the high index towards + * the low index to shrink the window we are considering for balance from the other direction. + * (check shrinking the window from MAX to MIN) + * See #3580 + */ + break advance_range; } + if (logger.isTraceEnabled()) { + logger.trace("Stop balancing index [{}] min_node [{}] weight: [{}] max_node [{}] weight: [{}] delta: [{}]", + index, maxNode.getNodeId(), weights[highIdx], minNode.getNodeId(), weights[lowIdx], delta); + } + break; } - - if (relevantNodes < 2) { + if (logger.isTraceEnabled()) { + logger.trace("Balancing from node [{}] weight: [{}] to node [{}] weight: [{}] delta: [{}]", + maxNode.getNodeId(), weights[highIdx], minNode.getNodeId(), weights[lowIdx], delta); + } + /* pass the delta to the replication function to prevent relocations that only swap the weights of the two nodes. 
+ * a relocation must bring us closer to the balance if we only achieve the same delta the relocation is useless */ + if (tryRelocateShard(minNode, maxNode, index, delta)) { + /* + * TODO we could be a bit smarter here, we don't need to fully sort necessarily + * we could just find the place to insert linearly but the win might be minor + * compared to the added complexity + */ + weights[lowIdx] = sorter.weight(modelNodes[lowIdx]); + weights[highIdx] = sorter.weight(modelNodes[highIdx]); + sorter.sort(0, relevantNodes); + lowIdx = 0; + highIdx = relevantNodes - 1; + changed = true; continue; } - - sorter.reset(index, 0, relevantNodes); - int lowIdx = 0; - int highIdx = relevantNodes - 1; - while (true) { - final ModelNode minNode = modelNodes[lowIdx]; - final ModelNode maxNode = modelNodes[highIdx]; - advance_range: - if (maxNode.numShards(index) > 0) { - final float delta = absDelta(weights[lowIdx], weights[highIdx]); - if (lessThan(delta, threshold)) { - if (lowIdx > 0 && highIdx-1 > 0 // is there a chance for a higher delta? - && (absDelta(weights[0], weights[highIdx-1]) > threshold) // check if we need to break at all - ) { - /* This is a special case if allocations from the "heaviest" to the "lighter" nodes is not possible - * due to some allocation decider restrictions like zone awareness. if one zone has for instance - * less nodes than another zone. so one zone is horribly overloaded from a balanced perspective but we - * can't move to the "lighter" shards since otherwise the zone would go over capacity. - * - * This break jumps straight to the condition below were we start moving from the high index towards - * the low index to shrink the window we are considering for balance from the other direction. 
- * (check shrinking the window from MAX to MIN) - * See #3580 - */ - break advance_range; - } - if (logger.isTraceEnabled()) { - logger.trace("Stop balancing index [{}] min_node [{}] weight: [{}] max_node [{}] weight: [{}] delta: [{}]", - index, maxNode.getNodeId(), weights[highIdx], minNode.getNodeId(), weights[lowIdx], delta); - } - break; - } - if (logger.isTraceEnabled()) { - logger.trace("Balancing from node [{}] weight: [{}] to node [{}] weight: [{}] delta: [{}]", - maxNode.getNodeId(), weights[highIdx], minNode.getNodeId(), weights[lowIdx], delta); - } - /* pass the delta to the replication function to prevent relocations that only swap the weights of the two nodes. - * a relocation must bring us closer to the balance if we only achieve the same delta the relocation is useless */ - if (tryRelocateShard(minNode, maxNode, index, delta)) { - /* - * TODO we could be a bit smarter here, we don't need to fully sort necessarily - * we could just find the place to insert linearly but the win might be minor - * compared to the added complexity - */ - weights[lowIdx] = sorter.weight(modelNodes[lowIdx]); - weights[highIdx] = sorter.weight(modelNodes[highIdx]); - sorter.sort(0, relevantNodes); - lowIdx = 0; - highIdx = relevantNodes - 1; - changed = true; - continue; - } - } - if (lowIdx < highIdx - 1) { - /* Shrinking the window from MIN to MAX - * we can't move from any shard from the min node lets move on to the next node - * and see if the threshold still holds. 
We either don't have any shard of this - * index on this node of allocation deciders prevent any relocation.*/ - lowIdx++; - } else if (lowIdx > 0) { - /* Shrinking the window from MAX to MIN - * now we go max to min since obviously we can't move anything to the max node - * lets pick the next highest */ - lowIdx = 0; - highIdx--; - } else { - /* we are done here, we either can't relocate anymore or we are balanced */ - break; - } - } + } + if (lowIdx < highIdx - 1) { + /* Shrinking the window from MIN to MAX + * we can't move from any shard from the min node lets move on to the next node + * and see if the threshold still holds. We either don't have any shard of this + * index on this node of allocation deciders prevent any relocation.*/ + lowIdx++; + } else if (lowIdx > 0) { + /* Shrinking the window from MAX to MIN + * now we go max to min since obviously we can't move anything to the max node + * lets pick the next highest */ + lowIdx = 0; + highIdx--; + } else { + /* we are done here, we either can't relocate anymore or we are balanced */ + break; } } } @@ -450,7 +421,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards * to the nodes we relocated them from. */ private String[] buildWeightOrderedIndices(NodeSorter sorter) { - final String[] indices = this.indices.toArray(new String[this.indices.size()]); + final String[] indices = allocation.routingTable().indicesRouting().keys().toArray(String.class); final float[] deltas = new float[indices.length]; for (int i = 0; i < deltas.length; i++) { sorter.reset(indices[i]); @@ -502,20 +473,16 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards * @return true if the allocation has changed, otherwise false */ public boolean moveShards() { - if (nodes.isEmpty()) { - /* with no nodes this is pointless */ - return false; - } - - // Create a copy of the started shards interleaving between nodes, and check if they can remain. 
In the presence of throttling + // Iterate over the started shards interleaving between nodes, and check if they can remain. In the presence of throttling // shard movements, the goal of this iteration order is to achieve a fairer movement of shards from the nodes that are // offloading the shards. - List shards = new ArrayList<>(); + boolean changed = false; int index = 0; boolean found = true; + final NodeSorter sorter = newNodeSorter(); while (found) { found = false; - for (RoutingNode routingNode : routingNodes) { + for (RoutingNode routingNode : allocation.routingNodes()) { if (index >= routingNode.size()) { continue; } @@ -523,64 +490,52 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards ShardRouting shardRouting = routingNode.get(index); // we can only move started shards... if (shardRouting.started()) { - shards.add(shardRouting); + final ModelNode sourceNode = nodes.get(shardRouting.currentNodeId()); + assert sourceNode != null && sourceNode.containsShard(shardRouting); + Decision decision = allocation.deciders().canRemain(shardRouting, routingNode, allocation); + if (decision.type() == Decision.Type.NO) { + changed |= moveShard(sorter, shardRouting, sourceNode, routingNode); + } } } index++; } - if (shards.isEmpty()) { - return false; - } - final RoutingNodes.UnassignedShards unassigned = routingNodes.unassigned(); - boolean changed = initialize(routingNodes, unassigned); - if (changed == false) { - final NodeSorter sorter = newNodeSorter(); - final ModelNode[] modelNodes = sorter.modelNodes; - for (ShardRouting shardRouting : shards) { - final ModelNode sourceNode = nodes.get(shardRouting.currentNodeId()); - assert sourceNode != null && sourceNode.containsShard(shardRouting); - final RoutingNode routingNode = sourceNode.getRoutingNode(routingNodes); - Decision decision = allocation.deciders().canRemain(shardRouting, routingNode, allocation); - if (decision.type() == Decision.Type.NO) { - logger.debug("[{}][{}] allocated 
on [{}], but can no longer be allocated on it, moving...", shardRouting.index(), shardRouting.id(), routingNode.node()); - sorter.reset(shardRouting.getIndexName()); - /* - * the sorter holds the minimum weight node first for the shards index. - * We now walk through the nodes until we find a node to allocate the shard. - * This is not guaranteed to be balanced after this operation we still try best effort to - * allocate on the minimal eligible node. - */ - boolean moved = false; - for (ModelNode currentNode : modelNodes) { - if (currentNode == sourceNode) { - continue; - } - RoutingNode target = currentNode.getRoutingNode(routingNodes); - Decision allocationDecision = allocation.deciders().canAllocate(shardRouting, target, allocation); - Decision rebalanceDecision = allocation.deciders().canRebalance(shardRouting, allocation); - if (allocationDecision.type() == Type.YES && rebalanceDecision.type() == Type.YES) { // TODO maybe we can respect throttling here too? - Decision sourceDecision = sourceNode.removeShard(shardRouting); - ShardRouting targetRelocatingShard = routingNodes.relocate(shardRouting, target.nodeId(), allocation.clusterInfo().getShardSize(shardRouting, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); - // re-add (now relocating shard) to source node - sourceNode.addShard(shardRouting, sourceDecision); - Decision targetDecision = new Decision.Multi().add(allocationDecision).add(rebalanceDecision); - currentNode.addShard(targetRelocatingShard, targetDecision); - if (logger.isTraceEnabled()) { - logger.trace("Moved shard [{}] to node [{}]", shardRouting, routingNode.node()); - } - moved = true; - changed = true; - break; - } - } - if (moved == false) { - logger.debug("[{}][{}] can't move", shardRouting.index(), shardRouting.id()); + return changed; + } + + /** + * Move started shard to the minimal eligible node with respect to the weight function + * + * @return true if the shard was moved successfully, otherwise false + */ + private boolean 
moveShard(NodeSorter sorter, ShardRouting shardRouting, ModelNode sourceNode, RoutingNode routingNode) { + logger.debug("[{}][{}] allocated on [{}], but can no longer be allocated on it, moving...", shardRouting.index(), shardRouting.id(), routingNode.node()); + sorter.reset(shardRouting.getIndexName()); + /* + * the sorter holds the minimum weight node first for the shards index. + * We now walk through the nodes until we find a node to allocate the shard. + * This is not guaranteed to be balanced after this operation we still try best effort to + * allocate on the minimal eligible node. + */ + for (ModelNode currentNode : sorter.modelNodes) { + if (currentNode != sourceNode) { + RoutingNode target = currentNode.getRoutingNode(); + Decision allocationDecision = allocation.deciders().canAllocate(shardRouting, target, allocation); + Decision rebalanceDecision = allocation.deciders().canRebalance(shardRouting, allocation); + if (allocationDecision.type() == Type.YES && rebalanceDecision.type() == Type.YES) { // TODO maybe we can respect throttling here too? + sourceNode.removeShard(shardRouting); + ShardRouting targetRelocatingShard = routingNodes.relocate(shardRouting, target.nodeId(), allocation.clusterInfo().getShardSize(shardRouting, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); + currentNode.addShard(targetRelocatingShard); + if (logger.isTraceEnabled()) { + logger.trace("Moved shard [{}] to node [{}]", shardRouting, routingNode.node()); } + return true; } } } - return changed; + logger.debug("[{}][{}] can't move", shardRouting.index(), shardRouting.id()); + return false; } /** @@ -592,18 +547,19 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards * on the target node which we respect during the allocation / balancing * process. In short, this method recreates the status-quo in the cluster. 
*/ - private void buildModelFromAssigned(Iterable shards) { - for (ShardRouting shard : shards) { - assert shard.assignedToNode(); - /* we skip relocating shards here since we expect an initializing shard with the same id coming in */ - if (shard.state() == RELOCATING) { - continue; - } - ModelNode node = nodes.get(shard.currentNodeId()); - assert node != null; - node.addShard(shard, Decision.single(Type.YES, "Already allocated on node", node.getNodeId())); - if (logger.isTraceEnabled()) { - logger.trace("Assigned shard [{}] to node [{}]", shard, node.getNodeId()); + private void buildModelFromAssigned() { + for (RoutingNode rn : routingNodes) { + ModelNode node = new ModelNode(rn); + nodes.put(rn.nodeId(), node); + for (ShardRouting shard : rn) { + assert rn.nodeId().equals(shard.currentNodeId()); + /* we skip relocating shards here since we expect an initializing shard with the same id coming in */ + if (shard.state() != RELOCATING) { + node.addShard(shard); + if (logger.isTraceEnabled()) { + logger.trace("Assigned shard [{}] to node [{}]", shard, node.getNodeId()); + } + } } } } @@ -611,8 +567,11 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards /** * Allocates all given shards on the minimal eligible node for the shards index * with respect to the weight function. All given shards must be unassigned. 
+ * @return true if the current configuration has been + * changed, otherwise false */ - private boolean allocateUnassigned(RoutingNodes.UnassignedShards unassigned) { + private boolean allocateUnassigned() { + RoutingNodes.UnassignedShards unassigned = routingNodes.unassigned(); assert !nodes.isEmpty(); if (logger.isTraceEnabled()) { logger.trace("Start allocating unassigned shards"); @@ -656,7 +615,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards int secondaryLength = 0; int primaryLength = primary.length; ArrayUtil.timSort(primary, comparator); - final Set throttledNodes = Collections.newSetFromMap(new IdentityHashMap()); + final Set throttledNodes = Collections.newSetFromMap(new IdentityHashMap<>()); do { for (int i = 0; i < primaryLength; i++) { ShardRouting shard = primary[i]; @@ -694,7 +653,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards * don't check deciders */ if (currentWeight <= minWeight) { - Decision currentDecision = deciders.canAllocate(shard, node.getRoutingNode(routingNodes), allocation); + Decision currentDecision = deciders.canAllocate(shard, node.getRoutingNode(), allocation); NOUPDATE: if (currentDecision.type() == Type.YES || currentDecision.type() == Type.THROTTLE) { if (currentWeight == minWeight) { @@ -735,7 +694,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } assert decision != null && minNode != null || decision == null && minNode == null; if (minNode != null) { - minNode.addShard(shard, decision); + minNode.addShard(shard); if (decision.type() == Type.YES) { if (logger.isTraceEnabled()) { logger.trace("Assigned shard [{}] to [{}]", shard, minNode.getNodeId()); @@ -744,7 +703,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards changed = true; continue; // don't add to ignoreUnassigned } else { - final RoutingNode node = minNode.getRoutingNode(routingNodes); + final RoutingNode node = 
minNode.getRoutingNode(); if (deciders.canAllocate(node, allocation).type() != Type.YES) { if (logger.isTraceEnabled()) { logger.trace("Can not allocate on node [{}] remove from round decision [{}]", node, decision.type()); @@ -790,10 +749,10 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } ShardRouting candidate = null; final AllocationDeciders deciders = allocation.deciders(); - for (ShardRouting shard : index.getAllShards()) { + for (ShardRouting shard : index) { if (shard.started()) { // skip initializing, unassigned and relocating shards we can't relocate them anyway - Decision allocationDecision = deciders.canAllocate(shard, minNode.getRoutingNode(routingNodes), allocation); + Decision allocationDecision = deciders.canAllocate(shard, minNode.getRoutingNode(), allocation); Decision rebalanceDecision = deciders.canRebalance(shard, allocation); if (((allocationDecision.type() == Type.YES) || (allocationDecision.type() == Type.THROTTLE)) && ((rebalanceDecision.type() == Type.YES) || (rebalanceDecision.type() == Type.THROTTLE))) { @@ -814,24 +773,17 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } if (candidate != null) { - /* allocate on the model even if not throttled */ maxNode.removeShard(candidate); - minNode.addShard(candidate, decision); + minNode.addShard(candidate); if (decision.type() == Type.YES) { /* only allocate on the cluster if we are not throttled */ if (logger.isTraceEnabled()) { logger.trace("Relocate shard [{}] from node [{}] to node [{}]", candidate, maxNode.getNodeId(), minNode.getNodeId()); } - /* now allocate on the cluster - if we are started we need to relocate the shard */ - if (candidate.started()) { - routingNodes.relocate(candidate, minNode.getNodeId(), allocation.clusterInfo().getShardSize(candidate, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); - - } else { - routingNodes.initialize(candidate, minNode.getNodeId(), null, 
allocation.clusterInfo().getShardSize(candidate, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); - } + /* now allocate on the cluster */ + routingNodes.relocate(candidate, minNode.getNodeId(), allocation.clusterInfo().getShardSize(candidate, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); return true; - } } } @@ -845,14 +797,12 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } static class ModelNode implements Iterable { - private final String id; private final Map indices = new HashMap<>(); private int numShards = 0; - // lazily calculated - private RoutingNode routingNode; + private final RoutingNode routingNode; - public ModelNode(String id) { - this.id = id; + public ModelNode(RoutingNode routingNode) { + this.routingNode = routingNode; } public ModelIndex getIndex(String indexId) { @@ -860,13 +810,10 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } public String getNodeId() { - return id; + return routingNode.nodeId(); } - public RoutingNode getRoutingNode(RoutingNodes routingNodes) { - if (routingNode == null) { - routingNode = routingNodes.node(id); - } + public RoutingNode getRoutingNode() { return routingNode; } @@ -887,33 +834,31 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards return -1; } - public void addShard(ShardRouting shard, Decision decision) { + public void addShard(ShardRouting shard) { ModelIndex index = indices.get(shard.getIndexName()); if (index == null) { index = new ModelIndex(shard.getIndexName()); indices.put(index.getIndexId(), index); } - index.addShard(shard, decision); + index.addShard(shard); numShards++; } - public Decision removeShard(ShardRouting shard) { + public void removeShard(ShardRouting shard) { ModelIndex index = indices.get(shard.getIndexName()); - Decision removed = null; if (index != null) { - removed = index.removeShard(shard); - if (removed != null && index.numShards() == 0) { + index.removeShard(shard); + if 
(index.numShards() == 0) { indices.remove(shard.getIndexName()); } } numShards--; - return removed; } @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("Node(").append(id).append(")"); + sb.append("Node(").append(routingNode.nodeId()).append(")"); return sb.toString(); } @@ -929,9 +874,9 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } - static final class ModelIndex { + static final class ModelIndex implements Iterable { private final String id; - private final Map shards = new HashMap<>(); + private final Set shards = new HashSet<>(4); // expect few shards of same index to be allocated on same node private int highestPrimary = -1; public ModelIndex(String id) { @@ -941,7 +886,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards public int highestPrimary() { if (highestPrimary == -1) { int maxId = -1; - for (ShardRouting shard : shards.keySet()) { + for (ShardRouting shard : shards) { if (shard.primary()) { maxId = Math.max(maxId, shard.id()); } @@ -959,24 +904,25 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards return shards.size(); } - public Collection getAllShards() { - return shards.keySet(); + @Override + public Iterator iterator() { + return shards.iterator(); } - public Decision removeShard(ShardRouting shard) { + public void removeShard(ShardRouting shard) { highestPrimary = -1; - return shards.remove(shard); + assert shards.contains(shard) : "Shard not allocated on current node: " + shard; + shards.remove(shard); } - public void addShard(ShardRouting shard, Decision decision) { + public void addShard(ShardRouting shard) { highestPrimary = -1; - assert decision != null; - assert !shards.containsKey(shard) : "Shard already allocated on current node: " + shards.get(shard) + " " + shard; - shards.put(shard, decision); + assert !shards.contains(shard) : "Shard already allocated on current node: " + shard; + 
shards.add(shard); } public boolean containsShard(ShardRouting shard) { - return shards.containsKey(shard); + return shards.contains(shard); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocator.java index 2656e2e31673..0bf07e8cba98 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocator.java @@ -25,15 +25,16 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; *

      * A {@link ShardsAllocator} is the main entry point for shard allocation on nodes in the cluster. * The allocator makes basic decision where a shard instance will be allocated, if already allocated instances - * need relocate to other nodes due to node failures or due to rebalancing decisions. + * need to relocate to other nodes due to node failures or due to rebalancing decisions. *

      */ public interface ShardsAllocator { /** - * Assign all unassigned shards to nodes - * Move started shards that can not be allocated to a node anymore - * Rebalancing number of shards on all nodes + * Allocates shards to nodes in the cluster. An implementation of this method should: + * - assign unassigned shards + * - relocate shards that cannot stay on a node anymore + * - relocate shards to find a good shard balance in the cluster * * @param allocation current node allocation * @return true if the allocation has changed, otherwise false From cb2ed50aeba7bced8666880e6bd6825d94a742f1 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Mon, 7 Mar 2016 18:23:33 +0100 Subject: [PATCH 176/320] Remove friction from the mapping changes in 5.0. #16991 This tries to remove friction to upgrade to 5.0 that would be caused by mapping changes: - old ways to specify mapping settings (eg. store: yes instead of store:true) will still work but a deprecation warning will be logged - string mappings that only use the most common options will be upgraded automatically to text/keyword --- .../index/mapper/core/StringFieldMapper.java | 42 ++++- .../index/mapper/core/TypeParsers.java | 76 ++++---- .../core/StringMappingUpgradeTests.java | 177 ++++++++++++++++++ 3 files changed, 256 insertions(+), 39 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/index/mapper/core/StringMappingUpgradeTests.java diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index c4659a6571ec..656d6effcfa6 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -26,6 +26,9 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; +import 
org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -39,9 +42,12 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.internal.AllFieldMapper; import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Set; import static org.apache.lucene.index.IndexOptions.NONE; import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; @@ -52,6 +58,11 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc public static final String CONTENT_TYPE = "string"; private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1; + private static final Set SUPPORTED_PARAMETERS_FOR_AUTO_UPGRADE = new HashSet<>(Arrays.asList( + "type", + // most common parameters, for which the upgrade is straightforward + "index", "store", "doc_values", "omit_norms", "norms", "fields", "copy_to")); + public static class Defaults { public static final MappedFieldType FIELD_TYPE = new StringFieldType(); @@ -130,13 +141,33 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc } public static class TypeParser implements Mapper.TypeParser { + private final DeprecationLogger deprecationLogger; + + public TypeParser() { + ESLogger logger = Loggers.getLogger(getClass()); + this.deprecationLogger = new DeprecationLogger(logger); + } + @Override public Mapper.Builder parse(String fieldName, Map node, ParserContext parserContext) throws MapperParsingException { - // TODO: temporarily disabled to give Kibana time to upgrade to text/keyword mappings - /*if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0)) { + if 
(parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0)) { + // Automatically upgrade simple mappings for ease of upgrade, otherwise fail + if (SUPPORTED_PARAMETERS_FOR_AUTO_UPGRADE.containsAll(node.keySet())) { + deprecationLogger.deprecated("The [string] field is deprecated, please use [text] or [keyword] instead on [{}]", + fieldName); + final Object index = node.remove("index"); + final boolean keyword = index != null && "analyzed".equals(index) == false; + // upgrade the index setting + node.put("index", "no".equals(index) == false); + if (keyword) { + return new KeywordFieldMapper.TypeParser().parse(fieldName, node, parserContext); + } else { + return new TextFieldMapper.TypeParser().parse(fieldName, node, parserContext); + } + } throw new IllegalArgumentException("The [string] type is removed in 5.0. You should now use either a [text] " + "or [keyword] field instead for field [" + fieldName + "]"); - }*/ + } StringFieldMapper.Builder builder = new StringFieldMapper.Builder(fieldName); // hack for the fact that string can't just accept true/false for // the index property and still accepts no/not_analyzed/analyzed @@ -241,11 +272,10 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc int positionIncrementGap, int ignoreAbove, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); - // TODO: temporarily disabled to give Kibana time to upgrade to text/keyword mappings - /*if (Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0)) { + if (Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0)) { throw new IllegalArgumentException("The [string] type is removed in 5.0. 
You should now use either a [text] " + "or [keyword] field instead for field [" + fieldType.name() + "]"); - }*/ + } if (fieldType.tokenized() && fieldType.indexOptions() != NONE && fieldType().hasDocValues()) { throw new MapperParsingException("Field [" + fieldType.name() + "] cannot be analyzed and have doc values"); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java index 15fcd9220e2d..c42de2f611f1 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java @@ -25,7 +25,9 @@ import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLoggerFactory; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -39,11 +41,14 @@ import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.similarity.SimilarityProvider; import org.elasticsearch.index.similarity.SimilarityService; +import java.util.Arrays; import java.util.Collections; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Set; import static org.elasticsearch.common.xcontent.support.XContentMapValues.isArray; import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; @@ -63,10 +68,18 @@ public class TypeParsers { public static final String INDEX_OPTIONS_POSITIONS = "positions"; public static final String INDEX_OPTIONS_OFFSETS = "offsets"; - 
private static boolean nodeBooleanValue(Object node, Mapper.TypeParser.ParserContext parserContext) { - if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0)) { + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(TypeParsers.class)); + private static final Set BOOLEAN_STRINGS = new HashSet<>(Arrays.asList("true", "false")); + + private static boolean nodeBooleanValue(String name, Object node, Mapper.TypeParser.ParserContext parserContext) { + // Hook onto ParseFieldMatcher so that parsing becomes strict when setting index.query.parse.strict + if (parserContext.parseFieldMatcher().isStrict()) { return XContentMapValues.nodeBooleanValue(node); } else { + // TODO: remove this leniency in 6.0 + if (BOOLEAN_STRINGS.contains(node.toString()) == false) { + DEPRECATION_LOGGER.deprecated("Expected a boolean for property [{}] but got [{}]", name, node); + } return XContentMapValues.lenientNodeBooleanValue(node); } } @@ -81,13 +94,13 @@ public class TypeParsers { builder.precisionStep(nodeIntegerValue(propNode)); iterator.remove(); } else if (propName.equals("ignore_malformed")) { - builder.ignoreMalformed(nodeBooleanValue(propNode, parserContext)); + builder.ignoreMalformed(nodeBooleanValue("ignore_malformed", propNode, parserContext)); iterator.remove(); } else if (propName.equals("coerce")) { - builder.coerce(nodeBooleanValue(propNode, parserContext)); + builder.coerce(nodeBooleanValue("coerce", propNode, parserContext)); iterator.remove(); } else if (propName.equals("omit_norms")) { - builder.omitNorms(nodeBooleanValue(propNode, parserContext)); + builder.omitNorms(nodeBooleanValue("omit_norms", propNode, parserContext)); iterator.remove(); } else if (propName.equals("similarity")) { SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString()); @@ -112,16 +125,16 @@ public class TypeParsers { parseTermVector(name, propNode.toString(), builder); iterator.remove(); } else 
if (propName.equals("store_term_vectors")) { - builder.storeTermVectors(nodeBooleanValue(propNode, parserContext)); + builder.storeTermVectors(nodeBooleanValue("store_term_vectors", propNode, parserContext)); iterator.remove(); } else if (propName.equals("store_term_vector_offsets")) { - builder.storeTermVectorOffsets(nodeBooleanValue(propNode, parserContext)); + builder.storeTermVectorOffsets(nodeBooleanValue("store_term_vector_offsets", propNode, parserContext)); iterator.remove(); } else if (propName.equals("store_term_vector_positions")) { - builder.storeTermVectorPositions(nodeBooleanValue(propNode, parserContext)); + builder.storeTermVectorPositions(nodeBooleanValue("store_term_vector_positions", propNode, parserContext)); iterator.remove(); } else if (propName.equals("store_term_vector_payloads")) { - builder.storeTermVectorPayloads(nodeBooleanValue(propNode, parserContext)); + builder.storeTermVectorPayloads(nodeBooleanValue("store_term_vector_payloads", propNode, parserContext)); iterator.remove(); } else if (propName.equals("analyzer")) { NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString()); @@ -199,13 +212,13 @@ public class TypeParsers { builder.index(parseIndex(name, propNode.toString(), parserContext)); iterator.remove(); } else if (propName.equals(DOC_VALUES)) { - builder.docValues(nodeBooleanValue(propNode, parserContext)); + builder.docValues(nodeBooleanValue(DOC_VALUES, propNode, parserContext)); iterator.remove(); } else if (propName.equals("boost")) { builder.boost(nodeFloatValue(propNode)); iterator.remove(); } else if (propName.equals("omit_norms")) { - builder.omitNorms(nodeBooleanValue(propNode, parserContext)); + builder.omitNorms(nodeBooleanValue("omit_norms", propNode, parserContext)); iterator.remove(); } else if (propName.equals("norms")) { final Map properties = nodeMapValue(propNode, "norms"); @@ -227,7 +240,7 @@ public class TypeParsers { builder.indexOptions(nodeIndexOptionValue(propNode)); 
iterator.remove(); } else if (propName.equals("include_in_all")) { - builder.includeInAll(nodeBooleanValue(propNode, parserContext)); + builder.includeInAll(nodeBooleanValue("include_in_all", propNode, parserContext)); iterator.remove(); } else if (propName.equals("similarity")) { SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString()); @@ -353,35 +366,32 @@ public class TypeParsers { } public static boolean parseIndex(String fieldName, String index, Mapper.TypeParser.ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0)) { - switch (index) { - case "true": - return true; - case "false": - return false; - default: + switch (index) { + case "true": + return true; + case "false": + return false; + case "not_analyzed": + case "analyzed": + case "no": + if (parserContext.parseFieldMatcher().isStrict() == false) { + DEPRECATION_LOGGER.deprecated("Expected a boolean for property [index] but got [{}]", index); + return "no".equals(index) == false; + } else { throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true] or [false]"); } - } else { - final String normalizedIndex = Strings.toUnderscoreCase(index); - switch (normalizedIndex) { - case "true": - case "not_analyzed": - case "analyzed": - return true; - case "false": - case "no": - return false; - default: - throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true], [false], [no], [not_analyzed] or [analyzed]"); - } + default: + throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true] or [false]"); } } public static boolean parseStore(String fieldName, String store, Mapper.TypeParser.ParserContext parserContext) throws MapperParsingException { - if 
(parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0)) { + if (parserContext.parseFieldMatcher().isStrict()) { return XContentMapValues.nodeBooleanValue(store); } else { + if (BOOLEAN_STRINGS.contains(store) == false) { + DEPRECATION_LOGGER.deprecated("Expected a boolean for property [store] but got [{}]", store); + } if ("no".equals(store)) { return false; } else if ("yes".equals(store)) { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/StringMappingUpgradeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/StringMappingUpgradeTests.java new file mode 100644 index 000000000000..4b2fe9a71020 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/StringMappingUpgradeTests.java @@ -0,0 +1,177 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper.core; + +import com.carrotsearch.randomizedtesting.generators.RandomPicks; + +import org.apache.lucene.index.IndexOptions; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; + +public class StringMappingUpgradeTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + + public void testUpgradeDefaults() throws IOException { + IndexService indexService = createIndex("test"); + DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "string").endObject().endObject() + .endObject().endObject().string(); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + FieldMapper field = mapper.mappers().getMapper("field"); + assertThat(field, instanceOf(TextFieldMapper.class)); + } + + public void testUpgradeAnalyzedString() throws IOException { + IndexService indexService = createIndex("test"); + DocumentMapperParser parser = 
indexService.mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "string").field("index", "analyzed").endObject().endObject() + .endObject().endObject().string(); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + FieldMapper field = mapper.mappers().getMapper("field"); + assertThat(field, instanceOf(TextFieldMapper.class)); + } + + public void testUpgradeNotAnalyzedString() throws IOException { + IndexService indexService = createIndex("test"); + DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "string") + .field("index", "not_analyzed").endObject().endObject() + .endObject().endObject().string(); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + FieldMapper field = mapper.mappers().getMapper("field"); + assertThat(field, instanceOf(KeywordFieldMapper.class)); + } + + public void testUpgradeNotIndexedString() throws IOException { + IndexService indexService = createIndex("test"); + DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "string").field("index", "no").endObject().endObject() + .endObject().endObject().string(); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + FieldMapper field = mapper.mappers().getMapper("field"); + assertThat(field, instanceOf(KeywordFieldMapper.class)); + assertEquals(IndexOptions.NONE, field.fieldType().indexOptions()); + } + + public void testNotSupportedUpgrade() throws IOException { + IndexService indexService = createIndex("test"); + 
DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "string").field("analyzer", "keyword").endObject().endObject() + .endObject().endObject().string(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new CompressedXContent(mapping))); + assertThat(e.getMessage(), containsString("The [string] type is removed in 5.0")); + } + + public void testUpgradeRandomMapping() throws IOException { + final int iters = 20; + for (int i = 0; i < iters; ++i) { + doTestUpgradeRandomMapping(i); + } + } + + private void doTestUpgradeRandomMapping(int iter) throws IOException { + IndexService indexService; + boolean oldIndex = randomBoolean(); + String indexName = "test" + iter; + if (oldIndex) { + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0) + .build(); + indexService = createIndex(indexName, settings); + } else { + indexService = createIndex(indexName); + } + DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "string"); + boolean keyword = randomBoolean(); + boolean shouldUpgrade = true; + if (keyword) { + mapping.field("index", randomBoolean() ? 
"not_analyzed" : "no"); + } else if (randomBoolean()) { + mapping.field("index", "analyzed"); + } + if (randomBoolean()) { + mapping.field("store", RandomPicks.randomFrom(random(), Arrays.asList("yes", "no", true, false))); + } + if (keyword && randomBoolean()) { + mapping.field("doc_values", randomBoolean()); + } + if (randomBoolean()) { + mapping.field("omit_norms", randomBoolean()); + } + if (randomBoolean()) { + mapping.startObject("fields").startObject("raw").field("type", "keyword").endObject().endObject(); + } + if (randomBoolean()) { + mapping.field("copy_to", "bar"); + } + if (randomBoolean()) { + // this option is not upgraded automatically + mapping.field("index_options", "docs"); + shouldUpgrade = false; + } + mapping.endObject().endObject().endObject().endObject(); + + if (oldIndex == false && shouldUpgrade == false) { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new CompressedXContent(mapping.string()))); + assertThat(e.getMessage(), containsString("The [string] type is removed in 5.0")); + } else { + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); + FieldMapper field = mapper.mappers().getMapper("field"); + if (oldIndex) { + assertThat(field, instanceOf(StringFieldMapper.class)); + } else if (keyword) { + assertThat(field, instanceOf(KeywordFieldMapper.class)); + } else { + assertThat(field, instanceOf(TextFieldMapper.class)); + } + } + } +} From 94aa025b93893eec68cff962d002c23b13751c63 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Fri, 11 Mar 2016 08:46:10 +0100 Subject: [PATCH 177/320] Document breaking change in ClusterHealthResponse in 2.2 --- docs/reference/migration/migrate_2_2.asciidoc | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/docs/reference/migration/migrate_2_2.asciidoc b/docs/reference/migration/migrate_2_2.asciidoc index 8772c1017b06..d6035c83b8ae 100644 --- a/docs/reference/migration/migrate_2_2.asciidoc 
+++ b/docs/reference/migration/migrate_2_2.asciidoc @@ -43,6 +43,21 @@ changed to now route standard output to the journal and standard error to inherit this setting (these are the defaults for systemd). These settings can be modified by editing the `elasticsearch.service` file. +[float] +=== Java Client + +Previously it was possible to iterate over `ClusterHealthResponse` to get information about `ClusterIndexHealth`. +While this is still possible, it requires now iterating over the values returned from `getIndices()`: + +[source,java] +--------------- +ClusterHealthResponse clusterHealthResponse = client.admin().cluster().prepareHealth().get(); +for (Map.Entry index : clusterHealthResponse.getIndices().entrySet()) { + String indexName = index.getKey(); + ClusterIndexHealth health = index.getValue(); +} +--------------- + [float] === Cloud AWS Plugin From a46d2f21c6504c8ff8cb98a2180293994b857a03 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 11 Mar 2016 09:55:21 +0100 Subject: [PATCH 178/320] Fix dynamic mapper bug with deeply nested fields. 
--- .../java/org/elasticsearch/index/mapper/DocumentParser.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 8c8ded9b543b..cf0c0fbba33b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -316,7 +316,8 @@ final class DocumentParser implements Closeable { while (i < nameParts.length - 1 && last.getMapper(nameParts[i]) != null) { Mapper newLast = last.getMapper(nameParts[i]); assert newLast instanceof ObjectMapper; - parentMappers.add((ObjectMapper)newLast); + last = (ObjectMapper) newLast; + parentMappers.add(last); ++i; } return i; From b17f4b40bae92fffd515d3835cbe12a0dde0eabf Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Wed, 17 Feb 2016 00:32:21 +0100 Subject: [PATCH 179/320] Check that number of placeholders in log message matches number of parameters --- build.gradle | 1 + .../gradle/precommit/LoggerUsageTask.groovy | 98 ++++ .../gradle/precommit/PrecommitTasks.groovy | 22 +- .../resources/forbidden/es-all-signatures.txt | 30 ++ ...-signatures.txt => es-core-signatures.txt} | 0 ...-signatures.txt => es-test-signatures.txt} | 0 ...{all-signatures.txt => jdk-signatures.txt} | 14 - .../common/SuppressLoggerChecks.java | 33 ++ settings.gradle | 1 + test/build.gradle | 5 +- test/logger-usage/build.gradle | 33 ++ .../loggerusage/ESLoggerUsageChecker.java | 460 ++++++++++++++++++ .../test/loggerusage/ESLoggerUsageTests.java | 165 +++++++ 13 files changed, 843 insertions(+), 19 deletions(-) create mode 100644 buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LoggerUsageTask.groovy create mode 100644 buildSrc/src/main/resources/forbidden/es-all-signatures.txt rename buildSrc/src/main/resources/forbidden/{core-signatures.txt => es-core-signatures.txt} (100%) rename 
buildSrc/src/main/resources/forbidden/{test-signatures.txt => es-test-signatures.txt} (100%) rename buildSrc/src/main/resources/forbidden/{all-signatures.txt => jdk-signatures.txt} (85%) create mode 100644 core/src/main/java/org/elasticsearch/common/SuppressLoggerChecks.java create mode 100644 test/logger-usage/build.gradle create mode 100644 test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/ESLoggerUsageChecker.java create mode 100644 test/logger-usage/src/test/java/org/elasticsearch/test/loggerusage/ESLoggerUsageTests.java diff --git a/build.gradle b/build.gradle index b419bf01e152..6ab00d73881e 100644 --- a/build.gradle +++ b/build.gradle @@ -116,6 +116,7 @@ subprojects { "org.elasticsearch.distribution.tar:elasticsearch:${version}": ':distribution:tar', "org.elasticsearch.distribution.rpm:elasticsearch:${version}": ':distribution:rpm', "org.elasticsearch.distribution.deb:elasticsearch:${version}": ':distribution:deb', + "org.elasticsearch.test:logger-usage:${version}": ':test:logger-usage', ] configurations.all { resolutionStrategy.dependencySubstitution { DependencySubstitutions subs -> diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LoggerUsageTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LoggerUsageTask.groovy new file mode 100644 index 000000000000..b280a74db58f --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LoggerUsageTask.groovy @@ -0,0 +1,98 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle.precommit + +import org.elasticsearch.gradle.LoggedExec +import org.gradle.api.file.FileCollection +import org.gradle.api.tasks.InputFiles +import org.gradle.api.tasks.OutputFile + +/** + * Runs LoggerUsageCheck on a set of directories. + */ +public class LoggerUsageTask extends LoggedExec { + + /** + * We use a simple "marker" file that we touch when the task succeeds + * as the task output. This is compared against the modified time of the + * inputs (ie the jars/class files). + */ + private File successMarker = new File(project.buildDir, 'markers/loggerUsage') + + private FileCollection classpath; + + private List classDirectories; + + public LoggerUsageTask() { + project.afterEvaluate { + dependsOn(classpath) + description = "Runs LoggerUsageCheck on ${classDirectories}" + executable = new File(project.javaHome, 'bin/java') + if (classDirectories == null) { + classDirectories = [] + if (project.sourceSets.findByName("main") && project.sourceSets.main.output.classesDir.exists()) { + classDirectories += [project.sourceSets.main.output.classesDir] + dependsOn project.tasks.classes + } + if (project.sourceSets.findByName("test") && project.sourceSets.test.output.classesDir.exists()) { + classDirectories += [project.sourceSets.test.output.classesDir] + dependsOn project.tasks.testClasses + } + } + doFirst({ + args('-cp', getClasspath().asPath, 'org.elasticsearch.test.loggerusage.ESLoggerUsageChecker') + getClassDirectories().each { + args it.getAbsolutePath() + } + }) + doLast({ + 
successMarker.parentFile.mkdirs() + successMarker.setText("", 'UTF-8') + }) + } + } + + @InputFiles + FileCollection getClasspath() { + return classpath + } + + void setClasspath(FileCollection classpath) { + this.classpath = classpath + } + + @InputFiles + List getClassDirectories() { + return classDirectories + } + + void setClassDirectories(List classDirectories) { + this.classDirectories = classDirectories + } + + @OutputFile + File getSuccessMarker() { + return successMarker + } + + void setSuccessMarker(File successMarker) { + this.successMarker = successMarker + } +} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy index ab5243512745..cbd72f2c7da6 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy @@ -34,6 +34,7 @@ class PrecommitTasks { configureForbiddenApis(project), configureCheckstyle(project), configureNamingConventions(project), + configureLoggerUsage(project), project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class), project.tasks.create('licenseHeaders', LicenseHeadersTask.class), project.tasks.create('jarHell', JarHellTask.class), @@ -64,20 +65,21 @@ class PrecommitTasks { internalRuntimeForbidden = true failOnUnsupportedJava = false bundledSignatures = ['jdk-unsafe', 'jdk-deprecated'] - signaturesURLs = [getClass().getResource('/forbidden/all-signatures.txt')] + signaturesURLs = [getClass().getResource('/forbidden/jdk-signatures.txt'), + getClass().getResource('/forbidden/es-all-signatures.txt')] suppressAnnotations = ['**.SuppressForbidden'] } Task mainForbidden = project.tasks.findByName('forbiddenApisMain') if (mainForbidden != null) { mainForbidden.configure { bundledSignatures += 'jdk-system-out' - signaturesURLs += getClass().getResource('/forbidden/core-signatures.txt') 
+ signaturesURLs += getClass().getResource('/forbidden/es-core-signatures.txt') } } Task testForbidden = project.tasks.findByName('forbiddenApisTest') if (testForbidden != null) { testForbidden.configure { - signaturesURLs += getClass().getResource('/forbidden/test-signatures.txt') + signaturesURLs += getClass().getResource('/forbidden/es-test-signatures.txt') } } Task forbiddenApis = project.tasks.findByName('forbiddenApis') @@ -117,4 +119,18 @@ class PrecommitTasks { } return null } + + private static Task configureLoggerUsage(Project project) { + Task loggerUsageTask = project.tasks.create('loggerUsageCheck', LoggerUsageTask.class) + + project.configurations.create('loggerUsagePlugin') + project.dependencies.add('loggerUsagePlugin', + "org.elasticsearch.test:logger-usage:${org.elasticsearch.gradle.VersionProperties.elasticsearch}") + + loggerUsageTask.configure { + classpath = project.configurations.loggerUsagePlugin + } + + return loggerUsageTask + } } diff --git a/buildSrc/src/main/resources/forbidden/es-all-signatures.txt b/buildSrc/src/main/resources/forbidden/es-all-signatures.txt new file mode 100644 index 000000000000..d258c0989117 --- /dev/null +++ b/buildSrc/src/main/resources/forbidden/es-all-signatures.txt @@ -0,0 +1,30 @@ +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on +# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +# either express or implied. 
See the License for the specific +# language governing permissions and limitations under the License. + +java.nio.file.Paths @ Use org.elasticsearch.common.io.PathUtils.get() instead. +java.nio.file.FileSystems#getDefault() @ use org.elasticsearch.common.io.PathUtils.getDefaultFileSystem() instead. + +java.nio.file.Files#getFileStore(java.nio.file.Path) @ Use org.elasticsearch.env.Environment.getFileStore() instead, impacted by JDK-8034057 +java.nio.file.Files#isWritable(java.nio.file.Path) @ Use org.elasticsearch.env.Environment.isWritable() instead, impacted by JDK-8034057 + +@defaultMessage Use org.elasticsearch.common.Randomness#get for reproducible sources of randomness +java.util.Random#() +java.util.concurrent.ThreadLocalRandom + +java.security.MessageDigest#clone() @ use org.elasticsearch.common.hash.MessageDigests + +@defaultMessage this should not have been added to lucene in the first place +org.apache.lucene.index.IndexReader#getCombinedCoreAndDeletesKey() diff --git a/buildSrc/src/main/resources/forbidden/core-signatures.txt b/buildSrc/src/main/resources/forbidden/es-core-signatures.txt similarity index 100% rename from buildSrc/src/main/resources/forbidden/core-signatures.txt rename to buildSrc/src/main/resources/forbidden/es-core-signatures.txt diff --git a/buildSrc/src/main/resources/forbidden/test-signatures.txt b/buildSrc/src/main/resources/forbidden/es-test-signatures.txt similarity index 100% rename from buildSrc/src/main/resources/forbidden/test-signatures.txt rename to buildSrc/src/main/resources/forbidden/es-test-signatures.txt diff --git a/buildSrc/src/main/resources/forbidden/all-signatures.txt b/buildSrc/src/main/resources/forbidden/jdk-signatures.txt similarity index 85% rename from buildSrc/src/main/resources/forbidden/all-signatures.txt rename to buildSrc/src/main/resources/forbidden/jdk-signatures.txt index 9bc370055145..994b1ad3a4a0 100644 --- a/buildSrc/src/main/resources/forbidden/all-signatures.txt +++ 
b/buildSrc/src/main/resources/forbidden/jdk-signatures.txt @@ -33,9 +33,6 @@ java.util.Formatter#(java.lang.String,java.lang.String,java.util.Locale) java.io.RandomAccessFile java.nio.file.Path#toFile() -java.nio.file.Paths @ Use org.elasticsearch.common.io.PathUtils.get() instead. -java.nio.file.FileSystems#getDefault() @ use org.elasticsearch.common.io.PathUtils.getDefaultFileSystem() instead. - @defaultMessage Specify a location for the temp file/directory instead. java.nio.file.Files#createTempDirectory(java.lang.String,java.nio.file.attribute.FileAttribute[]) java.nio.file.Files#createTempFile(java.lang.String,java.lang.String,java.nio.file.attribute.FileAttribute[]) @@ -48,9 +45,6 @@ java.io.ObjectInput java.nio.file.Files#isHidden(java.nio.file.Path) @ Dependent on the operating system, use FileSystemUtils.isHidden instead -java.nio.file.Files#getFileStore(java.nio.file.Path) @ Use org.elasticsearch.env.Environment.getFileStore() instead, impacted by JDK-8034057 -java.nio.file.Files#isWritable(java.nio.file.Path) @ Use org.elasticsearch.env.Environment.isWritable() instead, impacted by JDK-8034057 - @defaultMessage Resolve hosts explicitly to the address(es) you want with InetAddress. 
java.net.InetSocketAddress#(java.lang.String,int) java.net.Socket#(java.lang.String,int) @@ -89,9 +83,6 @@ java.lang.Class#getDeclaredMethods() @ Do not violate java's access system: Use java.lang.reflect.AccessibleObject#setAccessible(boolean) java.lang.reflect.AccessibleObject#setAccessible(java.lang.reflect.AccessibleObject[], boolean) -@defaultMessage this should not have been added to lucene in the first place -org.apache.lucene.index.IndexReader#getCombinedCoreAndDeletesKey() - @defaultMessage this method needs special permission java.lang.Thread#getAllStackTraces() @@ -112,8 +103,3 @@ java.util.Collections#EMPTY_MAP java.util.Collections#EMPTY_SET java.util.Collections#shuffle(java.util.List) @ Use java.util.Collections#shuffle(java.util.List, java.util.Random) with a reproducible source of randomness -@defaultMessage Use org.elasticsearch.common.Randomness#get for reproducible sources of randomness -java.util.Random#() -java.util.concurrent.ThreadLocalRandom - -java.security.MessageDigest#clone() @ use org.elasticsearch.common.hash.MessageDigests diff --git a/core/src/main/java/org/elasticsearch/common/SuppressLoggerChecks.java b/core/src/main/java/org/elasticsearch/common/SuppressLoggerChecks.java new file mode 100644 index 000000000000..c6f23f72f96a --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/SuppressLoggerChecks.java @@ -0,0 +1,33 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.common; + + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +/** + * Annotation to suppress logging usage checks errors inside a whole class or a method. + */ +@Retention(RetentionPolicy.CLASS) +@Target({ ElementType.CONSTRUCTOR, ElementType.METHOD, ElementType.TYPE }) +public @interface SuppressLoggerChecks { + String reason(); +} diff --git a/settings.gradle b/settings.gradle index f2518e69b12f..b1bb374fff1d 100644 --- a/settings.gradle +++ b/settings.gradle @@ -11,6 +11,7 @@ List projects = [ 'test:framework', 'test:fixtures:example-fixture', 'test:fixtures:hdfs-fixture', + 'test:logger-usage', 'modules:ingest-grok', 'modules:lang-expression', 'modules:lang-groovy', diff --git a/test/build.gradle b/test/build.gradle index 7e1b5725147b..fcf4f5bb7617 100644 --- a/test/build.gradle +++ b/test/build.gradle @@ -30,8 +30,9 @@ subprojects { // the main files are actually test files, so use the appropriate forbidden api sigs forbiddenApisMain { bundledSignatures = ['jdk-unsafe', 'jdk-deprecated'] - signaturesURLs = [PrecommitTasks.getResource('/forbidden/all-signatures.txt'), - PrecommitTasks.getResource('/forbidden/test-signatures.txt')] + signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'), + PrecommitTasks.getResource('/forbidden/es-signatures.txt'), + PrecommitTasks.getResource('/forbidden/es-test-signatures.txt')] } // TODO: should we have licenses for our test deps? 
diff --git a/test/logger-usage/build.gradle b/test/logger-usage/build.gradle new file mode 100644 index 000000000000..1a5815cf76e5 --- /dev/null +++ b/test/logger-usage/build.gradle @@ -0,0 +1,33 @@ +import org.elasticsearch.gradle.precommit.PrecommitTasks + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +dependencies { + compile 'org.ow2.asm:asm-debug-all:5.0.4' // use asm-debug-all as asm-all is broken + testCompile "org.elasticsearch.test:framework:${version}" +} + +loggerUsageCheck.enabled = false + +forbiddenApisMain.enabled = true // disabled by parent project +forbiddenApisMain { + signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] // does not depend on core, only jdk signatures +} +jarHell.enabled = true // disabled by parent project \ No newline at end of file diff --git a/test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/ESLoggerUsageChecker.java b/test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/ESLoggerUsageChecker.java new file mode 100644 index 000000000000..57ec37cb695d --- /dev/null +++ b/test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/ESLoggerUsageChecker.java @@ -0,0 +1,460 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.test.loggerusage; + +import org.objectweb.asm.AnnotationVisitor; +import org.objectweb.asm.ClassReader; +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.MethodVisitor; +import org.objectweb.asm.Opcodes; +import org.objectweb.asm.Type; +import org.objectweb.asm.tree.AbstractInsnNode; +import org.objectweb.asm.tree.IntInsnNode; +import org.objectweb.asm.tree.LdcInsnNode; +import org.objectweb.asm.tree.LineNumberNode; +import org.objectweb.asm.tree.MethodInsnNode; +import org.objectweb.asm.tree.MethodNode; +import org.objectweb.asm.tree.TypeInsnNode; +import org.objectweb.asm.tree.analysis.Analyzer; +import org.objectweb.asm.tree.analysis.AnalyzerException; +import org.objectweb.asm.tree.analysis.BasicInterpreter; +import org.objectweb.asm.tree.analysis.BasicValue; +import org.objectweb.asm.tree.analysis.Frame; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; +import java.util.Arrays; +import java.util.List; +import java.util.function.Consumer; +import java.util.function.Predicate; + +public class ESLoggerUsageChecker { + public static final String LOGGER_CLASS = "org.elasticsearch.common.logging.ESLogger"; + public static final String THROWABLE_CLASS = "java.lang.Throwable"; + public static final List LOGGER_METHODS = Arrays.asList("trace", "debug", "info", "warn", "error"); + public static final String IGNORE_CHECKS_ANNOTATION = "org.elasticsearch.common.SuppressLoggerChecks"; + + public static void main(String... 
args) throws Exception { + System.out.println("checking for wrong usages of ESLogger..."); + boolean[] wrongUsageFound = new boolean[1]; + checkLoggerUsage(wrongLoggerUsage -> { + System.err.println(wrongLoggerUsage.getErrorLines()); + wrongUsageFound[0] = true; + }, args); + if (wrongUsageFound[0]) { + throw new Exception("Wrong logger usages found"); + } else { + System.out.println("No wrong usages found"); + } + } + + private static void checkLoggerUsage(Consumer wrongUsageCallback, String... classDirectories) + throws IOException { + for (String classDirectory : classDirectories) { + Path root = Paths.get(classDirectory); + if (Files.isDirectory(root) == false) { + throw new IllegalArgumentException(root + " should be an existing directory"); + } + Files.walkFileTree(root, new SimpleFileVisitor() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + if (Files.isRegularFile(file) && file.endsWith(".class")) { + try (InputStream in = Files.newInputStream(file)) { + ESLoggerUsageChecker.check(wrongUsageCallback, in); + } + } + return super.visitFile(file, attrs); + } + }); + } + } + + public static void check(Consumer wrongUsageCallback, InputStream inputStream) throws IOException { + check(wrongUsageCallback, inputStream, s -> true); + } + + // used by tests + static void check(Consumer wrongUsageCallback, InputStream inputStream, Predicate methodsToCheck) + throws IOException { + ClassReader cr = new ClassReader(inputStream); + cr.accept(new ClassChecker(wrongUsageCallback, methodsToCheck), 0); + } + + public static class WrongLoggerUsage { + private final String className; + private final String methodName; + private final String logMethodName; + private final int line; + private final String errorMessage; + + public WrongLoggerUsage(String className, String methodName, String logMethodName, int line, String errorMessage) { + this.className = className; + this.methodName = methodName; + this.logMethodName 
= logMethodName; + this.line = line; + this.errorMessage = errorMessage; + } + + @Override + public String toString() { + return "WrongLoggerUsage{" + + "className='" + className + '\'' + + ", methodName='" + methodName + '\'' + + ", logMethodName='" + logMethodName + '\'' + + ", line=" + line + + ", errorMessage='" + errorMessage + '\'' + + '}'; + } + + /** + * Returns an error message that has the form of stack traces emitted by {@link Throwable#printStackTrace} + */ + public String getErrorLines() { + String fullClassName = Type.getObjectType(className).getClassName(); + String simpleClassName = fullClassName.substring(fullClassName.lastIndexOf(".") + 1, fullClassName.length()); + int innerClassIndex = simpleClassName.indexOf("$"); + if (innerClassIndex > 0) { + simpleClassName = simpleClassName.substring(0, innerClassIndex); + } + simpleClassName = simpleClassName + ".java"; + StringBuilder sb = new StringBuilder(); + sb.append("Bad usage of "); + sb.append(LOGGER_CLASS).append("#").append(logMethodName); + sb.append(": "); + sb.append(errorMessage); + sb.append("\n\tat "); + sb.append(fullClassName); + sb.append("."); + sb.append(methodName); + sb.append("("); + sb.append(simpleClassName); + sb.append(":"); + sb.append(line); + sb.append(")"); + return sb.toString(); + } + } + + private static class ClassChecker extends ClassVisitor { + private String className; + private boolean ignoreChecks; + private final Consumer wrongUsageCallback; + private final Predicate methodsToCheck; + + public ClassChecker(Consumer wrongUsageCallback, Predicate methodsToCheck) { + super(Opcodes.ASM5); + this.wrongUsageCallback = wrongUsageCallback; + this.methodsToCheck = methodsToCheck; + } + + @Override + public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) { + this.className = name; + } + + @Override + public AnnotationVisitor visitAnnotation(String desc, boolean visible) { + if 
(IGNORE_CHECKS_ANNOTATION.equals(Type.getType(desc).getClassName())) { + ignoreChecks = true; + } + return super.visitAnnotation(desc, visible); + } + + @Override + public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) { + if (ignoreChecks == false && methodsToCheck.test(name)) { + return new MethodChecker(this.className, access, name, desc, wrongUsageCallback); + } else { + return super.visitMethod(access, name, desc, signature, exceptions); + } + } + } + + private static class MethodChecker extends MethodVisitor { + private final String className; + private final Consumer wrongUsageCallback; + private boolean ignoreChecks; + + public MethodChecker(String className, int access, String name, String desc, Consumer wrongUsageCallback) { + super(Opcodes.ASM5, new MethodNode(access, name, desc, null, null)); + this.className = className; + this.wrongUsageCallback = wrongUsageCallback; + } + + @Override + public AnnotationVisitor visitAnnotation(String desc, boolean visible) { + if (IGNORE_CHECKS_ANNOTATION.equals(Type.getType(desc).getClassName())) { + ignoreChecks = true; + } + return super.visitAnnotation(desc, visible); + } + + @Override + public void visitEnd() { + if (ignoreChecks == false) { + findBadLoggerUsages((MethodNode) mv); + } + super.visitEnd(); + } + + public void findBadLoggerUsages(MethodNode methodNode) { + Analyzer stringPlaceHolderAnalyzer = new Analyzer<>(new PlaceHolderStringInterpreter()); + Analyzer arraySizeAnalyzer = new Analyzer<>(new ArraySizeInterpreter()); + try { + stringPlaceHolderAnalyzer.analyze(className, methodNode); + arraySizeAnalyzer.analyze(className, methodNode); + } catch (AnalyzerException e) { + throw new RuntimeException("Internal error: failed in analysis step", e); + } + Frame[] stringFrames = stringPlaceHolderAnalyzer.getFrames(); + Frame[] arraySizeFrames = arraySizeAnalyzer.getFrames(); + AbstractInsnNode[] insns = methodNode.instructions.toArray(); + int 
lineNumber = -1; + for (int i = 0; i < insns.length; i++) { + AbstractInsnNode insn = insns[i]; + if (insn instanceof LineNumberNode) { + LineNumberNode lineNumberNode = (LineNumberNode) insn; + lineNumber = lineNumberNode.line; + } + if (insn.getOpcode() == Opcodes.INVOKEVIRTUAL) { + MethodInsnNode methodInsn = (MethodInsnNode) insn; + if (Type.getObjectType(methodInsn.owner).getClassName().equals(LOGGER_CLASS) == false) { + continue; + } + if (LOGGER_METHODS.contains(methodInsn.name) == false) { + continue; + } + Type[] argumentTypes = Type.getArgumentTypes(methodInsn.desc); + BasicValue logMessageLengthObject = getStackValue(stringFrames[i], argumentTypes.length - 1); // first argument + if (logMessageLengthObject instanceof PlaceHolderStringBasicValue == false) { + wrongUsageCallback.accept(new WrongLoggerUsage(className, methodNode.name, methodInsn.name, lineNumber, + "First argument must be a string constant so that we can statically ensure proper place holder usage")); + continue; + } + PlaceHolderStringBasicValue logMessageLength = (PlaceHolderStringBasicValue) logMessageLengthObject; + if (logMessageLength.minValue != logMessageLength.maxValue) { + wrongUsageCallback.accept(new WrongLoggerUsage(className, methodNode.name, methodInsn.name, lineNumber, + "Multiple log messages with conflicting number of place holders")); + continue; + } + BasicValue varArgsSizeObject = getStackValue(arraySizeFrames[i], 0); // last argument + if (varArgsSizeObject instanceof ArraySizeBasicValue == false) { + wrongUsageCallback.accept(new WrongLoggerUsage(className, methodNode.name, methodInsn.name, lineNumber, + "Could not determine size of varargs array")); + continue; + } + ArraySizeBasicValue varArgsSize = (ArraySizeBasicValue) varArgsSizeObject; + if (varArgsSize.minValue != varArgsSize.maxValue) { + wrongUsageCallback.accept(new WrongLoggerUsage(className, methodNode.name, methodInsn.name, lineNumber, + "Multiple parameter arrays with conflicting sizes")); + continue; + 
} + assert logMessageLength.minValue == logMessageLength.maxValue && varArgsSize.minValue == varArgsSize.maxValue; + if (logMessageLength.minValue != varArgsSize.minValue) { + wrongUsageCallback.accept(new WrongLoggerUsage(className, methodNode.name, methodInsn.name, lineNumber, + "Expected " + logMessageLength.minValue + " arguments but got " + varArgsSize.minValue)); + continue; + } + } + } + } + } + + private static int calculateNumberOfPlaceHolders(String message) { + int count = 0; + for (int i = 1; i < message.length(); i++) { + if (message.charAt(i - 1) == '{' && message.charAt(i) == '}') { + count++; + i += 1; + } + } + return count; + } + + private static BasicValue getStackValue(Frame f, int index) { + int top = f.getStackSize() - 1; + return index <= top ? f.getStack(top - index) : null; + } + + private static class IntMinMaxTrackingBasicValue extends BasicValue { + protected final int minValue; + protected final int maxValue; + + public IntMinMaxTrackingBasicValue(Type type, int value) { + super(type); + this.minValue = value; + this.maxValue = value; + } + + public IntMinMaxTrackingBasicValue(Type type, int minValue, int maxValue) { + super(type); + this.minValue = minValue; + this.maxValue = maxValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + + IntMinMaxTrackingBasicValue that = (IntMinMaxTrackingBasicValue) o; + + if (minValue != that.minValue) return false; + return maxValue == that.maxValue; + + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + minValue; + result = 31 * result + maxValue; + return result; + } + + @Override + public String toString() { + return "IntMinMaxTrackingBasicValue{" + + "minValue=" + minValue + + ", maxValue=" + maxValue + + '}'; + } + } + + private static final class PlaceHolderStringBasicValue extends IntMinMaxTrackingBasicValue { + 
public static final Type STRING_OBJECT_TYPE = Type.getObjectType("java/lang/String"); + + public PlaceHolderStringBasicValue(int placeHolders) { + super(STRING_OBJECT_TYPE, placeHolders); + } + + public PlaceHolderStringBasicValue(int minPlaceHolders, int maxPlaceHolders) { + super(STRING_OBJECT_TYPE, minPlaceHolders, maxPlaceHolders); + } + } + + private static final class ArraySizeBasicValue extends IntMinMaxTrackingBasicValue { + public ArraySizeBasicValue(Type type, int minArraySize, int maxArraySize) { + super(type, minArraySize, maxArraySize); + } + } + + private static final class IntegerConstantBasicValue extends IntMinMaxTrackingBasicValue { + public IntegerConstantBasicValue(Type type, int constant) { + super(type, constant); + } + + public IntegerConstantBasicValue(Type type, int minConstant, int maxConstant) { + super(type, minConstant, maxConstant); + } + } + + private static final class PlaceHolderStringInterpreter extends BasicInterpreter { + @Override + public BasicValue newOperation(AbstractInsnNode insnNode) throws AnalyzerException { + if (insnNode.getOpcode() == Opcodes.LDC) { + Object constant = ((LdcInsnNode) insnNode).cst; + if (constant instanceof String) { + return new PlaceHolderStringBasicValue(calculateNumberOfPlaceHolders((String) constant)); + } + } + return super.newOperation(insnNode); + } + + @Override + public BasicValue merge(BasicValue value1, BasicValue value2) { + if (value1 instanceof PlaceHolderStringBasicValue && value2 instanceof PlaceHolderStringBasicValue + && value1.equals(value2) == false) { + PlaceHolderStringBasicValue c1 = (PlaceHolderStringBasicValue) value1; + PlaceHolderStringBasicValue c2 = (PlaceHolderStringBasicValue) value2; + return new PlaceHolderStringBasicValue(Math.min(c1.minValue, c2.minValue), Math.max(c1.maxValue, c2.maxValue)); + } + return super.merge(value1, value2); + } + } + + private static final class ArraySizeInterpreter extends BasicInterpreter { + @Override + public BasicValue 
newOperation(AbstractInsnNode insnNode) throws AnalyzerException { + switch (insnNode.getOpcode()) { + case ICONST_0: return new IntegerConstantBasicValue(Type.INT_TYPE, 0); + case ICONST_1: return new IntegerConstantBasicValue(Type.INT_TYPE, 1); + case ICONST_2: return new IntegerConstantBasicValue(Type.INT_TYPE, 2); + case ICONST_3: return new IntegerConstantBasicValue(Type.INT_TYPE, 3); + case ICONST_4: return new IntegerConstantBasicValue(Type.INT_TYPE, 4); + case ICONST_5: return new IntegerConstantBasicValue(Type.INT_TYPE, 5); + case BIPUSH: + case SIPUSH: return new IntegerConstantBasicValue(Type.INT_TYPE, ((IntInsnNode)insnNode).operand); + case Opcodes.LDC: { + Object constant = ((LdcInsnNode)insnNode).cst; + if (constant instanceof Integer) { + return new IntegerConstantBasicValue(Type.INT_TYPE, (Integer)constant); + } else { + return super.newOperation(insnNode); + } + } + default: return super.newOperation(insnNode); + } + } + + @Override + public BasicValue merge(BasicValue value1, BasicValue value2) { + if (value1 instanceof IntegerConstantBasicValue && value2 instanceof IntegerConstantBasicValue) { + IntegerConstantBasicValue c1 = (IntegerConstantBasicValue) value1; + IntegerConstantBasicValue c2 = (IntegerConstantBasicValue) value2; + return new IntegerConstantBasicValue(Type.INT_TYPE, Math.min(c1.minValue, c2.minValue), Math.max(c1.maxValue, c2.maxValue)); + } else if (value1 instanceof ArraySizeBasicValue && value2 instanceof ArraySizeBasicValue) { + ArraySizeBasicValue c1 = (ArraySizeBasicValue) value1; + ArraySizeBasicValue c2 = (ArraySizeBasicValue) value2; + return new ArraySizeBasicValue(Type.INT_TYPE, Math.min(c1.minValue, c2.minValue), Math.max(c1.maxValue, c2.maxValue)); + } + return super.merge(value1, value2); + } + + @Override + public BasicValue unaryOperation(AbstractInsnNode insnNode, BasicValue value) throws AnalyzerException { + if (insnNode.getOpcode() == Opcodes.ANEWARRAY && value instanceof IntegerConstantBasicValue) { + 
IntegerConstantBasicValue constantBasicValue = (IntegerConstantBasicValue) value; + String desc = ((TypeInsnNode) insnNode).desc; + return new ArraySizeBasicValue(Type.getType("[" + Type.getObjectType(desc)), constantBasicValue.minValue, + constantBasicValue.maxValue); + } + return super.unaryOperation(insnNode, value); + } + + @Override + public BasicValue ternaryOperation(AbstractInsnNode insnNode, BasicValue value1, BasicValue value2, BasicValue value3) + throws AnalyzerException { + if (insnNode.getOpcode() == Opcodes.AASTORE && value1 instanceof ArraySizeBasicValue) { + return value1; + } + return super.ternaryOperation(insnNode, value1, value2, value3); + } + } +} diff --git a/test/logger-usage/src/test/java/org/elasticsearch/test/loggerusage/ESLoggerUsageTests.java b/test/logger-usage/src/test/java/org/elasticsearch/test/loggerusage/ESLoggerUsageTests.java new file mode 100644 index 000000000000..ab07ecbf45ee --- /dev/null +++ b/test/logger-usage/src/test/java/org/elasticsearch/test/loggerusage/ESLoggerUsageTests.java @@ -0,0 +1,165 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.test.loggerusage; + +import org.elasticsearch.common.SuppressLoggerChecks; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.test.loggerusage.ESLoggerUsageChecker.WrongLoggerUsage; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.io.InputStream; +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Predicate; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.notNullValue; + +public class ESLoggerUsageTests extends ESTestCase { + + public void testLoggerUsageChecks() throws IOException { + for (Method method : getClass().getMethods()) { + if (method.getDeclaringClass().equals(getClass())) { + if (method.getName().startsWith("check")) { + logger.info("Checking logger usage for method {}", method.getName()); + InputStream classInputStream = getClass().getResourceAsStream(getClass().getSimpleName() + ".class"); + List errors = new ArrayList<>(); + ESLoggerUsageChecker.check(errors::add, classInputStream, Predicate.isEqual(method.getName())); + if (method.getName().startsWith("checkFail")) { + assertFalse("Expected " + method.getName() + " to have wrong ESLogger usage", errors.isEmpty()); + } else { + assertTrue("Method " + method.getName() + " has unexpected ESLogger usage errors: " + errors, errors.isEmpty()); + } + } else { + assertTrue("only allow methods starting with test or check in this class", method.getName().startsWith("test")); + } + } + } + } + + public void testLoggerUsageCheckerCompatibilityWithESLogger() throws NoSuchMethodException { + assertThat(ESLoggerUsageChecker.LOGGER_CLASS, equalTo(ESLogger.class.getName())); + assertThat(ESLoggerUsageChecker.THROWABLE_CLASS, equalTo(Throwable.class.getName())); + int varargsMethodCount = 0; + for (Method method : ESLogger.class.getMethods()) { + if (method.isVarArgs()) { + // 
check that logger usage checks all varargs methods + assertThat(ESLoggerUsageChecker.LOGGER_METHODS, hasItem(method.getName())); + varargsMethodCount++; + } + } + // currently we have two overloaded methods for each of debug, info, ... + // if that changes, we might want to have another look at the usage checker + assertThat(varargsMethodCount, equalTo(ESLoggerUsageChecker.LOGGER_METHODS.size() * 2)); + + // check that signature is same as we expect in the usage checker + for (String methodName : ESLoggerUsageChecker.LOGGER_METHODS) { + assertThat(ESLogger.class.getMethod(methodName, String.class, Object[].class), notNullValue()); + assertThat(ESLogger.class.getMethod(methodName, String.class, Throwable.class, Object[].class), notNullValue()); + } + } + + public void checkNumberOfArguments1() { + logger.info("Hello {}", "world"); + } + + public void checkFailNumberOfArguments1() { + logger.info("Hello {}"); + } + + @SuppressLoggerChecks(reason = "test ignore functionality") + public void checkIgnoreWhenAnnotationPresent() { + logger.info("Hello {}"); + } + + public void checkNumberOfArguments2() { + logger.info("Hello {}, {}, {}", "world", 2, "third argument"); + } + + public void checkFailNumberOfArguments2() { + logger.info("Hello {}, {}", "world", 2, "third argument"); + } + + public void checkNumberOfArguments3() { + // long argument list (> 5), emits different bytecode + logger.info("Hello {}, {}, {}, {}, {}, {}, {}", "world", 2, "third argument", 4, 5, 6, new String("last arg")); + } + + public void checkFailNumberOfArguments3() { + logger.info("Hello {}, {}, {}, {}, {}, {}, {}", "world", 2, "third argument", 4, 5, 6, 7, new String("last arg")); + } + + public void checkOrderOfExceptionArgument() { + logger.info("Hello", new Exception()); + } + + public void checkOrderOfExceptionArgument1() { + logger.info("Hello {}", new Exception(), "world"); + } + + public void checkFailOrderOfExceptionArgument1() { + logger.info("Hello {}", "world", new Exception()); + } 
+ + public void checkOrderOfExceptionArgument2() { + logger.info("Hello {}, {}", new Exception(), "world", 42); + } + + public void checkFailOrderOfExceptionArgument2() { + logger.info("Hello {}, {}", "world", 42, new Exception()); + } + + public void checkFailNonConstantMessage(boolean b) { + logger.info(Boolean.toString(b)); + } + + public void checkComplexUsage(boolean b) { + String message = "Hello {}, {}"; + Object[] args = new Object[] { "world", 42 }; + if (b) { + message = "also two args {}{}"; + args = new Object[] { "world", 43 }; + } + logger.info(message, args); + } + + public void checkFailComplexUsage1(boolean b) { + String message = "Hello {}, {}"; + Object[] args = new Object[] { "world", 42 }; + if (b) { + message = "just one arg {}"; + args = new Object[] { "world", 43 }; + } + logger.info(message, args); + } + + public void checkFailComplexUsage2(boolean b) { + String message = "Hello {}, {}"; + Object[] args = new Object[] { "world", 42 }; + if (b) { + message = "also two args {}{}"; + args = new Object[] { "world", 43, "another argument" }; + } + logger.info(message, args); + } +} From 718876a941a5a934066f8461e6ee6f23b1bbb896 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Wed, 17 Feb 2016 07:27:23 +0100 Subject: [PATCH 180/320] Fix wrong placeholder usage in logging statements --- .../cluster/action/index/NodeIndexDeletedAction.java | 4 ++-- .../cluster/action/shard/ShardStateAction.java | 2 +- .../common/geo/builders/PolygonBuilder.java | 4 ++-- .../java/org/elasticsearch/common/lucene/Lucene.java | 2 +- .../java/org/elasticsearch/index/engine/Engine.java | 4 ++-- .../java/org/elasticsearch/index/shard/IndexShard.java | 2 +- .../indices/flush/SyncedFlushService.java | 2 +- .../indices/recovery/RecoveriesCollection.java | 2 +- .../indices/recovery/RecoverySourceHandler.java | 2 +- .../repositories/blobstore/BlobStoreRepository.java | 2 +- .../org/elasticsearch/snapshots/SnapshotsService.java | 2 +- 
.../elasticsearch/transport/netty/NettyTransport.java | 4 ++-- .../java/org/elasticsearch/tribe/TribeService.java | 2 +- .../bwcompat/BasicBackwardsCompatibilityIT.java | 2 +- .../elasticsearch/gateway/RecoveryFromGatewayIT.java | 2 +- .../gateway/ReusePeerRecoverySharedTest.java | 2 +- .../percolator/ConcurrentPercolatorIT.java | 2 +- .../org/elasticsearch/percolator/TTLPercolatorIT.java | 2 +- .../search/morelikethis/MoreLikeThisIT.java | 2 +- .../snapshots/DedicatedClusterSnapshotRestoreIT.java | 4 ++-- .../snapshots/SharedClusterSnapshotRestoreIT.java | 10 +++++----- .../elasticsearch/messy/tests/ExtendedStatsTests.java | 2 +- .../java/org/elasticsearch/messy/tests/StatsTests.java | 4 ++-- .../elasticsearch/discovery/ec2/Ec2DiscoveryTests.java | 4 ++-- .../cloud/azure/storage/AzureStorageServiceImpl.java | 2 +- 25 files changed, 36 insertions(+), 36 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java b/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java index 012cc66e1104..c2c1b468f1b0 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java @@ -81,13 +81,13 @@ public class NodeIndexDeletedAction extends AbstractComponent { transportService.sendRequest(clusterState.nodes().masterNode(), INDEX_DELETED_ACTION_NAME, new NodeIndexDeletedMessage(index, nodeId), EmptyTransportResponseHandler.INSTANCE_SAME); if (nodes.localNode().isDataNode() == false) { - logger.trace("[{}] not acking store deletion (not a data node)"); + logger.trace("[{}] not acking store deletion (not a data node)", index); return; } threadPool.generic().execute(new AbstractRunnable() { @Override public void onFailure(Throwable t) { - logger.warn("[{}]failed to ack index store deleted for index", t, index); + logger.warn("[{}] failed to ack index store deleted for index", 
t, index); } @Override diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index c7f39015c18d..626533f95816 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -151,7 +151,7 @@ public class ShardStateAction extends AbstractComponent { @Override public void onNewClusterState(ClusterState state) { if (logger.isTraceEnabled()) { - logger.trace("new cluster state [{}] after waiting for master election to fail shard [{}]", shardRoutingEntry.getShardRouting().shardId(), state.prettyPrint(), shardRoutingEntry); + logger.trace("new cluster state [{}] after waiting for master election to fail shard [{}]", state.prettyPrint(), shardRoutingEntry); } sendShardAction(actionName, observer, shardRoutingEntry, listener); } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index 4a9c84410723..6202643fae15 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -314,7 +314,7 @@ public class PolygonBuilder extends ShapeBuilder { double shiftOffset = any.coordinate.x > DATELINE ? DATELINE : (any.coordinate.x < -DATELINE ? 
-DATELINE : 0); if (debugEnabled()) { - LOGGER.debug("shift: {[]}", shiftOffset); + LOGGER.debug("shift: [{}]", shiftOffset); } // run along the border of the component, collect the @@ -392,7 +392,7 @@ public class PolygonBuilder extends ShapeBuilder { if(debugEnabled()) { for (int i = 0; i < result.length; i++) { - LOGGER.debug("Component {[]}:", i); + LOGGER.debug("Component [{}]:", i); for (int j = 0; j < result[i].length; j++) { LOGGER.debug("\t" + Arrays.toString(result[i][j])); } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 54e5738e78c5..8508a8a2e40e 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -111,7 +111,7 @@ public class Lucene { try { return Version.parse(version); } catch (ParseException e) { - logger.warn("no version match {}, default to {}", version, defaultVersion, e); + logger.warn("no version match {}, default to {}", e, version, defaultVersion); return defaultVersion; } } diff --git a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java index 3c5583440e02..bb7aa0ea71a2 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -671,7 +671,7 @@ public abstract class Engine implements Closeable { closeNoLock("engine failed on: [" + reason + "]"); } finally { if (failedEngine != null) { - logger.debug("tried to fail engine but engine is already failed. ignoring. [{}]", reason, failure); + logger.debug("tried to fail engine but engine is already failed. ignoring. 
[{}]", failure, reason); return; } logger.warn("failed engine [{}]", failure, reason); @@ -697,7 +697,7 @@ public abstract class Engine implements Closeable { store.decRef(); } } else { - logger.debug("tried to fail engine but could not acquire lock - engine should be failed by now [{}]", reason, failure); + logger.debug("tried to fail engine but could not acquire lock - engine should be failed by now [{}]", failure, reason); } } diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 4d35755e159b..5e2df0300012 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -712,7 +712,7 @@ public class IndexShard extends AbstractIndexShardComponent { false, true, upgrade.upgradeOnlyAncientSegments()); org.apache.lucene.util.Version version = minimumCompatibleVersion(); if (logger.isTraceEnabled()) { - logger.trace("upgraded segment {} from version {} to version {}", previousVersion, version); + logger.trace("upgraded segments for {} from version {} to version {}", shardId, previousVersion, version); } return version; diff --git a/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java b/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java index 6eb7c88a2a49..f3937fc0b66f 100644 --- a/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java +++ b/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java @@ -309,7 +309,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL } final Engine.CommitId expectedCommitId = expectedCommitIds.get(shard.currentNodeId()); if (expectedCommitId == null) { - logger.trace("{} can't resolve expected commit id for {}, skipping for sync id [{}]. 
shard routing {}", shardId, syncId, shard); + logger.trace("{} can't resolve expected commit id for current node, skipping for sync id [{}]. shard routing {}", shardId, syncId, shard); results.put(shard, new ShardSyncedFlushResponse("no commit id from pre-sync flush")); contDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results); continue; diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveriesCollection.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveriesCollection.java index 24f87ee436fe..8494939e46d1 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveriesCollection.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveriesCollection.java @@ -238,7 +238,7 @@ public class RecoveriesCollection { return; } lastSeenAccessTime = accessTime; - logger.trace("[monitor] rescheduling check for [{}]. last access time is [{}]", lastSeenAccessTime); + logger.trace("[monitor] rescheduling check for [{}]. last access time is [{}]", recoveryId, lastSeenAccessTime); threadPool.schedule(checkInterval, ThreadPool.Names.GENERIC, this); } } diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 15b9b59dd28f..b609eb5d08a8 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -289,7 +289,7 @@ public class RecoverySourceHandler { RemoteTransportException exception = new RemoteTransportException("File corruption occurred on recovery but " + "checksums are ok", null); exception.addSuppressed(targetException); - logger.warn("{} Remote file corruption during finalization on node {}, recovering {}. local checksum OK", + logger.warn("{} Remote file corruption during finalization of recovery on node {}. 
local checksum OK", corruptIndexException, shard.shardId(), request.targetNode()); throw exception; } else { diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index a6ea381adb40..5d423552a56f 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -478,7 +478,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent implem @Override public void onFailure(Throwable t) { if (lifecycle.stoppedOrClosed()) { - logger.trace("[{}] failed to send ping transport message", t); + logger.trace("failed to send ping transport message", t); } else { - logger.warn("[{}] failed to send ping transport message", t); + logger.warn("failed to send ping transport message", t); } } } diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index 2bd40539807f..b037f706ec19 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -262,7 +262,7 @@ public class TribeService extends AbstractLifecycleComponent { try { otherNode.close(); } catch (Throwable t) { - logger.warn("failed to close node {} on failed start", otherNode, t); + logger.warn("failed to close node {} on failed start", t, otherNode); } } if (e instanceof RuntimeException) { diff --git a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java index 7e46825398b6..ae7397015930 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java @@ -191,7 
+191,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { numDocs *= 2; } - logger.info(" --> waiting for relocation to complete", numDocs); + logger.info(" --> waiting for relocation of [{}] docs to complete", numDocs); ensureYellow("test");// move all shards to the new node (it waits on relocation) final int numIters = randomIntBetween(10, 20); for (int i = 0; i < numIters; i++) { diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index 8fd6e303220b..4da9c2df1775 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -380,7 +380,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { assertSyncIdsNotNull(); } - logger.info("--> disabling allocation while the cluster is shut down", useSyncIds ? "" : " a second time"); + logger.info("--> disabling allocation while the cluster is shut down{}", useSyncIds ? "" : " a second time"); // Disable allocations while we are closing nodes client().admin().cluster().prepareUpdateSettings() .setTransientSettings(settingsBuilder() diff --git a/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java b/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java index 6f188ef42800..d28e53332250 100644 --- a/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java +++ b/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java @@ -100,7 +100,7 @@ public class ReusePeerRecoverySharedTest { assertSyncIdsNotNull(); } - logger.info("--> disabling allocation while the cluster is shut down", useSyncIds ? "" : " a second time"); + logger.info("--> disabling allocation while the cluster is shut down{}", useSyncIds ? 
"" : " a second time"); // Disable allocations while we are closing nodes client().admin().cluster().prepareUpdateSettings().setTransientSettings( settingsBuilder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), EnableAllocationDecider.Allocation.NONE)) diff --git a/core/src/test/java/org/elasticsearch/percolator/ConcurrentPercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/ConcurrentPercolatorIT.java index e99cf51758b0..f2493d85e861 100644 --- a/core/src/test/java/org/elasticsearch/percolator/ConcurrentPercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/ConcurrentPercolatorIT.java @@ -292,7 +292,7 @@ public class ConcurrentPercolatorIT extends ESIntegTestCase { } for (Throwable t : exceptionsHolder) { - logger.error("Unexpected exception {}", t.getMessage(), t); + logger.error("Unexpected exception while indexing", t); } assertThat(exceptionsHolder.isEmpty(), equalTo(true)); } diff --git a/core/src/test/java/org/elasticsearch/percolator/TTLPercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/TTLPercolatorIT.java index 52f8ecb4b135..f85b12d85ace 100644 --- a/core/src/test/java/org/elasticsearch/percolator/TTLPercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/TTLPercolatorIT.java @@ -184,7 +184,7 @@ public class TTLPercolatorIT extends ESIntegTestCase { .endObject() ).setTTL(randomIntBetween(1, 500)).setRefresh(true).execute().actionGet(); } catch (MapperParsingException e) { - logger.info("failed indexing {}", i, e); + logger.info("failed indexing {}", e, i); // if we are unlucky the TTL is so small that we see the expiry date is already in the past when // we parse the doc ignore those... 
assertThat(e.getCause(), Matchers.instanceOf(AlreadyExpiredException.class)); diff --git a/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java b/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java index 8f0ef3c0fbef..b65380a545c7 100644 --- a/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java +++ b/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java @@ -383,7 +383,7 @@ public class MoreLikeThisIT extends ESIntegTestCase { int maxIters = randomIntBetween(10, 20); for (int i = 0; i < maxIters; i++) { int max_query_terms = randomIntBetween(1, values.length); - logger.info("Running More Like This with max_query_terms = %s", max_query_terms); + logger.info("Running More Like This with max_query_terms = {}", max_query_terms); MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new String[] {"text"}, null, new Item[] {new Item(null, null, "0")}) .minTermFreq(1).minDocFreq(1) .maxQueryTerms(max_query_terms).minimumShouldMatch("0%"); diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index bd6c2533652d..4cfac516930f 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -325,7 +325,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest logger.info("--> execution was blocked on node [{}], shutting it down", blockedNode); unblockNode(blockedNode); - logger.info("--> stopping node", blockedNode); + logger.info("--> stopping node [{}]", blockedNode); stopNode(blockedNode); logger.info("--> waiting for completion"); SnapshotInfo snapshotInfo = waitForCompletion("test-repo", "test-snap", TimeValue.timeValueSeconds(60)); @@ -379,7 +379,7 @@ public class 
DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest // Make sure that abort makes some progress Thread.sleep(100); unblockNode(blockedNode); - logger.info("--> stopping node", blockedNode); + logger.info("--> stopping node [{}]", blockedNode); stopNode(blockedNode); try { DeleteSnapshotResponse deleteSnapshotResponse = deleteSnapshotResponseFuture.actionGet(); diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index a8a45e6a42f4..4f47a2a3126b 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -1448,7 +1448,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas } } - logger.info("--> checking snapshot status for all currently running and snapshot with empty repository", blockedNode); + logger.info("--> checking snapshot status for all currently running and snapshot with empty repository"); response = client.admin().cluster().prepareSnapshotStatus().execute().actionGet(); assertThat(response.getSnapshots().size(), equalTo(1)); snapshotStatus = response.getSnapshots().get(0); @@ -1461,7 +1461,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas } } - logger.info("--> checking that _current returns the currently running snapshot", blockedNode); + logger.info("--> checking that _current returns the currently running snapshot"); GetSnapshotsResponse getResponse = client.admin().cluster().prepareGetSnapshots("test-repo").setCurrentSnapshot().execute().actionGet(); assertThat(getResponse.getSnapshots().size(), equalTo(1)); SnapshotInfo snapshotInfo = getResponse.getSnapshots().get(0); @@ -1475,7 +1475,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas logger.info("--> done"); - logger.info("--> 
checking snapshot status again after snapshot is done", blockedNode); + logger.info("--> checking snapshot status again after snapshot is done"); response = client.admin().cluster().prepareSnapshotStatus("test-repo").addSnapshots("test-snap").execute().actionGet(); snapshotStatus = response.getSnapshots().get(0); assertThat(snapshotStatus.getIndices().size(), equalTo(1)); @@ -1486,11 +1486,11 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas assertThat(indexStatus.getShardsStats().getDoneShards(), equalTo(snapshotInfo.successfulShards())); assertThat(indexStatus.getShards().size(), equalTo(snapshotInfo.totalShards())); - logger.info("--> checking snapshot status after it is done with empty repository", blockedNode); + logger.info("--> checking snapshot status after it is done with empty repository"); response = client.admin().cluster().prepareSnapshotStatus().execute().actionGet(); assertThat(response.getSnapshots().size(), equalTo(0)); - logger.info("--> checking that _current no longer returns the snapshot", blockedNode); + logger.info("--> checking that _current no longer returns the snapshot"); assertThat(client.admin().cluster().prepareGetSnapshots("test-repo").addSnapshots("_current").execute().actionGet().getSnapshots().isEmpty(), equalTo(true)); try { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java index e717ea6d6fba..8346f3157f94 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java @@ -543,7 +543,7 @@ public class ExtendedStatsTests extends AbstractNumericTestCase { ShardSearchFailure[] failures = response.getShardFailures(); if (failures.length != expectedFailures) { for (ShardSearchFailure failure : failures) { - logger.error("Shard Failure: 
{}", failure.reason(), failure.toString()); + logger.error("Shard Failure: {}", failure); } fail("Unexpected shard failures!"); } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StatsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StatsTests.java index 4934d2ae6c48..954d4353f5dc 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StatsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StatsTests.java @@ -404,10 +404,10 @@ public class StatsTests extends AbstractNumericTestCase { ShardSearchFailure[] failures = response.getShardFailures(); if (failures.length != expectedFailures) { for (ShardSearchFailure failure : failures) { - logger.error("Shard Failure: {}", failure.reason(), failure.toString()); + logger.error("Shard Failure: {}", failure); } fail("Unexpected shard failures!"); } assertThat("Not all shards are initialized", response.getSuccessfulShards(), equalTo(response.getTotalShards())); } -} \ No newline at end of file +} diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java index 5063d59b40eb..1705421207b1 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java @@ -191,7 +191,7 @@ public class Ec2DiscoveryTests extends ESTestCase { tagsList.add(tags); } - logger.info("started [{}] instances with [{}] stage=prod tag"); + logger.info("started [{}] instances with [{}] stage=prod tag", nodes, prodInstances); List discoveryNodes = buildDynamicNodes(nodeSettings, nodes, tagsList); assertThat(discoveryNodes, hasSize(prodInstances)); } @@ -222,7 +222,7 @@ public class Ec2DiscoveryTests extends ESTestCase { tagsList.add(tags); } - logger.info("started [{}] instances with 
[{}] stage=prod tag"); + logger.info("started [{}] instances with [{}] stage=prod tag", nodes, prodInstances); List discoveryNodes = buildDynamicNodes(nodeSettings, nodes, tagsList); assertThat(discoveryNodes, hasSize(prodInstances)); } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java index cdc6d74edb00..497b0e3753a0 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java @@ -169,7 +169,7 @@ public class AzureStorageServiceImpl extends AbstractLifecycleComponent Date: Sun, 21 Feb 2016 20:56:05 -0800 Subject: [PATCH 181/320] Make logging message String constant to allow static checks --- .../TransportReplicationAction.java | 6 ++-- .../elasticsearch/bootstrap/JNANatives.java | 19 ++++++----- .../org/elasticsearch/bootstrap/JVMCheck.java | 2 +- .../org/elasticsearch/bootstrap/Seccomp.java | 2 +- .../TransportClientNodesService.java | 4 +-- .../action/shard/ShardStateAction.java | 2 +- .../service/InternalClusterService.java | 19 ++++------- .../breaker/ChildMemoryCircuitBreaker.java | 2 +- .../common/breaker/MemoryCircuitBreaker.java | 2 +- .../common/geo/builders/PolygonBuilder.java | 10 +++--- .../common/logging/DeprecationLogger.java | 3 ++ .../common/network/IfConfig.java | 34 +++++++++---------- .../discovery/zen/ZenDiscovery.java | 11 +++--- .../elasticsearch/env/NodeEnvironment.java | 14 +++----- .../gateway/GatewayMetaState.java | 2 +- .../elasticsearch/gateway/GatewayService.java | 9 +++-- .../http/netty/NettyHttpServerTransport.java | 2 +- .../index/analysis/AnalysisService.java | 2 +- ...ElasticsearchConcurrentMergeScheduler.java | 4 +-- .../fielddata/IndexFieldDataService.java | 4 +-- 
.../index/mapper/core/TypeParsers.java | 2 +- .../index/shard/ElasticsearchMergePolicy.java | 10 +++--- .../index/shard/StoreRecovery.java | 5 ++- .../org/elasticsearch/index/store/Store.java | 2 +- .../elasticsearch/indices/IndicesService.java | 2 +- .../recovery/RecoveryTargetService.java | 2 +- .../indices/ttl/IndicesTTLService.java | 2 +- .../elasticsearch/rest/RestController.java | 4 +-- .../snapshots/SnapshotShardsService.java | 4 +-- .../transport/TransportService.java | 4 +-- .../transport/local/LocalTransport.java | 6 ++-- .../netty/MessageChannelHandler.java | 4 +-- .../netty/NettyInternalESLogger.java | 2 ++ .../java/org/elasticsearch/VersionTests.java | 2 +- .../node/tasks/TransportTasksActionTests.java | 10 +++--- .../admin/indices/upgrade/UpgradeIT.java | 4 +-- .../OldIndexBackwardsCompatibilityIT.java | 18 +++++----- .../RecoveryWithUnsupportedIndicesIT.java | 2 +- .../bwcompat/RestoreBackwardsCompatIT.java | 2 +- .../StaticIndexBackwardCompatibilityIT.java | 2 +- .../cluster/ClusterServiceIT.java | 4 +-- .../elasticsearch/cluster/NoMasterNodeIT.java | 2 +- .../cluster/routing/PrimaryAllocationIT.java | 2 +- .../cluster/routing/RoutingTableTests.java | 6 ++-- .../allocation/AddIncrementallyTests.java | 2 +- .../allocation/AwarenessAllocationTests.java | 16 +++------ .../allocation/CatAllocationTestCase.java | 4 +-- .../NodeVersionAllocationDeciderTests.java | 9 +++-- .../ShardsLimitAllocationTests.java | 2 +- .../common/cache/CacheTests.java | 2 +- .../WriteConsistencyLevelIT.java | 6 ++-- .../DiscoveryWithServiceDisruptionsIT.java | 16 ++++----- .../gateway/MetaDataStateFormatTests.java | 2 +- .../gateway/QuorumGatewayIT.java | 2 +- .../index/fielddata/FilterFieldDataTests.java | 2 +- .../mapper/all/SimpleAllMapperTests.java | 2 +- .../multifield/MultiFieldsIntegrationIT.java | 2 +- .../IndexLifecycleActionIT.java | 6 ++-- .../indices/IndicesRequestCacheTests.java | 4 +-- .../settings/UpdateNumberOfReplicasIT.java | 28 +++++++-------- 
.../store/IndicesStoreIntegrationIT.java | 4 +-- .../template/SimpleIndexTemplateIT.java | 2 +- .../nodesinfo/SimpleNodesInfoIT.java | 8 ++--- .../percolator/PercolatorIT.java | 4 +-- .../percolator/RecoveryPercolatorIT.java | 4 +-- .../recovery/TruncatedRecoveryIT.java | 2 +- .../aggregations/bucket/ChildrenIT.java | 2 +- .../search/aggregations/bucket/NestedIT.java | 2 +- .../aggregations/metrics/TopHitsIT.java | 4 +-- .../search/basic/SearchWhileRelocatingIT.java | 2 +- .../basic/TransportSearchFailuresIT.java | 2 +- .../search/geo/GeoShapeQueryTests.java | 2 +- .../search/highlight/HighlighterSearchIT.java | 2 +- .../search/morelikethis/MoreLikeThisIT.java | 2 +- .../search/profile/QueryProfilerIT.java | 22 ++++++------ .../search/scroll/DuelScrollIT.java | 4 +-- .../search/sort/GeoDistanceSortBuilderIT.java | 4 +-- .../SharedClusterSnapshotRestoreIT.java | 10 +++--- .../SnapshotBackwardsCompatibilityIT.java | 2 +- .../snapshots/mockstore/MockRepository.java | 2 +- .../transport/netty/NettyTransportIT.java | 4 +-- .../groovy/GroovyScriptEngineService.java | 2 +- .../messy/tests/EquivalenceTests.java | 2 +- .../messy/tests/ExtendedStatsTests.java | 2 +- .../messy/tests/SearchStatsTests.java | 2 +- .../elasticsearch/messy/tests/StatsTests.java | 2 +- .../messy/tests/StringTermsTests.java | 2 +- .../mustache/MustacheScriptEngineService.java | 2 +- .../elasticsearch/cloud/aws/AwsSigner.java | 2 +- .../elasticsearch/cloud/aws/AwsSigner.java | 2 +- .../elasticsearch/test/CorruptionUtils.java | 10 +++--- .../elasticsearch/test/ESIntegTestCase.java | 2 +- .../test/rest/client/RestClient.java | 3 +- .../test/rest/client/http/HttpResponse.java | 2 +- 94 files changed, 237 insertions(+), 249 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 18a7e5e0705d..69df4e617875 100644 --- 
a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -270,7 +270,7 @@ public abstract class TransportReplicationAction 0) { String reason = String.format(Locale.ROOT, "[%d] unassigned shards after failing shards", numberOfUnassignedShards); if (logger.isTraceEnabled()) { - logger.trace(reason + ", scheduling a reroute"); + logger.trace("{}, scheduling a reroute", reason); } routingService.reroute(reason); } diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index 7cd3d840fbc1..11ee90b79b51 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -436,11 +436,8 @@ public class InternalClusterService extends AbstractLifecycleComponentbuilder().failures(toExecute.stream().map(updateTask -> updateTask.task)::iterator, e).build(previousClusterState); @@ -523,9 +520,7 @@ public class InternalClusterService extends AbstractLifecycleComponent addresses = nic.getInterfaceAddresses(); for (InterfaceAddress address : addresses) { @@ -76,7 +76,7 @@ final class IfConfig { msg.append(System.lineSeparator()); } } - + for (InterfaceAddress address : addresses) { if (address.getAddress() instanceof Inet6Address) { msg.append(INDENT); @@ -84,7 +84,7 @@ final class IfConfig { msg.append(System.lineSeparator()); } } - + // hardware address byte hardware[] = nic.getHardwareAddress(); if (hardware != null) { @@ -98,19 +98,19 @@ final class IfConfig { } msg.append(System.lineSeparator()); } - + // attributes msg.append(INDENT); msg.append(formatFlags(nic)); msg.append(System.lineSeparator()); } - logger.debug("configuration:" + System.lineSeparator() + "{}", msg.toString()); + 
logger.debug("configuration:{}{}", System.lineSeparator(), msg); } - + /** format internet address: java's default doesn't include everything useful */ private static String formatAddress(InterfaceAddress interfaceAddress) throws IOException { StringBuilder sb = new StringBuilder(); - + InetAddress address = interfaceAddress.getAddress(); if (address instanceof Inet6Address) { sb.append("inet6 "); @@ -122,10 +122,10 @@ final class IfConfig { sb.append(NetworkAddress.formatAddress(address)); int netmask = 0xFFFFFFFF << (32 - interfaceAddress.getNetworkPrefixLength()); sb.append(" netmask:" + NetworkAddress.formatAddress(InetAddress.getByAddress(new byte[] { - (byte)(netmask >>> 24), - (byte)(netmask >>> 16 & 0xFF), - (byte)(netmask >>> 8 & 0xFF), - (byte)(netmask & 0xFF) + (byte)(netmask >>> 24), + (byte)(netmask >>> 16 & 0xFF), + (byte)(netmask >>> 8 & 0xFF), + (byte)(netmask & 0xFF) }))); InetAddress broadcast = interfaceAddress.getBroadcast(); if (broadcast != null) { @@ -141,7 +141,7 @@ final class IfConfig { } return sb.toString(); } - + /** format network interface flags */ private static String formatFlags(NetworkInterface nic) throws SocketException { StringBuilder flags = new StringBuilder(); diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index c0dd78b4e5f3..221b4d98f13c 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -823,7 +823,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen return null; } if (logger.isTraceEnabled()) { - StringBuilder sb = new StringBuilder("full ping responses:"); + StringBuilder sb = new StringBuilder(); if (fullPingResponses.length == 0) { sb.append(" {none}"); } else { @@ -831,7 +831,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen sb.append("\n\t--> 
").append(pingResponse); } } - logger.trace(sb.toString()); + logger.trace("full ping responses:{}", sb); } // filter responses @@ -848,7 +848,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen } if (logger.isDebugEnabled()) { - StringBuilder sb = new StringBuilder("filtered ping responses: (filter_client[").append(masterElectionFilterClientNodes).append("], filter_data[").append(masterElectionFilterDataNodes).append("])"); + StringBuilder sb = new StringBuilder(); if (pingResponses.isEmpty()) { sb.append(" {none}"); } else { @@ -856,7 +856,8 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen sb.append("\n\t--> ").append(pingResponse); } } - logger.debug(sb.toString()); + logger.debug("filtered ping responses: (filter_client[{}], filter_data[{}]){}", masterElectionFilterClientNodes, + masterElectionFilterDataNodes, sb); } final DiscoveryNode localNode = clusterService.localNode(); @@ -918,7 +919,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen // *** called from within an cluster state update task *** // assert Thread.currentThread().getName().contains(InternalClusterService.UPDATE_THREAD_NAME); - logger.warn(reason + ", current nodes: {}", clusterState.nodes()); + logger.warn("{}, current nodes: {}", reason, clusterState.nodes()); nodesFD.stop(); masterFD.stop(reason); diff --git a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 0eec5c5765e8..26099cefee4a 100644 --- a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -250,7 +250,7 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl // We do some I/O in here, so skip this if DEBUG/INFO are not enabled: if (logger.isDebugEnabled()) { // Log one line per path.data: - StringBuilder sb = new StringBuilder("node data locations details:"); + StringBuilder 
sb = new StringBuilder(); for (NodePath nodePath : nodePaths) { sb.append('\n').append(" -> ").append(nodePath.path.toAbsolutePath()); @@ -278,7 +278,7 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl .append(fsPath.getType()) .append(']'); } - logger.debug(sb.toString()); + logger.debug("node data locations details:{}", sb); } else if (logger.isInfoEnabled()) { FsInfo.Path totFSPath = new FsInfo.Path(); Set allTypes = new HashSet<>(); @@ -306,14 +306,8 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl } // Just log a 1-line summary: - logger.info(String.format(Locale.ROOT, - "using [%d] data paths, mounts [%s], net usable_space [%s], net total_space [%s], spins? [%s], types [%s]", - nodePaths.length, - allMounts, - totFSPath.getAvailable(), - totFSPath.getTotal(), - toString(allSpins), - toString(allTypes))); + logger.info("using [{}] data paths, mounts [{}], net usable_space [{}], net total_space [{}], spins? [{}], types [{}]", + nodePaths.length, allMounts, totFSPath.getAvailable(), totFSPath.getTotal(), toString(allSpins), toString(allTypes)); } } diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index e90cb750cf50..15277d6fb4fc 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -202,7 +202,7 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL try (DirectoryStream stream = Files.newDirectoryStream(stateLocation)) { for (Path stateFile : stream) { if (logger.isTraceEnabled()) { - logger.trace("[upgrade]: processing [" + stateFile.getFileName() + "]"); + logger.trace("[upgrade]: processing [{}]", stateFile.getFileName()); } final String name = stateFile.getFileName().toString(); if (name.startsWith("metadata-")) { diff --git 
a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java index 384539b4c631..d4791464e8bb 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java @@ -161,11 +161,14 @@ public class GatewayService extends AbstractLifecycleComponent i if (state.nodes().masterNodeId() == null) { logger.debug("not recovering from gateway, no master elected yet"); } else if (recoverAfterNodes != -1 && (nodes.masterAndDataNodes().size()) < recoverAfterNodes) { - logger.debug("not recovering from gateway, nodes_size (data+master) [" + nodes.masterAndDataNodes().size() + "] < recover_after_nodes [" + recoverAfterNodes + "]"); + logger.debug("not recovering from gateway, nodes_size (data+master) [{}] < recover_after_nodes [{}]", + nodes.masterAndDataNodes().size(), recoverAfterNodes); } else if (recoverAfterDataNodes != -1 && nodes.dataNodes().size() < recoverAfterDataNodes) { - logger.debug("not recovering from gateway, nodes_size (data) [" + nodes.dataNodes().size() + "] < recover_after_data_nodes [" + recoverAfterDataNodes + "]"); + logger.debug("not recovering from gateway, nodes_size (data) [{}] < recover_after_data_nodes [{}]", + nodes.dataNodes().size(), recoverAfterDataNodes); } else if (recoverAfterMasterNodes != -1 && nodes.masterNodes().size() < recoverAfterMasterNodes) { - logger.debug("not recovering from gateway, nodes_size (master) [" + nodes.masterNodes().size() + "] < recover_after_master_nodes [" + recoverAfterMasterNodes + "]"); + logger.debug("not recovering from gateway, nodes_size (master) [{}] < recover_after_master_nodes [{}]", + nodes.masterNodes().size(), recoverAfterMasterNodes); } else { boolean enforceRecoverAfterTime; String reason; diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java 
b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java index e64c6401f718..6bebe2e22ca6 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java +++ b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java @@ -262,7 +262,7 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent Integer.MAX_VALUE) { - logger.warn("maxContentLength[" + maxContentLength + "] set to high value, resetting it to [100mb]"); + logger.warn("maxContentLength[{}] set to high value, resetting it to [100mb]", maxContentLength); maxContentLength = new ByteSizeValue(100, ByteSizeUnit.MB); } this.maxContentLength = maxContentLength; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java index 09e96f3743be..453552b9dd11 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java @@ -155,7 +155,7 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable // because analyzers are aliased, they might be closed several times // an NPE is thrown in this case, so ignore.... 
} catch (Exception e) { - logger.debug("failed to close analyzer " + analyzer); + logger.debug("failed to close analyzer {}", analyzer); } } } diff --git a/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java b/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java index 6dd710e4e890..965a2e58f9cc 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java +++ b/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java @@ -129,9 +129,9 @@ class ElasticsearchConcurrentMergeScheduler extends ConcurrentMergeScheduler { merge.rateLimiter.getMBPerSec()); if (tookMS > 20000) { // if more than 20 seconds, DEBUG log it - logger.debug(message); + logger.debug("{}", message); } else if (logger.isTraceEnabled()) { - logger.trace(message); + logger.trace("{}", message); } } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java index 94e9edc5b946..536b79b4c44c 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java @@ -230,13 +230,13 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo IndexFieldData.Builder builder = null; String format = type.getFormat(indexSettings.getSettings()); if (format != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(format) && !docValues) { - logger.warn("field [" + fieldName + "] has no doc values, will use default field data format"); + logger.warn("field [{}] has no doc values, will use default field data format", fieldName); format = null; } if (format != null) { builder = buildersByTypeAndFormat.get(Tuple.tuple(type.getType(), format)); if (builder == null) { - logger.warn("failed to find format [" + format + "] 
for field [" + fieldName + "], will use default"); + logger.warn("failed to find format [{}] for field [{}], will use default", format, fieldName); } } if (builder == null && docValues) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java index c42de2f611f1..f8c1c0a812a9 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java @@ -256,7 +256,7 @@ public class TypeParsers { (indexVersionCreated.after(Version.V_2_0_1) && indexVersionCreated.before(Version.V_2_1_0))) { throw new MapperParsingException("copy_to in multi fields is not allowed. Found the copy_to in field [" + name + "] which is within a multi field."); } else { - ESLoggerFactory.getLogger("mapping [" + parserContext.type() + "]").warn("Found a copy_to in field [" + name + "] which is within a multi field. This feature has been removed and the copy_to will be removed from the mapping."); + ESLoggerFactory.getLogger("mapping [" + parserContext.type() + "]").warn("Found a copy_to in field [{}] which is within a multi field. This feature has been removed and the copy_to will be removed from the mapping.", name); } } else { parseCopyFields(propNode, builder); diff --git a/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java b/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java index 524266420fb7..adae6caf4524 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java +++ b/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java @@ -49,7 +49,7 @@ import java.util.Map; * be stored as payloads to numeric doc values. 
*/ public final class ElasticsearchMergePolicy extends MergePolicy { - + private static ESLogger logger = Loggers.getLogger(ElasticsearchMergePolicy.class); private final MergePolicy delegate; @@ -69,7 +69,7 @@ public final class ElasticsearchMergePolicy extends MergePolicy { /** Return an "upgraded" view of the reader. */ static CodecReader filter(CodecReader reader) throws IOException { - // TODO: remove 0.90.x/1.x freqs/prox/payloads from _uid? + // TODO: remove 0.90.x/1.x freqs/prox/payloads from _uid? // the previous code never did this, so some indexes carry around trash. return reader; } @@ -155,7 +155,7 @@ public final class ElasticsearchMergePolicy extends MergePolicy { // TODO: Use IndexUpgradeMergePolicy instead. We should be comparing codecs, // for now we just assume every minor upgrade has a new format. - logger.debug("Adding segment " + info.info.name + " to be upgraded"); + logger.debug("Adding segment {} to be upgraded", info.info.name); spec.add(new OneMerge(Collections.singletonList(info))); } @@ -163,14 +163,14 @@ public final class ElasticsearchMergePolicy extends MergePolicy { if (spec.merges.size() == MAX_CONCURRENT_UPGRADE_MERGES) { // hit our max upgrades, so return the spec. we will get a cascaded call to continue. - logger.debug("Returning " + spec.merges.size() + " merges for upgrade"); + logger.debug("Returning {} merges for upgrade", spec.merges.size()); return spec; } } // We must have less than our max upgrade merges, so the next return will be our last in upgrading mode. 
if (spec.merges.isEmpty() == false) { - logger.debug("Returning " + spec.merges.size() + " merges for end of upgrade"); + logger.debug("Returning {} merges for end of upgrade", spec.merges.size()); return spec; } diff --git a/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java b/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java index e057349223d7..d11e67340255 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java +++ b/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java @@ -128,9 +128,8 @@ final class StoreRecovery { assert shardState != IndexShardState.CREATED && shardState != IndexShardState.RECOVERING : "recovery process of " + shardId + " didn't get to post_recovery. shardState [" + shardState + "]"; if (logger.isTraceEnabled()) { - StringBuilder sb = new StringBuilder(); - sb.append("recovery completed from ").append("shard_store").append(", took [").append(timeValueMillis(recoveryState.getTimer().time())).append("]\n"); RecoveryState.Index index = recoveryState.getIndex(); + StringBuilder sb = new StringBuilder(); sb.append(" index : files [").append(index.totalFileCount()).append("] with total_size [") .append(new ByteSizeValue(index.totalBytes())).append("], took[") .append(TimeValue.timeValueMillis(index.time())).append("]\n"); @@ -142,7 +141,7 @@ final class StoreRecovery { .append(timeValueMillis(recoveryState.getVerifyIndex().checkIndexTime())).append("]\n"); sb.append(" translog : number_of_operations [").append(recoveryState.getTranslog().recoveredOperations()) .append("], took [").append(TimeValue.timeValueMillis(recoveryState.getTranslog().time())).append("]"); - logger.trace(sb.toString()); + logger.trace("recovery completed from [shard_store], took [{}]\n{}", timeValueMillis(recoveryState.getTimer().time()), sb); } else if (logger.isDebugEnabled()) { logger.debug("recovery completed from [shard_store], took [{}]", timeValueMillis(recoveryState.getTimer().time())); } diff 
--git a/core/src/main/java/org/elasticsearch/index/store/Store.java b/core/src/main/java/org/elasticsearch/index/store/Store.java index 77e7f32f5f51..02a5a4da6fe0 100644 --- a/core/src/main/java/org/elasticsearch/index/store/Store.java +++ b/core/src/main/java/org/elasticsearch/index/store/Store.java @@ -379,7 +379,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref if (isClosed.compareAndSet(false, true)) { // only do this once! decRef(); - logger.debug("store reference count on close: " + refCounter.refCount()); + logger.debug("store reference count on close: {}", refCounter.refCount()); } } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index 6fd833471eda..b1b38578887e 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -193,7 +193,7 @@ public class IndicesService extends AbstractLifecycleComponent i try { removeIndex(index, "shutdown", false); } catch (Throwable e) { - logger.warn("failed to remove index on stop " + index + "", e); + logger.warn("failed to remove index on stop [{}]", e, index); } finally { latch.countDown(); } diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetService.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetService.java index dcbb0c7bedf3..ab8c87cd636b 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetService.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetService.java @@ -218,7 +218,7 @@ public class RecoveryTargetService extends AbstractComponent implements IndexEve "operations") .append(", took [").append(timeValueMillis(recoveryResponse.phase2Time)).append("]") .append("\n"); - logger.trace(sb.toString()); + logger.trace("{}", sb); } else { logger.debug("{} recovery done from [{}], took 
[{}]", request.shardId(), recoveryTarget.sourceNode(), recoveryTime); } diff --git a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java index e4537b876fa2..09d07f4dec4a 100644 --- a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java +++ b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java @@ -287,7 +287,7 @@ public class IndicesTTLService extends AbstractLifecycleComponent { try { channel.sendResponse(new BytesRestResponse(channel, e)); } catch (Throwable e1) { - logger.error("failed to send failure response for uri [" + request.uri() + "]", e1); + logger.error("failed to send failure response for uri [{}]", e1, request.uri()); } } } else { @@ -275,7 +275,7 @@ public class RestController extends AbstractLifecycleComponent { try { channel.sendResponse(new BytesRestResponse(channel, e)); } catch (IOException e1) { - logger.error("Failed to send failure response for uri [" + request.uri() + "]", e1); + logger.error("Failed to send failure response for uri [{}]", e1, request.uri()); } } } diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index 340a7f6ce835..949d4607b638 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -343,9 +343,9 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implem handleException(handler, new RemoteTransportException(nodeName(), localAddress, action, e)); } } else { - logger.warn("Failed to receive message for action [" + action + "]", e); + logger.warn("Failed to receive message for action [{}]", e, action); } } } @@ -314,7 +314,7 @@ public class LocalTransport extends AbstractLifecycleComponent implem try { transportChannel.sendResponse(e); } catch 
(Throwable e1) { - logger.warn("Failed to send error message back to client for action [" + action + "]", e1); + logger.warn("Failed to send error message back to client for action [{}]", e1, action); logger.warn("Actual Exception", e); } } @@ -325,7 +325,7 @@ public class LocalTransport extends AbstractLifecycleComponent implem try { transportChannel.sendResponse(e); } catch (Throwable e1) { - logger.warn("Failed to send error message back to client for action [" + action + "]", e); + logger.warn("Failed to send error message back to client for action [{}]", e, action); logger.warn("Actual Exception", e1); } diff --git a/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java b/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java index e4dbbfa73af7..302f8296ad32 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java @@ -274,7 +274,7 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler { try { transportChannel.sendResponse(e); } catch (IOException e1) { - logger.warn("Failed to send error message back to client for action [" + action + "]", e); + logger.warn("Failed to send error message back to client for action [{}]", e, action); logger.warn("Actual Exception", e1); } } @@ -336,7 +336,7 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler { try { transportChannel.sendResponse(e); } catch (Throwable e1) { - logger.warn("Failed to send error message back to client for action [" + reg.getAction() + "]", e1); + logger.warn("Failed to send error message back to client for action [{}]", e1, reg.getAction()); logger.warn("Actual Exception", e); } } diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyInternalESLogger.java b/core/src/main/java/org/elasticsearch/transport/netty/NettyInternalESLogger.java index ed92aa261db2..2a1fc3226a4e 
100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/NettyInternalESLogger.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/NettyInternalESLogger.java @@ -19,12 +19,14 @@ package org.elasticsearch.transport.netty; +import org.elasticsearch.common.SuppressLoggerChecks; import org.elasticsearch.common.logging.ESLogger; import org.jboss.netty.logging.AbstractInternalLogger; /** * */ +@SuppressLoggerChecks(reason = "safely delegates to logger") public class NettyInternalESLogger extends AbstractInternalLogger { private final ESLogger logger; diff --git a/core/src/test/java/org/elasticsearch/VersionTests.java b/core/src/test/java/org/elasticsearch/VersionTests.java index 7824ecd39b12..eec912989a74 100644 --- a/core/src/test/java/org/elasticsearch/VersionTests.java +++ b/core/src/test/java/org/elasticsearch/VersionTests.java @@ -231,7 +231,7 @@ public class VersionTests extends ESTestCase { assertTrue(constantName + " should be final", Modifier.isFinal(versionConstant.getModifiers())); Version v = (Version) versionConstant.get(Version.class); - logger.info("Checking " + v); + logger.info("Checking {}", v); assertEquals("Version id " + field.getName() + " does not point to " + constantName, v, Version.fromId(versionId)); assertEquals("Version " + constantName + " does not have correct id", versionId, v.id); if (v.major >= 2) { diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index 556eee238fda..2fe79d25ebb7 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -294,14 +294,14 @@ public class TransportTasksActionTests extends TaskManagerTestCase { actions[i] = new TestNodesAction(Settings.EMPTY, "testAction", 
clusterName, threadPool, testNodes[i].clusterService, testNodes[i].transportService) { @Override protected NodeResponse nodeOperation(NodeRequest request) { - logger.info("Action on node " + node); + logger.info("Action on node {}", node); actionLatch.countDown(); try { checkLatch.await(); } catch (InterruptedException ex) { Thread.currentThread().interrupt(); } - logger.info("Action on node " + node + " finished"); + logger.info("Action on node {} finished", node); return new NodeResponse(testNodes[node].discoveryNode); } }; @@ -565,7 +565,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase { actions[i] = new TestNodesAction(Settings.EMPTY, "testAction", clusterName, threadPool, testNodes[i].clusterService, testNodes[i].transportService) { @Override protected NodeResponse nodeOperation(NodeRequest request) { - logger.info("Action on node " + node); + logger.info("Action on node {}", node); throw new RuntimeException("Test exception"); } }; @@ -604,9 +604,9 @@ public class TransportTasksActionTests extends TaskManagerTestCase { tasksActions[i] = new TestTasksAction(Settings.EMPTY, "testTasksAction", clusterName, threadPool, testNodes[i].clusterService, testNodes[i].transportService) { @Override protected TestTaskResponse taskOperation(TestTasksRequest request, Task task) { - logger.info("Task action on node " + node); + logger.info("Task action on node {}", node); if (failTaskOnNode == node && task.getParentTaskId().isSet() == false) { - logger.info("Failing on node " + node); + logger.info("Failing on node {}", node); throw new RuntimeException("Task level failure"); } return new TestTaskResponse("Success on node " + node); diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java index baca9508a8bb..620cef31f9a0 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java +++ 
b/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java @@ -134,7 +134,7 @@ public class UpgradeIT extends ESBackcompatTestCase { // means we can never generate ancient segments in this test (unless Lucene major version bumps but ES major version does not): assertFalse(hasAncientSegments(client(), indexToUpgrade)); - logger.info("--> Running upgrade on index " + indexToUpgrade); + logger.info("--> Running upgrade on index {}", indexToUpgrade); assertNoFailures(client().admin().indices().prepareUpgrade(indexToUpgrade).get()); awaitBusy(() -> { try { @@ -228,7 +228,7 @@ public class UpgradeIT extends ESBackcompatTestCase { ESLogger logger = Loggers.getLogger(UpgradeIT.class); int toUpgrade = 0; for (IndexUpgradeStatus status : getUpgradeStatus(client, index)) { - logger.info("Index: " + status.getIndex() + ", total: " + status.getTotalBytes() + ", toUpgrade: " + status.getToUpgradeBytes()); + logger.info("Index: {}, total: {}, toUpgrade: {}", status.getIndex(), status.getTotalBytes(), status.getToUpgradeBytes()); toUpgrade += status.getToUpgradeBytes(); } return toUpgrade == 0; diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index 8e3dbd5f563d..1b0988f21bae 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -162,7 +162,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { singleDataPath = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER); assertFalse(Files.exists(singleDataPath)); Files.createDirectories(singleDataPath); - logger.info("--> Single data path: " + singleDataPath.toString()); + logger.info("--> Single data path: {}", singleDataPath); // find multi data path dirs nodePaths = internalCluster().getInstance(NodeEnvironment.class, 
multiDataPathNode.get()).nodeDataPaths(); @@ -173,7 +173,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { assertFalse(Files.exists(multiDataPath[1])); Files.createDirectories(multiDataPath[0]); Files.createDirectories(multiDataPath[1]); - logger.info("--> Multi data paths: " + multiDataPath[0].toString() + ", " + multiDataPath[1].toString()); + logger.info("--> Multi data paths: {}, {}", multiDataPath[0], multiDataPath[1]); replicas.get(); // wait for replicas } @@ -239,13 +239,13 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { if (file.getFileName().toString().equals(IndexWriter.WRITE_LOCK_NAME)) { // skip lock file, we don't need it - logger.trace("Skipping lock file: " + file.toString()); + logger.trace("Skipping lock file: {}", file); return FileVisitResult.CONTINUE; } Path relativeFile = src.relativize(file); Path destFile = destinationDataPath.resolve(indexName).resolve(relativeFile); - logger.trace("--> Moving " + relativeFile.toString() + " to " + destFile.toString()); + logger.trace("--> Moving {} to {}", relativeFile, destFile); Files.move(file, destFile); assertFalse(Files.exists(file)); assertTrue(Files.exists(destFile)); @@ -269,7 +269,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { for (String index : indexes) { if (expectedVersions.remove(index) == false) { - logger.warn("Old indexes tests contain extra index: " + index); + logger.warn("Old indexes tests contain extra index: {}", index); } } if (expectedVersions.isEmpty() == false) { @@ -287,9 +287,9 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { Collections.shuffle(indexes, random()); for (String index : indexes) { long startTime = System.currentTimeMillis(); - logger.info("--> Testing old index " + index); + logger.info("--> Testing old index {}", index); assertOldIndexWorks(index); - 
logger.info("--> Done testing " + index + ", took " + ((System.currentTimeMillis() - startTime) / 1000.0) + " seconds"); + logger.info("--> Done testing {}, took {} seconds", index, (System.currentTimeMillis() - startTime) / 1000.0); } } @@ -344,7 +344,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { SearchResponse searchRsp = searchReq.get(); ElasticsearchAssertions.assertNoFailures(searchRsp); long numDocs = searchRsp.getHits().getTotalHits(); - logger.info("Found " + numDocs + " in old index"); + logger.info("Found {} in old index", numDocs); logger.info("--> testing basic search with sort"); searchReq.addSort("long_sort", SortOrder.ASC); @@ -523,7 +523,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { for (String indexFile : indexes) { String indexName = indexFile.replace(".zip", "").toLowerCase(Locale.ROOT).replace("unsupported-", "index-"); Path nodeDir = getNodeDir(indexFile); - logger.info("Parsing cluster state files from index [" + indexName + "]"); + logger.info("Parsing cluster state files from index [{}]", indexName); assertNotNull(globalFormat.loadLatestState(logger, nodeDir)); // no exception Path indexDir = nodeDir.resolve("indices").resolve(indexName); assertNotNull(indexFormat.loadLatestState(logger, indexDir)); // no exception diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java index 23163b86112a..9fe83f65c45a 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java @@ -28,7 +28,7 @@ public class RecoveryWithUnsupportedIndicesIT extends StaticIndexBackwardCompati public void testUpgradeStartClusterOn_0_20_6() throws Exception { String indexName = "unsupported-0.20.6"; - logger.info("Checking static index " + indexName); + 
logger.info("Checking static index {}", indexName); Settings nodeSettings = prepareBackwardsDataDir(getBwcIndicesPath().resolve(indexName + ".zip"), NetworkModule.HTTP_ENABLED.getKey(), true); try { internalCluster().startNode(nodeSettings); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java index 483040209d08..5b81621e6dd3 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java @@ -108,7 +108,7 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { for (String repoVersion : repoVersions) { if (expectedVersions.remove(repoVersion) == false) { - logger.warn("Old repositories tests contain extra repo: " + repoVersion); + logger.warn("Old repositories tests contain extra repo: {}", repoVersion); } } if (expectedVersions.isEmpty() == false) { diff --git a/core/src/test/java/org/elasticsearch/bwcompat/StaticIndexBackwardCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/StaticIndexBackwardCompatibilityIT.java index 794aea854871..3884d3475e12 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/StaticIndexBackwardCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/StaticIndexBackwardCompatibilityIT.java @@ -36,7 +36,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; public class StaticIndexBackwardCompatibilityIT extends ESIntegTestCase { public void loadIndex(String index, Object... 
settings) throws Exception { - logger.info("Checking static index " + index); + logger.info("Checking static index {}", index); Settings nodeSettings = prepareBackwardsDataDir(getDataPath(index + ".zip"), settings); internalCluster().startNode(nodeSettings); ensureGreen(index); diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java index 813557e314b9..351959460b15 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java @@ -608,13 +608,13 @@ public class ClusterServiceIT extends ESIntegTestCase { @Override public void onMaster() { - logger.info("on master [" + clusterService.localNode() + "]"); + logger.info("on master [{}]", clusterService.localNode()); master = true; } @Override public void offMaster() { - logger.info("off master [" + clusterService.localNode() + "]"); + logger.info("off master [{}]", clusterService.localNode()); master = false; } diff --git a/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java b/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java index 370f1464fd2c..13b1d40b5d32 100644 --- a/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java @@ -235,7 +235,7 @@ public class NoMasterNodeIT extends ESIntegTestCase { ensureSearchable("test1", "test2"); ClusterStateResponse clusterState = client().admin().cluster().prepareState().get(); - logger.info("Cluster state:\n" + clusterState.getState().prettyPrint()); + logger.info("Cluster state:\n{}", clusterState.getState().prettyPrint()); internalCluster().stopRandomDataNode(); assertTrue(awaitBusy(() -> { diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java index 94336d23623b..da6f270a79df 
100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -163,7 +163,7 @@ public class PrimaryAllocationIT extends ESIntegTestCase { for (IntObjectCursor> shardStoreStatuses : storeStatuses) { int shardId = shardStoreStatuses.key; IndicesShardStoresResponse.StoreStatus storeStatus = randomFrom(shardStoreStatuses.value); - logger.info("--> adding allocation command for shard " + shardId); + logger.info("--> adding allocation command for shard {}", shardId); // force allocation based on node id if (useStaleReplica) { rerouteBuilder.add(new AllocateStalePrimaryAllocationCommand("test", shardId, storeStatus.getNode().getId(), true)); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java index 741d62d74e6d..40e24338f009 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java @@ -63,7 +63,7 @@ public class RoutingTableTests extends ESAllocationTestCase { this.numberOfReplicas = randomIntBetween(1, 5); this.shardsPerIndex = this.numberOfShards * (this.numberOfReplicas + 1); this.totalNumberOfShards = this.shardsPerIndex * 2; - logger.info("Setup test with " + this.numberOfShards + " shards and " + this.numberOfReplicas + " replicas."); + logger.info("Setup test with {} shards and {} replicas.", this.numberOfShards, this.numberOfReplicas); this.emptyRoutingTable = new RoutingTable.Builder().build(); MetaData metaData = MetaData.builder() .put(createIndexMetaData(TEST_INDEX_1)) @@ -81,7 +81,7 @@ public class RoutingTableTests extends ESAllocationTestCase { * puts primary shard routings into initializing state */ private void initPrimaries() { - logger.info("adding " + (this.numberOfReplicas + 1) + " nodes and performing 
rerouting"); + logger.info("adding {} nodes and performing rerouting", this.numberOfReplicas + 1); Builder discoBuilder = DiscoveryNodes.builder(); for (int i = 0; i < this.numberOfReplicas + 1; i++) { discoBuilder = discoBuilder.put(newNode("node" + i)); @@ -95,7 +95,7 @@ public class RoutingTableTests extends ESAllocationTestCase { private void startInitializingShards(String index) { this.clusterState = ClusterState.builder(clusterState).routingTable(this.testRoutingTable).build(); - logger.info("start primary shards for index " + index); + logger.info("start primary shards for index {}", index); RoutingAllocation.Result rerouteResult = ALLOCATION_SERVICE.applyStartedShards(this.clusterState, this.clusterState.getRoutingNodes().shardsWithState(index, INITIALIZING)); this.clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); this.testRoutingTable = rerouteResult.routingTable(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java index 91ba1f4999c8..1c5f77ce4080 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java @@ -301,7 +301,7 @@ public class AddIncrementallyTests extends ESAllocationTestCase { RoutingTable routingTable = routingTableBuilder.build(); - logger.info("start " + numberOfNodes + " nodes"); + logger.info("start {} nodes", numberOfNodes); DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(); for (int i = 0; i < numberOfNodes; i++) { nodes.put(newNode("node" + i)); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java index 8810fc473950..18f24504619b 100644 --- 
a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java @@ -221,18 +221,10 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { routingTable = strategy.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); - for (ShardRouting shard : clusterState.getRoutingNodes().shardsWithState(INITIALIZING)) { - logger.info(shard.toString()); - } - for (ShardRouting shard : clusterState.getRoutingNodes().shardsWithState(STARTED)) { - logger.info(shard.toString()); - } - for (ShardRouting shard : clusterState.getRoutingNodes().shardsWithState(RELOCATING)) { - logger.info(shard.toString()); - } - for (ShardRouting shard : clusterState.getRoutingNodes().shardsWithState(UNASSIGNED)) { - logger.info(shard.toString()); - } + logger.info("Initializing shards: {}", clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); + logger.info("Started shards: {}", clusterState.getRoutingNodes().shardsWithState(STARTED)); + logger.info("Relocating shards: {}", clusterState.getRoutingNodes().shardsWithState(RELOCATING)); + logger.info("Unassigned shards: {}", clusterState.getRoutingNodes().shardsWithState(UNASSIGNED)); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(5)); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/CatAllocationTestCase.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/CatAllocationTestCase.java index 1ba0c0632555..be4035101952 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/CatAllocationTestCase.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/CatAllocationTestCase.java @@ -147,12 +147,12 @@ public abstract class CatAllocationTestCase extends ESAllocationTestCase { if (initializing.isEmpty()) 
{ break; } - logger.debug(initializing.toString()); + logger.debug("Initializing shards: {}", initializing); numRelocations += initializing.size(); routingTable = strategy.applyStartedShards(clusterState, initializing).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); } - logger.debug("--> num relocations to get balance: " + numRelocations); + logger.debug("--> num relocations to get balance: {}", numRelocations); return clusterState; } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index 813bee8f80eb..3ec8df5cea63 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -409,14 +409,16 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { String fromId = r.currentNodeId(); assertThat(fromId, notNullValue()); assertThat(toId, notNullValue()); - logger.trace("From: " + fromId + " with Version: " + routingNodes.node(fromId).node().version() + " to: " + toId + " with Version: " + routingNodes.node(toId).node().version()); + logger.trace("From: {} with Version: {} to: {} with Version: {}", fromId, routingNodes.node(fromId).node().version(), + toId, routingNodes.node(toId).node().version()); assertTrue(routingNodes.node(toId).node().version().onOrAfter(routingNodes.node(fromId).node().version())); } else { ShardRouting primary = routingNodes.activePrimary(r); assertThat(primary, notNullValue()); String fromId = primary.currentNodeId(); String toId = r.relocatingNodeId(); - logger.trace("From: " + fromId + " with Version: " + routingNodes.node(fromId).node().version() + " to: " + toId + " with Version: " + routingNodes.node(toId).node().version()); + 
logger.trace("From: {} with Version: {} to: {} with Version: {}", fromId, routingNodes.node(fromId).node().version(), + toId, routingNodes.node(toId).node().version()); assertTrue(routingNodes.node(toId).node().version().onOrAfter(routingNodes.node(fromId).node().version())); } } @@ -428,7 +430,8 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { assertThat(primary, notNullValue()); String fromId = primary.currentNodeId(); String toId = r.currentNodeId(); - logger.trace("From: " + fromId + " with Version: " + routingNodes.node(fromId).node().version() + " to: " + toId + " with Version: " + routingNodes.node(toId).node().version()); + logger.trace("From: {} with Version: {} to: {} with Version: {}", fromId, routingNodes.node(fromId).node().version(), + toId, routingNodes.node(toId).node().version()); assertTrue(routingNodes.node(toId).node().version().onOrAfter(routingNodes.node(fromId).node().version())); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java index 0830747a9dd2..e220c8eb0f67 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java @@ -212,7 +212,7 @@ public class ShardsLimitAllocationTests extends ESAllocationTestCase { assertThat(shardRouting.getIndexName(), equalTo("test1")); } - logger.info("update " + ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey() + " for test, see that things move"); + logger.info("update {} for test, see that things move", ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey()); metaData = MetaData.builder(metaData) .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 5) diff --git 
a/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java b/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java index 921c66f7acb4..3b88a3bdcfe9 100644 --- a/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java +++ b/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java @@ -55,7 +55,7 @@ public class CacheTests extends ESTestCase { public void setUp() throws Exception { super.setUp(); numberOfEntries = randomIntBetween(1000, 10000); - logger.debug("numberOfEntries: " + numberOfEntries); + logger.debug("numberOfEntries: {}", numberOfEntries); } // cache some entries, then randomly lookup keys that do not exist, then check the stats diff --git a/core/src/test/java/org/elasticsearch/consistencylevel/WriteConsistencyLevelIT.java b/core/src/test/java/org/elasticsearch/consistencylevel/WriteConsistencyLevelIT.java index b696c445f305..46c027cb91c6 100644 --- a/core/src/test/java/org/elasticsearch/consistencylevel/WriteConsistencyLevelIT.java +++ b/core/src/test/java/org/elasticsearch/consistencylevel/WriteConsistencyLevelIT.java @@ -40,7 +40,7 @@ public class WriteConsistencyLevelIT extends ESIntegTestCase { prepareCreate("test", 1, Settings.settingsBuilder().put("index.number_of_shards", 1).put("index.number_of_replicas", 2)).execute().actionGet(); ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForActiveShards(1).setWaitForYellowStatus().execute().actionGet(); - logger.info("Done Cluster Health, status " + clusterHealth.getStatus()); + logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.YELLOW)); @@ -60,7 +60,7 @@ public class WriteConsistencyLevelIT extends ESIntegTestCase { allowNodes("test", 2); clusterHealth = 
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForActiveShards(2).setWaitForYellowStatus().execute().actionGet(); - logger.info("Done Cluster Health, status " + clusterHealth.getStatus()); + logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.YELLOW)); @@ -82,7 +82,7 @@ public class WriteConsistencyLevelIT extends ESIntegTestCase { allowNodes("test", 3); clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForActiveShards(3).setWaitForGreenStatus().execute().actionGet(); - logger.info("Done Cluster Health, status " + clusterHealth.getStatus()); + logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index 3948a4bab903..29997aec8f64 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -208,7 +208,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { // Figure out what is the elected master node final String masterNode = internalCluster().getMasterName(); - logger.info("---> legit elected master node=" + masterNode); + logger.info("---> legit elected master node={}", masterNode); // Pick a node that isn't the elected master. 
Set nonMasters = new HashSet<>(nodes); @@ -496,7 +496,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { } int docsPerIndexer = randomInt(3); - logger.info("indexing " + docsPerIndexer + " docs per indexer before partition"); + logger.info("indexing {} docs per indexer before partition", docsPerIndexer); countDownLatchRef.set(new CountDownLatch(docsPerIndexer * indexers.size())); for (Semaphore semaphore : semaphores) { semaphore.release(docsPerIndexer); @@ -508,7 +508,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { disruptionScheme.startDisrupting(); docsPerIndexer = 1 + randomInt(5); - logger.info("indexing " + docsPerIndexer + " docs per indexer during partition"); + logger.info("indexing {} docs per indexer during partition", docsPerIndexer); countDownLatchRef.set(new CountDownLatch(docsPerIndexer * indexers.size())); Collections.shuffle(semaphores, random()); for (Semaphore semaphore : semaphores) { @@ -539,11 +539,11 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { } } finally { if (exceptedExceptions.size() > 0) { - StringBuilder sb = new StringBuilder("Indexing exceptions during disruption:"); + StringBuilder sb = new StringBuilder(); for (Exception e : exceptedExceptions) { sb.append("\n").append(e.getMessage()); } - logger.debug(sb.toString()); + logger.debug("Indexing exceptions during disruption: {}", sb); } logger.info("shutting down indexers"); stop.set(true); @@ -731,7 +731,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { IndexResponse indexResponse = internalCluster().client(notIsolatedNode).prepareIndex("test", "type").setSource("field", "value").get(); assertThat(indexResponse.getVersion(), equalTo(1L)); - logger.info("Verifying if document exists via node[" + notIsolatedNode + "]"); + logger.info("Verifying if document exists via node[{}]", notIsolatedNode); GetResponse getResponse = 
internalCluster().client(notIsolatedNode).prepareGet("test", "type", indexResponse.getId()) .setPreference("_local") .get(); @@ -745,7 +745,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { ensureGreen("test"); for (String node : nodes) { - logger.info("Verifying if document exists after isolating node[" + isolatedNode + "] via node[" + node + "]"); + logger.info("Verifying if document exists after isolating node[{}] via node[{}]", isolatedNode, node); getResponse = internalCluster().client(node).prepareGet("test", "type", indexResponse.getId()) .setPreference("_local") .get(); @@ -764,7 +764,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { List nodes = startCluster(4, -1, new int[]{0}); // Figure out what is the elected master node final String masterNode = internalCluster().getMasterName(); - logger.info("---> legit elected master node=" + masterNode); + logger.info("---> legit elected master node={}", masterNode); List otherNodes = new ArrayList<>(nodes); otherNodes.remove(masterNode); otherNodes.remove(nodes.get(0)); // <-- Don't isolate the node that is in the unicast endpoint for all the other nodes. 
diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java index 00c549ef2f11..dfd8ba51a541 100644 --- a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java @@ -225,7 +225,7 @@ public class MetaDataStateFormatTests extends ESTestCase { msg.append(" after: [").append(checksumAfterCorruption).append("]"); msg.append(" checksum value after corruption: ").append(actualChecksumAfterCorruption).append("]"); msg.append(" file: ").append(fileToCorrupt.getFileName().toString()).append(" length: ").append(dir.fileLength(fileToCorrupt.getFileName().toString())); - logger.debug(msg.toString()); + logger.debug("{}", msg.toString()); assumeTrue("Checksum collision - " + msg.toString(), checksumAfterCorruption != checksumBeforeCorruption // collision || actualChecksumAfterCorruption != checksumBeforeCorruption); // checksum corrupted diff --git a/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java index 399ef9badab2..a1d16bfd884f 100644 --- a/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java @@ -82,7 +82,7 @@ public class QuorumGatewayIT extends ESIntegTestCase { assertTrue(awaitBusy(() -> { logger.info("--> running cluster_health (wait for the shards to startup)"); ClusterHealthResponse clusterHealth = activeClient.admin().cluster().health(clusterHealthRequest().waitForYellowStatus().waitForNodes("2").waitForActiveShards(test.numPrimaries * 2)).actionGet(); - logger.info("--> done cluster_health, status " + clusterHealth.getStatus()); + logger.info("--> done cluster_health, status {}", clusterHealth.getStatus()); return (!clusterHealth.isTimedOut()) && clusterHealth.getStatus() == 
ClusterHealthStatus.YELLOW; }, 30, TimeUnit.SECONDS)); logger.info("--> one node is closed -- index 1 document into the remaining nodes"); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java index 49cb414208db..3d2b77246a8c 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java @@ -145,7 +145,7 @@ public class FilterFieldDataTests extends AbstractFieldDataTestCase { } writer.addDocument(d); } - logger.debug(hundred + " " + ten + " " + five); + logger.debug("{} {} {}", hundred, ten, five); writer.forceMerge(1, true); LeafReaderContext context = refreshReader(); String[] formats = new String[] { "paged_bytes"}; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index 191ce5d477eb..19d0317f492f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -272,7 +272,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); String mapping = mappingBuilder.endObject().endObject().bytes().toUtf8(); - logger.info(mapping); + logger.info("Mapping: {}", mapping); DocumentMapper docMapper = parser.parse("test", new CompressedXContent(mapping)); String builtMapping = docMapper.mappingSource().string(); // reparse it diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationIT.java index 6a82052bfa85..a1f6929fade5 100644 --- 
a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationIT.java @@ -112,7 +112,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase { assertThat(mappingMetaData, not(nullValue())); Map mappingSource = mappingMetaData.sourceAsMap(); Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource)); - logger.info("Keys: " + aField.keySet()); + logger.info("Keys: {}", aField.keySet()); assertThat(aField.size(), equalTo(2)); assertThat(aField.get("type").toString(), equalTo("geo_point")); assertThat(aField.get("fields"), notNullValue()); diff --git a/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java b/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java index 4853d59588b8..0c36a8566826 100644 --- a/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java +++ b/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java @@ -77,7 +77,7 @@ public class IndexLifecycleActionIT extends ESIntegTestCase { logger.info("Running Cluster Health"); ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet(); - logger.info("Done Cluster Health, status " + clusterHealth.getStatus()); + logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.YELLOW)); @@ -92,7 +92,7 @@ public class IndexLifecycleActionIT extends ESIntegTestCase { // first wait for 2 nodes in the cluster logger.info("Running Cluster Health"); clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForGreenStatus().waitForNodes("2")).actionGet(); - logger.info("Done Cluster Health, status " + 
clusterHealth.getStatus()); + logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); final String node2 = getLocalNodeId(server_2); @@ -171,7 +171,7 @@ public class IndexLifecycleActionIT extends ESIntegTestCase { // verify health logger.info("Running Cluster Health"); clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForGreenStatus().waitForNodes("2")).actionGet(); - logger.info("Done Cluster Health, status " + clusterHealth.getStatus()); + logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java index e36f1bca49ba..646d9651436c 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java @@ -282,7 +282,7 @@ public class IndicesRequestCacheTests extends ESTestCase { assertEquals("foo", value1.toUtf8()); BytesReference value2 = cache.getOrCompute(secondEntity, secondReader, termQuery.buildAsBytes()); assertEquals("bar", value2.toUtf8()); - logger.info(requestCacheStats.stats().getMemorySize().toString()); + logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize()); BytesReference value3 = cache.getOrCompute(thirddEntity, thirdReader, termQuery.buildAsBytes()); assertEquals("baz", value3.toUtf8()); assertEquals(2, cache.count()); @@ -319,7 +319,7 @@ public class IndicesRequestCacheTests extends ESTestCase { assertEquals("foo", value1.toUtf8()); BytesReference value2 = cache.getOrCompute(secondEntity, secondReader, termQuery.buildAsBytes()); assertEquals("bar", 
value2.toUtf8()); - logger.info(requestCacheStats.stats().getMemorySize().toString()); + logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize()); BytesReference value3 = cache.getOrCompute(thirddEntity, thirdReader, termQuery.buildAsBytes()); assertEquals("baz", value3.toUtf8()); assertEquals(3, cache.count()); diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java b/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java index b257e3bcd5e4..1a2f7e4ba18f 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java +++ b/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java @@ -48,7 +48,7 @@ public class UpdateNumberOfReplicasIT extends ESIntegTestCase { assertAcked(prepareCreate("test", 2)); logger.info("Running Cluster Health"); ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); - logger.info("Done Cluster Health, status " + clusterHealth.getStatus()); + logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); NumShards numShards = getNumShards("test"); @@ -75,7 +75,7 @@ public class UpdateNumberOfReplicasIT extends ESIntegTestCase { assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put("index.number_of_replicas", 2)).execute().actionGet()); logger.info("Running Cluster Health"); clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().setWaitForActiveShards(numShards.numPrimaries * 2).execute().actionGet(); - logger.info("Done Cluster Health, status " + clusterHealth.getStatus()); + logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), 
equalTo(ClusterHealthStatus.YELLOW)); assertThat(clusterHealth.getIndices().get("test").getActivePrimaryShards(), equalTo(numShards.numPrimaries)); @@ -88,7 +88,7 @@ public class UpdateNumberOfReplicasIT extends ESIntegTestCase { logger.info("Running Cluster Health"); clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForRelocatingShards(0).setWaitForNodes(">=3").execute().actionGet(); - logger.info("Done Cluster Health, status " + clusterHealth.getStatus()); + logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); assertThat(clusterHealth.getIndices().get("test").getActivePrimaryShards(), equalTo(numShards.numPrimaries)); @@ -106,7 +106,7 @@ public class UpdateNumberOfReplicasIT extends ESIntegTestCase { logger.info("Running Cluster Health"); clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForRelocatingShards(0).setWaitForNodes(">=3").execute().actionGet(); - logger.info("Done Cluster Health, status " + clusterHealth.getStatus()); + logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); assertThat(clusterHealth.getIndices().get("test").getActivePrimaryShards(), equalTo(numShards.numPrimaries)); @@ -128,7 +128,7 @@ public class UpdateNumberOfReplicasIT extends ESIntegTestCase { logger.info("--> running cluster health"); ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForActiveShards(numShards.numPrimaries * 2).execute().actionGet(); - logger.info("--> done cluster health, status " + clusterHealth.getStatus()); + 
logger.info("--> done cluster health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); assertThat(clusterHealth.getIndices().get("test").getActivePrimaryShards(), equalTo(numShards.numPrimaries)); @@ -140,7 +140,7 @@ public class UpdateNumberOfReplicasIT extends ESIntegTestCase { logger.info("--> running cluster health"); clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForActiveShards(numShards.numPrimaries * 3).setWaitForNodes(">=3").execute().actionGet(); - logger.info("--> done cluster health, status " + clusterHealth.getStatus()); + logger.info("--> done cluster health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); assertThat(clusterHealth.getIndices().get("test").getActivePrimaryShards(), equalTo(numShards.numPrimaries)); @@ -153,7 +153,7 @@ public class UpdateNumberOfReplicasIT extends ESIntegTestCase { logger.info("--> running cluster health"); clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForActiveShards(numShards.numPrimaries * 2).setWaitForNodes(">=2").execute().actionGet(); - logger.info("--> done cluster health, status " + clusterHealth.getStatus()); + logger.info("--> done cluster health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); assertThat(clusterHealth.getIndices().get("test").getActivePrimaryShards(), equalTo(numShards.numPrimaries)); @@ -166,7 +166,7 @@ public class UpdateNumberOfReplicasIT extends ESIntegTestCase { logger.info("--> running cluster health"); clusterHealth = 
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes(">=1").setWaitForActiveShards(numShards.numPrimaries).execute().actionGet(); - logger.info("--> done cluster health, status " + clusterHealth.getStatus()); + logger.info("--> done cluster health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); assertThat(clusterHealth.getIndices().get("test").getActivePrimaryShards(), equalTo(numShards.numPrimaries)); @@ -183,7 +183,7 @@ public class UpdateNumberOfReplicasIT extends ESIntegTestCase { logger.info("--> running cluster health"); ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForActiveShards(numShards.numPrimaries * 2).execute().actionGet(); - logger.info("--> done cluster health, status " + clusterHealth.getStatus()); + logger.info("--> done cluster health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); assertThat(clusterHealth.getIndices().get("test").getActivePrimaryShards(), equalTo(numShards.numPrimaries)); @@ -195,7 +195,7 @@ public class UpdateNumberOfReplicasIT extends ESIntegTestCase { logger.info("--> running cluster health"); clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForActiveShards(numShards.numPrimaries * 3).execute().actionGet(); - logger.info("--> done cluster health, status " + clusterHealth.getStatus()); + logger.info("--> done cluster health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); 
assertThat(clusterHealth.getIndices().get("test").getActivePrimaryShards(), equalTo(numShards.numPrimaries)); @@ -208,7 +208,7 @@ public class UpdateNumberOfReplicasIT extends ESIntegTestCase { logger.info("--> running cluster health"); clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes(">=2").setWaitForActiveShards(numShards.numPrimaries * 2).execute().actionGet(); - logger.info("--> done cluster health, status " + clusterHealth.getStatus()); + logger.info("--> done cluster health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); assertThat(clusterHealth.getIndices().get("test").getActivePrimaryShards(), equalTo(numShards.numPrimaries)); @@ -221,7 +221,7 @@ public class UpdateNumberOfReplicasIT extends ESIntegTestCase { logger.info("--> running cluster health"); clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().setWaitForNodes(">=1").setWaitForActiveShards(numShards.numPrimaries).execute().actionGet(); - logger.info("--> done cluster health, status " + clusterHealth.getStatus()); + logger.info("--> done cluster health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.YELLOW)); assertThat(clusterHealth.getIndices().get("test").getActivePrimaryShards(), equalTo(numShards.numPrimaries)); @@ -237,7 +237,7 @@ public class UpdateNumberOfReplicasIT extends ESIntegTestCase { logger.info("--> running cluster health"); ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForActiveShards(numShards.numPrimaries * 3).execute().actionGet(); - logger.info("--> done cluster health, status " + 
clusterHealth.getStatus()); + logger.info("--> done cluster health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); assertThat(clusterHealth.getIndices().get("test").getActivePrimaryShards(), equalTo(numShards.numPrimaries)); @@ -253,7 +253,7 @@ public class UpdateNumberOfReplicasIT extends ESIntegTestCase { logger.info("--> running cluster health"); clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForActiveShards(numShards.numPrimaries * 4).execute().actionGet(); - logger.info("--> done cluster health, status " + clusterHealth.getStatus()); + logger.info("--> done cluster health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); assertThat(clusterHealth.getIndices().get("test").getActivePrimaryShards(), equalTo(numShards.numPrimaries)); diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index b1f94f203e48..26e2b7702c82 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -258,7 +258,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { assertThat(waitForShardDeletion(node_3, "test", 0), equalTo(false)); Path server2Shard = shardDirectory(node_2, "test", 0); - logger.info("--> stopping node " + node_2); + logger.info("--> stopping node {}", node_2); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(node_2)); logger.info("--> running cluster_health"); @@ -268,7 +268,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { .setWaitForRelocatingShards(0) 
.get(); assertThat(clusterHealth.isTimedOut(), equalTo(false)); - logger.info("--> done cluster_health, status " + clusterHealth.getStatus()); + logger.info("--> done cluster_health, status {}", clusterHealth.getStatus()); assertThat(Files.exists(server2Shard), equalTo(true)); diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index cce687fcec36..d14a411c332c 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -131,7 +131,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { .addField("field1").addField("field2") .execute().actionGet(); if (searchResponse.getFailedShards() > 0) { - logger.warn("failed search " + Arrays.toString(searchResponse.getShardFailures())); + logger.warn("failed search {}", Arrays.toString(searchResponse.getShardFailures())); } assertHitCount(searchResponse, 1); assertThat(searchResponse.getHits().getAt(0).field("field1").value().toString(), equalTo("value1")); diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java index e3777e84f9ab..a4632079b35e 100644 --- a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java +++ b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java @@ -47,11 +47,11 @@ public class SimpleNodesInfoIT extends ESIntegTestCase { final String node_2 = nodesIds.get(1); ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth().setWaitForGreenStatus().setWaitForNodes("2").get(); - logger.info("--> done cluster_health, status " + clusterHealth.getStatus()); + logger.info("--> done cluster_health, status {}", clusterHealth.getStatus()); String server1NodeId = internalCluster().getInstance(ClusterService.class, 
node_1).state().nodes().localNodeId(); String server2NodeId = internalCluster().getInstance(ClusterService.class, node_2).state().nodes().localNodeId(); - logger.info("--> started nodes: " + server1NodeId + " and " + server2NodeId); + logger.info("--> started nodes: {} and {}", server1NodeId, server2NodeId); NodesInfoResponse response = client().admin().cluster().prepareNodesInfo().execute().actionGet(); assertThat(response.getNodes().length, is(2)); @@ -91,11 +91,11 @@ public class SimpleNodesInfoIT extends ESIntegTestCase { final String node_2 = nodesIds.get(1); ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth().setWaitForGreenStatus().setWaitForNodes("2").get(); - logger.info("--> done cluster_health, status " + clusterHealth.getStatus()); + logger.info("--> done cluster_health, status {}", clusterHealth.getStatus()); String server1NodeId = internalCluster().getInstance(ClusterService.class, node_1).state().nodes().localNodeId(); String server2NodeId = internalCluster().getInstance(ClusterService.class, node_2).state().nodes().localNodeId(); - logger.info("--> started nodes: " + server1NodeId + " and " + server2NodeId); + logger.info("--> started nodes: {} and {}", server1NodeId, server2NodeId); NodesInfoResponse response = client().admin().cluster().prepareNodesInfo().execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java index 01494aab72d8..5945a21dc977 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java @@ -1072,7 +1072,7 @@ public class PercolatorIT extends ESIntegTestCase { int numLevels = randomIntBetween(1, 25); long numQueriesPerLevel = randomIntBetween(10, 250); long totalQueries = numLevels * numQueriesPerLevel; - logger.info("--> register " + totalQueries + " queries"); + logger.info("--> register {} queries", 
totalQueries); for (int level = 1; level <= numLevels; level++) { for (int query = 1; query <= numQueriesPerLevel; query++) { client().prepareIndex("my-index", PercolatorService.TYPE_NAME, level + "-" + query) @@ -1166,7 +1166,7 @@ public class PercolatorIT extends ESIntegTestCase { Map> controlMap = new HashMap<>(); long numQueries = randomIntBetween(100, 250); - logger.info("--> register " + numQueries + " queries"); + logger.info("--> register {} queries", numQueries); for (int i = 0; i < numQueries; i++) { int value = randomInt(10); client().prepareIndex("my-index", PercolatorService.TYPE_NAME, Integer.toString(i)) diff --git a/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java index a08eb41236d9..f76a117ddb0d 100644 --- a/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java @@ -131,7 +131,7 @@ public class RecoveryPercolatorIT extends ESIntegTestCase { logger.info("Running Cluster Health (wait for the shards to startup)"); ClusterHealthResponse clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForYellowStatus().waitForActiveShards(1)).actionGet(); - logger.info("Done Cluster Health, status " + clusterHealth.getStatus()); + logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); SearchResponse countResponse = client().prepareSearch().setSize(0).setTypes(PercolatorService.TYPE_NAME).setQuery(matchAllQuery()).get(); assertHitCount(countResponse, 1L); @@ -140,7 +140,7 @@ public class RecoveryPercolatorIT extends ESIntegTestCase { assertThat(actionGet.isAcknowledged(), equalTo(true)); assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=text").addMapping(PercolatorService.TYPE_NAME, "color", "type=text")); clusterHealth = 
client().admin().cluster().health(clusterHealthRequest().waitForYellowStatus().waitForActiveShards(1)).actionGet(); - logger.info("Done Cluster Health, status " + clusterHealth.getStatus()); + logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(client().prepareSearch().setSize(0).setTypes(PercolatorService.TYPE_NAME).setQuery(matchAllQuery()).get().getHits().totalHits(), equalTo(0L)); diff --git a/core/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java b/core/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java index 16f276550559..b441dd32c780 100644 --- a/core/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java @@ -123,7 +123,7 @@ public class TruncatedRecoveryIT extends ESIntegTestCase { public void sendRequest(DiscoveryNode node, long requestId, String action, TransportRequest request, TransportRequestOptions options) throws IOException, TransportException { if (action.equals(RecoveryTargetService.Actions.FILE_CHUNK)) { RecoveryFileChunkRequest req = (RecoveryFileChunkRequest) request; - logger.debug("file chunk [" + req.toString() + "] lastChunk: " + req.lastChunk()); + logger.debug("file chunk [{}] lastChunk: {}", req, req.lastChunk()); if ((req.name().endsWith("cfs") || req.name().endsWith("fdt")) && req.lastChunk() && truncate.get()) { latch.countDown(); throw new RuntimeException("Caused some truncated files for fun and profit"); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java index 74c308c0fcd8..4e4b54d91f69 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java @@ -185,7 +185,7 @@ children("to_comment", 
"comment") assertThat(categoryTerms.getBuckets().size(), equalTo(3)); for (Terms.Bucket bucket : categoryTerms.getBuckets()) { - logger.info("bucket=" + bucket.getKey()); + logger.info("bucket={}", bucket.getKey()); Children childrenBucket = bucket.getAggregations().get("to_comment"); TopHits topHits = childrenBucket.getAggregations().get("top_comments"); logger.info("total_hits={}", topHits.getHits().getTotalHits()); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java index 044ca4f80458..11d838d43c45 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java @@ -84,7 +84,7 @@ public class NestedIT extends ESIntegTestCase { numParents = randomIntBetween(3, 10); numChildren = new int[numParents]; aggCollectionMode = randomFrom(SubAggCollectionMode.values()); - logger.info("AGG COLLECTION MODE: " + aggCollectionMode); + logger.info("AGG COLLECTION MODE: {}", aggCollectionMode); int totalChildren = 0; for (int i = 0; i < numParents; ++i) { if (i == numParents - 1 && totalChildren == 0) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index 8304922aa62e..14f2912d19fa 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -433,7 +433,7 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(hits.totalHits(), equalTo(controlHits.totalHits())); assertThat(hits.getHits().length, equalTo(controlHits.getHits().length)); for (int i = 0; i < hits.getHits().length; i++) { - logger.info(i + ": top_hits: [" + hits.getAt(i).id() + "][" + hits.getAt(i).sortValues()[0] + "] control: [" + 
controlHits.getAt(i).id() + "][" + controlHits.getAt(i).sortValues()[0] + "]"); + logger.info("{}: top_hits: [{}][{}] control: [{}][{}]", i, hits.getAt(i).id(), hits.getAt(i).sortValues()[0], controlHits.getAt(i).id(), controlHits.getAt(i).sortValues()[0]); assertThat(hits.getAt(i).id(), equalTo(controlHits.getAt(i).id())); assertThat(hits.getAt(i).sortValues()[0], equalTo(controlHits.getAt(i).sortValues()[0])); } @@ -609,7 +609,7 @@ public class TopHitsIT extends ESIntegTestCase { public void testTrackScores() throws Exception { boolean[] trackScores = new boolean[]{true, false}; for (boolean trackScore : trackScores) { - logger.info("Track score=" + trackScore); + logger.info("Track score={}", trackScore); SearchResponse response = client().prepareSearch("idx").setTypes("field-collapsing") .setQuery(matchQuery("text", "term rare")) .addAggregation(terms("terms") diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java index beedd72c2801..3168fdc0ff11 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java @@ -142,7 +142,7 @@ public class SearchWhileRelocatingIT extends ESIntegTestCase { } assertThat("numberOfReplicas: " + numberOfReplicas + " failed in iteration " + i + ", verification: " + verified, thrownExceptions, Matchers.emptyIterable()); // if we hit only non-critical exceptions we only make sure that the post search works - logger.info("Non-CriticalExceptions: " + nonCriticalExceptions.toString()); + logger.info("Non-CriticalExceptions: {}", nonCriticalExceptions); assertThat("numberOfReplicas: " + numberOfReplicas + " failed in iteration " + i + ", verification: " + verified, postSearchOK, is(true)); } } diff --git a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java 
b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java index 3cd1d269275a..bd1d6ed97955 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java @@ -90,7 +90,7 @@ public class TransportSearchFailuresIT extends ESIntegTestCase { .cluster() .health(clusterHealthRequest("test").waitForYellowStatus().waitForRelocatingShards(0) .waitForActiveShards(test.totalNumShards)).actionGet(); - logger.info("Done Cluster Health, status " + clusterHealth.getStatus()); + logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), anyOf(equalTo(ClusterHealthStatus.YELLOW), equalTo(ClusterHealthStatus.GREEN))); assertThat(clusterHealth.getActiveShards(), equalTo(test.totalNumShards)); diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java index e41e3c178c58..d124fcf63869 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java @@ -299,7 +299,7 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase { // Create a random geometry collection. 
GeometryCollectionBuilder gcb = RandomShapeGenerator.createGeometryCollection(getRandom()); - logger.info("Created Random GeometryCollection containing " + gcb.numShapes() + " shapes"); + logger.info("Created Random GeometryCollection containing {} shapes", gcb.numShapes()); client().admin().indices().prepareCreate("test").addMapping("type", "location", "type=geo_shape,tree=quadtree") .execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java index f34d5b33c9dc..dbe2714d05d7 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java @@ -2044,7 +2044,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { .query(multiMatchQueryBuilder) .highlighter(highlight().highlightQuery(randomBoolean() ? multiMatchQueryBuilder : null).highlighterType(highlighterType) .field(new Field("field1").requireFieldMatch(true).preTags("").postTags(""))); - logger.info("Running multi-match type: [" + matchQueryType + "] highlight with type: [" + highlighterType + "]"); + logger.info("Running multi-match type: [{}] highlight with type: [{}]", matchQueryType, highlighterType); SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); assertHitCount(searchResponse, 1L); assertHighlight(searchResponse, 0, "field1", 0, anyOf(equalTo("The quick brown fox jumps over"), diff --git a/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java b/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java index b65380a545c7..651982106c59 100644 --- a/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java +++ b/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java @@ -419,7 +419,7 @@ public class MoreLikeThisIT extends 
ESIntegTestCase { .minTermFreq(1) .minDocFreq(1) .minimumShouldMatch(minimumShouldMatch); - logger.info("Testing with minimum_should_match = " + minimumShouldMatch); + logger.info("Testing with minimum_should_match = {}", minimumShouldMatch); SearchResponse response = client().prepareSearch("test").setTypes("type1") .setQuery(mltQuery).get(); assertSearchResponse(response); diff --git a/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java b/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java index f09b18bdb8a4..f55075045865 100644 --- a/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java +++ b/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java @@ -72,7 +72,7 @@ public class QueryProfilerIT extends ESIntegTestCase { int iters = between(20, 100); for (int i = 0; i < iters; i++) { QueryBuilder q = randomQueryBuilder(stringFields, numericFields, numDocs, 3); - logger.info(q.toString()); + logger.info("Query: {}", q); SearchResponse resp = client().prepareSearch() .setQuery(q) @@ -126,8 +126,7 @@ public class QueryProfilerIT extends ESIntegTestCase { int iters = between(1, 10); for (int i = 0; i < iters; i++) { QueryBuilder q = randomQueryBuilder(stringFields, numericFields, numDocs, 3); - logger.info(q.toString()); - + logger.info("Query: {}", q); SearchRequestBuilder vanilla = client().prepareSearch("test") .setQuery(q) @@ -309,7 +308,7 @@ public class QueryProfilerIT extends ESIntegTestCase { refresh(); QueryBuilder q = QueryBuilders.boolQuery(); - logger.info(q.toString()); + logger.info("Query: {}", q); SearchResponse resp = client().prepareSearch() .setQuery(q) @@ -360,8 +359,7 @@ public class QueryProfilerIT extends ESIntegTestCase { QueryBuilder q = QueryBuilders.boolQuery().must(QueryBuilders.boolQuery().must(QueryBuilders.boolQuery().must(QueryBuilders.matchQuery("field1", "one")))); - - logger.info(q.toString()); + logger.info("Query: {}", q); SearchResponse resp = 
client().prepareSearch() .setQuery(q) @@ -408,7 +406,7 @@ public class QueryProfilerIT extends ESIntegTestCase { QueryBuilder q = QueryBuilders.boostingQuery(QueryBuilders.matchQuery("field1", "one"), QueryBuilders.matchQuery("field1", "two")) .boost(randomFloat()) .negativeBoost(randomFloat()); - logger.info(q.toString()); + logger.info("Query: {}", q); SearchResponse resp = client().prepareSearch() .setQuery(q) @@ -455,7 +453,7 @@ public class QueryProfilerIT extends ESIntegTestCase { QueryBuilder q = QueryBuilders.disMaxQuery() .boost(0.33703882f) .add(QueryBuilders.rangeQuery("field2").from(null).to(73).includeLower(true).includeUpper(true)); - logger.info(q.toString()); + logger.info("Query: {}", q); SearchResponse resp = client().prepareSearch() .setQuery(q) @@ -501,7 +499,7 @@ public class QueryProfilerIT extends ESIntegTestCase { QueryBuilder q = QueryBuilders.rangeQuery("field2").from(0).to(5); - logger.info(q.toString()); + logger.info("Query: {}", q.toString()); SearchResponse resp = client().prepareSearch() .setQuery(q) @@ -547,7 +545,7 @@ public class QueryProfilerIT extends ESIntegTestCase { QueryBuilder q = QueryBuilders.matchPhraseQuery("field1", "one two"); - logger.info(q.toString()); + logger.info("Query: {}", q); SearchResponse resp = client().prepareSearch() .setQuery(q) @@ -559,7 +557,7 @@ public class QueryProfilerIT extends ESIntegTestCase { if (resp.getShardFailures().length > 0) { for (ShardSearchFailure f : resp.getShardFailures()) { - logger.error(f.toString()); + logger.error("Shard search failure: {}", f); } fail(); } @@ -603,7 +601,7 @@ public class QueryProfilerIT extends ESIntegTestCase { refresh(); QueryBuilder q = QueryBuilders.rangeQuery("field2").from(0).to(5); - logger.info(q.toString()); + logger.info("Query: {}", q); SearchResponse resp = client().prepareSearch().setQuery(q).setProfile(false).execute().actionGet(); assertThat("Profile response element should be an empty map", resp.getProfileResults().size(), equalTo(0)); diff 
--git a/core/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java b/core/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java index 292f9a495dc4..6c0b99639401 100644 --- a/core/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java +++ b/core/src/test/java/org/elasticsearch/search/scroll/DuelScrollIT.java @@ -282,8 +282,8 @@ public class DuelScrollIT extends ESIntegTestCase { } assertEquals(control.getHits().getTotalHits(), scrollDocs); } catch (AssertionError e) { - logger.info("Control:\n" + control); - logger.info("Scroll size=" + size + ", from=" + scrollDocs + ":\n" + scroll); + logger.info("Control:\n{}", control); + logger.info("Scroll size={}, from={}:\n{}", size, scrollDocs, scroll); throw e; } finally { clearScroll(scroll.getScrollId()); diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java index dc06c43cb85a..309c4bcdaf23 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java @@ -78,8 +78,8 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { GeoPoint[] d2Points = {new GeoPoint(5, 1), new GeoPoint(6, 2)}; createShuffeldJSONArray(d2Builder, d2Points); - logger.info(d1Builder.string()); - logger.info(d2Builder.string()); + logger.info("d1: {}", d1Builder); + logger.info("d2: {}", d2Builder); indexRandom(true, client().prepareIndex("index", "type", "d1").setSource(d1Builder), client().prepareIndex("index", "type", "d2").setSource(d2Builder)); diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 4f47a2a3126b..51924244f5d8 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ 
b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -761,7 +761,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas Client client = client(); Path repo = randomRepoPath(); - logger.info("--> creating repository at " + repo.toAbsolutePath()); + logger.info("--> creating repository at {}", repo.toAbsolutePath()); assertAcked(client.admin().cluster().preparePutRepository("test-repo") .setType("fs").setSettings(Settings.settingsBuilder() .put("location", repo) @@ -817,7 +817,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas Client client = client(); Path repo = randomRepoPath(); - logger.info("--> creating repository at " + repo.toAbsolutePath()); + logger.info("--> creating repository at {}", repo.toAbsolutePath()); assertAcked(client.admin().cluster().preparePutRepository("test-repo") .setType("fs").setSettings(Settings.settingsBuilder() .put("location", repo) @@ -855,7 +855,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas Client client = client(); Path repo = randomRepoPath(); - logger.info("--> creating repository at " + repo.toAbsolutePath()); + logger.info("--> creating repository at {}", repo.toAbsolutePath()); assertAcked(client.admin().cluster().preparePutRepository("test-repo") .setType("fs").setSettings(Settings.settingsBuilder() .put("location", repo) @@ -889,7 +889,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas Client client = client(); Path repo = randomRepoPath(); - logger.info("--> creating repository at " + repo.toAbsolutePath()); + logger.info("--> creating repository at {}", repo.toAbsolutePath()); assertAcked(client.admin().cluster().preparePutRepository("test-repo") .setType("fs").setSettings(Settings.settingsBuilder() .put("location", repo) @@ -2159,7 +2159,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas public void 
testListCorruptedSnapshot() throws Exception { Client client = client(); Path repo = randomRepoPath(); - logger.info("--> creating repository at " + repo.toAbsolutePath()); + logger.info("--> creating repository at {}", repo.toAbsolutePath()); assertAcked(client.admin().cluster().preparePutRepository("test-repo") .setType("fs").setSettings(Settings.settingsBuilder() .put("location", repo) diff --git a/core/src/test/java/org/elasticsearch/snapshots/SnapshotBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/snapshots/SnapshotBackwardsCompatibilityIT.java index eb069d4721c9..b3f466cdcc82 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SnapshotBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SnapshotBackwardsCompatibilityIT.java @@ -215,7 +215,7 @@ public class SnapshotBackwardsCompatibilityIT extends ESBackcompatTestCase { logger.info("--> move from 0 to 1 replica"); client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)).get(); } - logger.debug("---> repo exists: " + Files.exists(tempDir.resolve("indices/test/0")) + " files: " + Arrays.toString(FileSystemUtils.files(tempDir.resolve("indices/test/0")))); // it's only one shard! + logger.debug("---> repo exists: {} files: {}", Files.exists(tempDir.resolve("indices/test/0")), Arrays.toString(FileSystemUtils.files(tempDir.resolve("indices/test/0")))); // it's only one shard! 
CreateSnapshotResponse createSnapshotResponseSecond = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-1").setWaitForCompletion(true).setIndices("test").get(); assertThat(createSnapshotResponseSecond.getSnapshotInfo().successfulShards(), greaterThan(0)); assertThat(createSnapshotResponseSecond.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponseSecond.getSnapshotInfo().totalShards())); diff --git a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java index e92a28db86bb..7a6b327ff899 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java +++ b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java @@ -120,7 +120,7 @@ public class MockRepository extends FsRepository { blockOnInitialization = repositorySettings.settings().getAsBoolean("block_on_init", false); randomPrefix = repositorySettings.settings().get("random", "default"); waitAfterUnblock = repositorySettings.settings().getAsLong("wait_after_unblock", 0L); - logger.info("starting mock repository with random prefix " + randomPrefix); + logger.info("starting mock repository with random prefix {}", randomPrefix); mockBlobStore = new MockBlobStore(super.blobStore()); } diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java index ef408d167847..d9466d284245 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java @@ -157,7 +157,7 @@ public class NettyTransportIT extends ESIntegTestCase { try { transportChannel.sendResponse(e); } catch (IOException e1) { - logger.warn("Failed to send error message back to client for action [" + action + "]", e); + logger.warn("Failed to send error message back to client for 
action [{}]", e, action); logger.warn("Actual Exception", e1); } } @@ -194,7 +194,7 @@ public class NettyTransportIT extends ESIntegTestCase { try { transportChannel.sendResponse(e); } catch (Throwable e1) { - logger.warn("Failed to send error message back to client for action [" + reg.getAction() + "]", e1); + logger.warn("Failed to send error message back to client for action [{}]", e1, reg.getAction()); logger.warn("Actual Exception", e); } } } diff --git a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java index 4d9e7a4b57bd..60a8a0c13388 100644 --- a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java +++ b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java @@ -316,7 +316,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri }); } catch (Throwable e) { if (logger.isTraceEnabled()) { - logger.trace("failed to run " + compiledScript, e); + logger.trace("failed to run {}", e, compiledScript); } throw new ScriptException("failed to run " + compiledScript, e); } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java index 2fb0f9f6327b..6f83746d4ce3 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java @@ -332,7 +332,7 @@ public class EquivalenceTests extends ESIntegTestCase { createIndex("idx"); final int numDocs = scaledRandomIntBetween(2500, 5000); - logger.info("Indexing [" + numDocs +"] docs"); + logger.info("Indexing [{}] docs", numDocs); List indexingRequests = new ArrayList<>(); for (int i = 0; i < numDocs; ++i) { 
indexingRequests.add(client().prepareIndex("idx", "type", Integer.toString(i)).setSource("double_value", randomDouble())); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java index 8346f3157f94..4642d4662c93 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java @@ -543,7 +543,7 @@ public class ExtendedStatsTests extends AbstractNumericTestCase { ShardSearchFailure[] failures = response.getShardFailures(); if (failures.length != expectedFailures) { for (ShardSearchFailure failure : failures) { - logger.error("Shard Failure: {}", failure); + logger.error("Shard Failure: {}", failure.getCause(), failure); } fail("Unexpected shard failures!"); } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchStatsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchStatsTests.java index 72abe487d890..7838bb58f8e7 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchStatsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchStatsTests.java @@ -116,7 +116,7 @@ public class SearchStatsTests extends ESIntegTestCase { } IndicesStatsResponse indicesStats = client().admin().indices().prepareStats().execute().actionGet(); - logger.debug("###### indices search stats: " + indicesStats.getTotal().getSearch()); + logger.debug("###### indices search stats: {}", indicesStats.getTotal().getSearch()); assertThat(indicesStats.getTotal().getSearch().getTotal().getQueryCount(), greaterThan(0L)); assertThat(indicesStats.getTotal().getSearch().getTotal().getQueryTimeInMillis(), greaterThan(0L)); assertThat(indicesStats.getTotal().getSearch().getTotal().getFetchCount(), greaterThan(0L)); diff --git 
a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StatsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StatsTests.java index 954d4353f5dc..b06d3395b2b5 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StatsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StatsTests.java @@ -404,7 +404,7 @@ public class StatsTests extends AbstractNumericTestCase { ShardSearchFailure[] failures = response.getShardFailures(); if (failures.length != expectedFailures) { for (ShardSearchFailure failure : failures) { - logger.error("Shard Failure: {}", failure); + logger.error("Shard Failure: {}", failure.getCause(), failure); } fail("Unexpected shard failures!"); } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StringTermsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StringTermsTests.java index edf8be49ddc2..346d19d4ce5d 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StringTermsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StringTermsTests.java @@ -243,7 +243,7 @@ public class StringTermsTests extends AbstractTermsTestCase { ExecutionMode[] executionModes = new ExecutionMode[] { null, ExecutionMode.GLOBAL_ORDINALS, ExecutionMode.GLOBAL_ORDINALS_HASH, ExecutionMode.GLOBAL_ORDINALS_LOW_CARDINALITY }; for (ExecutionMode executionMode : executionModes) { - logger.info("Execution mode:" + executionMode); + logger.info("Execution mode: {}", executionMode); SearchResponse response = client() .prepareSearch("idx") .setTypes("type") diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java index 766c5bff9c49..647a727b2dd6 100644 --- 
a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java @@ -191,7 +191,7 @@ public final class MustacheScriptEngineService extends AbstractComponent impleme } }); } catch (Exception e) { - logger.error("Error running " + template, e); + logger.error("Error running {}", e, template); throw new ScriptException("Error running " + template, e); } return result.bytes(); diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsSigner.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsSigner.java index 36eae9b58292..a76a2b04a91e 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsSigner.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsSigner.java @@ -55,7 +55,7 @@ public class AwsSigner { try { validateSignerType(signer); } catch (IllegalArgumentException e) { - logger.warn(e.getMessage()); + logger.warn("{}", e.getMessage()); } configuration.setSignerOverride(signer); diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsSigner.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsSigner.java index c94491696c04..5c02671e5e9f 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsSigner.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsSigner.java @@ -64,7 +64,7 @@ public class AwsSigner { try { validateSignerType(signer, endpoint); } catch (IllegalArgumentException e) { - logger.warn(e.getMessage()); + logger.warn("{}", e.getMessage()); } configuration.setSignerOverride(signer); diff --git a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java index 15b72c4cccd2..916adc142c86 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java @@ -88,11 +88,11 @@ public final class CorruptionUtils { // we need to add assumptions here that the checksums actually really don't match there is a small chance to get collisions // in the checksum which is ok though.... StringBuilder msg = new StringBuilder(); - msg.append("Checksum before: [").append(checksumBeforeCorruption).append("]"); - msg.append(" after: [").append(checksumAfterCorruption).append("]"); - msg.append(" checksum value after corruption: ").append(actualChecksumAfterCorruption).append("]"); - msg.append(" file: ").append(fileToCorrupt.getFileName()).append(" length: ").append(dir.fileLength(fileToCorrupt.getFileName().toString())); - logger.info(msg.toString()); + msg.append("before: [").append(checksumBeforeCorruption).append("] "); + msg.append("after: [").append(checksumAfterCorruption).append("] "); + msg.append("checksum value after corruption: ").append(actualChecksumAfterCorruption).append("] "); + msg.append("file: ").append(fileToCorrupt.getFileName()).append(" length: ").append(dir.fileLength(fileToCorrupt.getFileName().toString())); + logger.info("Checksum {}", msg); assumeTrue("Checksum collision - " + msg.toString(), checksumAfterCorruption != checksumBeforeCorruption // collision || actualChecksumAfterCorruption != checksumBeforeCorruption); // checksum corrupted diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 0098c4ce9c8f..90a74eadf9e1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -883,7 +883,7 @@ public abstract class ESIntegTestCase extends ESTestCase { sb.append("\n-> _index: [").append(hit.getIndex()).append("] type 
[").append(hit.getType()) .append("] id [").append(hit.id()).append("]"); } - logger.warn(sb.toString()); + logger.warn("{}", sb); fail(failMsg); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java index e55977135702..e798fd8c8abe 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java @@ -151,8 +151,7 @@ public class RestClient implements Closeable { HttpRequestBuilder httpRequestBuilder = callApiBuilder(apiName, requestParams, body); for (Map.Entry header : headers.entrySet()) { - logger.error("Adding header " + header.getKey()); - logger.error(" with value " + header.getValue()); + logger.error("Adding header {}\n with value {}", header.getKey(), header.getValue()); httpRequestBuilder.addHeader(header.getKey(), header.getValue()); } logger.debug("calling api [{}]", apiName); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpResponse.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpResponse.java index 9945edbefa9d..37fc163ac61d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpResponse.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpResponse.java @@ -61,7 +61,7 @@ public class HttpResponse { try { httpResponse.close(); } catch (IOException e) { - logger.error(e.getMessage(), e); + logger.error("Failed closing response", e); } } } else { From f465d98eb3af2a6e7ee7bc7c851b50385fc23672 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 9 Mar 2016 21:52:38 -0500 Subject: [PATCH 182/320] Add raw recovery progress to cat recovery API This commit adds fields bytes_recovered and files_recovered to the cat recovery API. 
These fields, respectively, indicate the total number of bytes and files recovered. Additionally, for consistency, some totals fields and translog recovery fields have been renamed. Closes #17064 --- .../rest/action/cat/RestRecoveryAction.java | 22 ++- .../action/cat/RestRecoveryActionTests.java | 187 ++++++++++++++++++ docs/reference/migration/migrate_5_0.asciidoc | 15 ++ .../test/cat.recovery/10_basic.yaml | 12 +- 4 files changed, 222 insertions(+), 14 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java index 759fac2eb194..7c555c9b3577 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java @@ -92,14 +92,16 @@ public class RestRecoveryAction extends AbstractCatAction { .addCell("repository", "alias:rep;desc:repository") .addCell("snapshot", "alias:snap;desc:snapshot") .addCell("files", "alias:f;desc:number of files to recover") + .addCell("files_recovered", "alias:fr;desc:files recovered") .addCell("files_percent", "alias:fp;desc:percent of files recovered") - .addCell("bytes", "alias:b;desc:size to recover in bytes") + .addCell("files_total", "alias:tf;desc:total number of files") + .addCell("bytes", "alias:b;desc:number of bytes to recover") + .addCell("bytes_recovered", "alias:br;desc:bytes recovered") .addCell("bytes_percent", "alias:bp;desc:percent of bytes recovered") - .addCell("total_files", "alias:tf;desc:total number of files") - .addCell("total_bytes", "alias:tb;desc:total number of bytes") - .addCell("translog", "alias:tr;desc:translog operations recovered") - .addCell("translog_percent", "alias:trp;desc:percent of translog recovery") - .addCell("total_translog", "alias:trt;desc:current total translog 
operations") + .addCell("bytes_total", "alias:tb;desc:total number of bytes") + .addCell("translog_ops", "alias:to;desc:number of translog ops to recover") + .addCell("translog_ops_recovered", "alias:tor;desc:translog ops recovered") + .addCell("translog_ops_percent", "alias:top;desc:percent of translog ops recovered") .endHeaders(); return t; } @@ -151,14 +153,16 @@ public class RestRecoveryAction extends AbstractCatAction { t.addCell(state.getRestoreSource() == null ? "n/a" : state.getRestoreSource().snapshotId().getRepository()); t.addCell(state.getRestoreSource() == null ? "n/a" : state.getRestoreSource().snapshotId().getSnapshot()); t.addCell(state.getIndex().totalRecoverFiles()); + t.addCell(state.getIndex().recoveredFileCount()); t.addCell(String.format(Locale.ROOT, "%1.1f%%", state.getIndex().recoveredFilesPercent())); - t.addCell(state.getIndex().totalRecoverBytes()); - t.addCell(String.format(Locale.ROOT, "%1.1f%%", state.getIndex().recoveredBytesPercent())); t.addCell(state.getIndex().totalFileCount()); + t.addCell(state.getIndex().totalRecoverBytes()); + t.addCell(state.getIndex().recoveredBytes()); + t.addCell(String.format(Locale.ROOT, "%1.1f%%", state.getIndex().recoveredBytesPercent())); t.addCell(state.getIndex().totalBytes()); + t.addCell(state.getTranslog().totalOperations()); t.addCell(state.getTranslog().recoveredOperations()); t.addCell(String.format(Locale.ROOT, "%1.1f%%", state.getTranslog().recoveredPercent())); - t.addCell(state.getTranslog().totalOperations()); t.endRow(); } } diff --git a/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java new file mode 100644 index 000000000000..848c62ab2b4a --- /dev/null +++ b/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java @@ -0,0 +1,187 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.rest.action.cat; + +import org.elasticsearch.action.ShardOperationFailedException; +import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; +import org.elasticsearch.cluster.metadata.SnapshotId; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.routing.RestoreSource; +import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.Table; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.recovery.RecoveryState; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import static org.elasticsearch.mock.orig.Mockito.when; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.mockito.Mockito.mock; + +public class RestRecoveryActionTests extends ESTestCase { + + public void testRestRecoveryAction() { + final Settings settings = Settings.EMPTY; + final RestController restController = new 
RestController(settings); + final RestRecoveryAction action = new RestRecoveryAction(settings, restController, restController, null); + final int totalShards = randomIntBetween(1, 32); + final int successfulShards = Math.max(0, totalShards - randomIntBetween(1, 2)); + final int failedShards = totalShards - successfulShards; + final boolean detailed = randomBoolean(); + final Map> shardRecoveryStates = new HashMap<>(); + final List recoveryStates = new ArrayList<>(); + + for (int i = 0; i < successfulShards; i++) { + final RecoveryState state = mock(RecoveryState.class); + when(state.getShardId()).thenReturn(new ShardId(new Index("index", "_na_"), i)); + final RecoveryState.Timer timer = mock(RecoveryState.Timer.class); + when(timer.time()).thenReturn((long)randomIntBetween(1000000, 10 * 1000000)); + when(state.getTimer()).thenReturn(timer); + when(state.getType()).thenReturn(randomFrom(RecoveryState.Type.values())); + when(state.getStage()).thenReturn(randomFrom(RecoveryState.Stage.values())); + final DiscoveryNode sourceNode = randomBoolean() ? mock(DiscoveryNode.class) : null; + if (sourceNode != null) { + when(sourceNode.getHostName()).thenReturn(randomAsciiOfLength(8)); + } + when(state.getSourceNode()).thenReturn(sourceNode); + final DiscoveryNode targetNode = mock(DiscoveryNode.class); + when(targetNode.getHostName()).thenReturn(randomAsciiOfLength(8)); + when(state.getTargetNode()).thenReturn(targetNode); + + final RestoreSource restoreSource = randomBoolean() ? 
mock(RestoreSource.class) : null; + if (restoreSource != null) { + final SnapshotId snapshotId = mock(SnapshotId.class); + when(snapshotId.getRepository()).thenReturn(randomAsciiOfLength(8)); + when(snapshotId.getSnapshot()).thenReturn(randomAsciiOfLength(8)); + when(restoreSource.snapshotId()).thenReturn(snapshotId); + } + + RecoveryState.Index index = mock(RecoveryState.Index.class); + + final int totalRecoveredFiles = randomIntBetween(1, 64); + when(index.totalRecoverFiles()).thenReturn(totalRecoveredFiles); + final int recoveredFileCount = randomIntBetween(0, totalRecoveredFiles); + when(index.recoveredFileCount()).thenReturn(recoveredFileCount); + when(index.recoveredFilesPercent()).thenReturn((100f * recoveredFileCount) / totalRecoveredFiles); + when(index.totalFileCount()).thenReturn(randomIntBetween(totalRecoveredFiles, 2 * totalRecoveredFiles)); + + final int totalRecoveredBytes = randomIntBetween(1, 1 << 24); + when(index.totalRecoverBytes()).thenReturn((long)totalRecoveredBytes); + final int recoveredBytes = randomIntBetween(0, totalRecoveredBytes); + when(index.recoveredBytes()).thenReturn((long)recoveredBytes); + when(index.recoveredBytesPercent()).thenReturn((100f * recoveredBytes) / totalRecoveredBytes); + when(index.totalRecoverBytes()).thenReturn((long)randomIntBetween(totalRecoveredBytes, 2 * totalRecoveredBytes)); + when(state.getIndex()).thenReturn(index); + + final RecoveryState.Translog translog = mock(RecoveryState.Translog.class); + final int translogOps = randomIntBetween(0, 1 << 18); + when(translog.totalOperations()).thenReturn(translogOps); + final int translogOpsRecovered = randomIntBetween(0, translogOps); + when(translog.recoveredOperations()).thenReturn(translogOpsRecovered); + when(translog.recoveredPercent()).thenReturn(translogOps == 0 ? 
100f : (100f * translogOpsRecovered / translogOps)); + when(state.getTranslog()).thenReturn(translog); + + recoveryStates.add(state); + } + + final List shuffle = new ArrayList<>(recoveryStates); + Randomness.shuffle(shuffle); + shardRecoveryStates.put("index", shuffle); + + final List shardFailures = new ArrayList<>(); + final RecoveryResponse response = new RecoveryResponse( + totalShards, + successfulShards, + failedShards, + detailed, + shardRecoveryStates, + shardFailures); + final Table table = action.buildRecoveryTable(null, response); + + assertNotNull(table); + + List headers = table.getHeaders(); + assertThat(headers.get(0).value, equalTo("index")); + assertThat(headers.get(1).value, equalTo("shard")); + assertThat(headers.get(2).value, equalTo("time")); + assertThat(headers.get(3).value, equalTo("type")); + assertThat(headers.get(4).value, equalTo("stage")); + assertThat(headers.get(5).value, equalTo("source_host")); + assertThat(headers.get(6).value, equalTo("target_host")); + assertThat(headers.get(7).value, equalTo("repository")); + assertThat(headers.get(8).value, equalTo("snapshot")); + assertThat(headers.get(9).value, equalTo("files")); + assertThat(headers.get(10).value, equalTo("files_recovered")); + assertThat(headers.get(11).value, equalTo("files_percent")); + assertThat(headers.get(12).value, equalTo("files_total")); + assertThat(headers.get(13).value, equalTo("bytes")); + assertThat(headers.get(14).value, equalTo("bytes_recovered")); + assertThat(headers.get(15).value, equalTo("bytes_percent")); + assertThat(headers.get(16).value, equalTo("bytes_total")); + assertThat(headers.get(17).value, equalTo("translog_ops")); + assertThat(headers.get(18).value, equalTo("translog_ops_recovered")); + assertThat(headers.get(19).value, equalTo("translog_ops_percent")); + + assertThat(table.getRows().size(), equalTo(successfulShards)); + for (int i = 0; i < successfulShards; i++) { + final RecoveryState state = recoveryStates.get(i); + List cells = 
table.getRows().get(i); + assertThat(cells.get(0).value, equalTo("index")); + assertThat(cells.get(1).value, equalTo(i)); + assertThat(cells.get(2).value, equalTo(new TimeValue(state.getTimer().time()))); + assertThat(cells.get(3).value, equalTo(state.getType().name().toLowerCase(Locale.ROOT))); + assertThat(cells.get(4).value, equalTo(state.getStage().name().toLowerCase(Locale.ROOT))); + assertThat(cells.get(5).value, equalTo(state.getSourceNode() == null ? "n/a" : state.getSourceNode().getHostName())); + assertThat(cells.get(6).value, equalTo(state.getTargetNode().getHostName())); + assertThat( + cells.get(7).value, + equalTo(state.getRestoreSource() == null ? "n/a" : state.getRestoreSource().snapshotId().getRepository())); + assertThat( + cells.get(8).value, + equalTo(state.getRestoreSource() == null ? "n/a" : state.getRestoreSource().snapshotId().getSnapshot())); + assertThat(cells.get(9).value, equalTo(state.getIndex().totalRecoverFiles())); + assertThat(cells.get(10).value, equalTo(state.getIndex().recoveredFileCount())); + assertThat(cells.get(11).value, equalTo(percent(state.getIndex().recoveredFilesPercent()))); + assertThat(cells.get(12).value, equalTo(state.getIndex().totalFileCount())); + assertThat(cells.get(13).value, equalTo(state.getIndex().totalRecoverBytes())); + assertThat(cells.get(14).value, equalTo(state.getIndex().recoveredBytes())); + assertThat(cells.get(15).value, equalTo(percent(state.getIndex().recoveredBytesPercent()))); + assertThat(cells.get(16).value, equalTo(state.getIndex().totalBytes())); + assertThat(cells.get(17).value, equalTo(state.getTranslog().totalOperations())); + assertThat(cells.get(18).value, equalTo(state.getTranslog().recoveredOperations())); + assertThat(cells.get(19).value, equalTo(percent(state.getTranslog().recoveredPercent()))); + } + } + + private static String percent(float percent) { + return String.format(Locale.ROOT, "%1.1f%%", percent); + } + +} diff --git a/docs/reference/migration/migrate_5_0.asciidoc 
b/docs/reference/migration/migrate_5_0.asciidoc index 62381e480e32..8e082a1e426d 100644 --- a/docs/reference/migration/migrate_5_0.asciidoc +++ b/docs/reference/migration/migrate_5_0.asciidoc @@ -191,6 +191,21 @@ The `host` field has been removed from the cat nodes API as its value is always equal to the `ip` field. The `name` field is available in the cat nodes API and should be used instead of the `host` field. +==== Changes to cat recovery API + +The fields `bytes_recovered` and `files_recovered` have been added to +the cat recovery API. These fields, respectively, indicate the total +number of bytes and files that have been recovered. + +The fields `total_files` and `total_bytes` have been renamed to +`files_total` and `bytes_total`, respectively. + +Additionally, the field `translog` has been renamed to +`translog_ops_recovered`, the field `total_translog` to +`translog_ops` and the field `translog_percent` to +`translog_ops_percent`. The short aliases for these fields are `tor`, +`to`, and `top`, respectively.
+ [[breaking_50_parent_child_changes]] === Parent/Child changes diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml index 432b0e50ae42..820cf6dec4db 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml @@ -35,14 +35,16 @@ [-\w./]+ \s+ # repository [-\w./]+ \s+ # snapshot \d+ \s+ # files + \d+ \s+ # files_recovered \d+\.\d+% \s+ # files_percent + \d+ \s+ # files_total \d+ \s+ # bytes + \d+ \s+ # bytes_recovered \d+\.\d+% \s+ # bytes_percent - \d+ \s+ # total_files - \d+ \s+ # total_bytes - \d+ \s+ # translog - -?\d+\.\d+% \s+ # translog_percent - -?\d+ # total_translog + \d+ \s+ # bytes_total + -?\d+ \s+ # translog_ops + \d+ \s+ # translog_ops_recovered + -?\d+\.\d+% # translog_ops_percent \n )+ $/ From a5a9bbfe88689dffaca25e359979ccd4f0930fdd Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Fri, 11 Mar 2016 15:07:51 +0100 Subject: [PATCH 183/320] Update compound-word-tokenfilter.asciidoc Only FOP v1.2 compatible hyphenation files are supported by the hyphenation decompounder --- .../analysis/tokenfilters/compound-word-tokenfilter.asciidoc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/reference/analysis/tokenfilters/compound-word-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/compound-word-tokenfilter.asciidoc index f77c47d156ef..1268727b2efd 100644 --- a/docs/reference/analysis/tokenfilters/compound-word-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/compound-word-tokenfilter.asciidoc @@ -27,7 +27,8 @@ use. For languages like German they are quite good. XML based hyphenation grammar files can be found in the http://offo.sourceforge.net/hyphenation/#FOP+XML+Hyphenation+Patterns[Objects For Formatting Objects] -(OFFO) Sourceforge project. 
You can download http://downloads.sourceforge.net/offo/offo-hyphenation.zip[offo-hyphenation.zip] +(OFFO) Sourceforge project. Currently only FOP v1.2 compatible hyphenation files +are supported. You can download https://sourceforge.net/projects/offo/files/offo-hyphenation/1.2/offo-hyphenation_v1.2.zip/download[offo-hyphenation_v1.2.zip] directly and look in the `offo-hyphenation/hyph/` directory. Credits for the hyphenation code go to the Apache FOP project . From 7aa29e3f7c4f6959592dcfab5648197a03eb7568 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 11 Mar 2016 15:20:52 +0100 Subject: [PATCH 184/320] Make Copy of collate parameter map Test failures showed problems with passing down the same collate parameter map reference from the phrase suggestion builder to the context where. This changes the collate parameter setters to make a shallow copy of the map passed in. --- .../search/suggest/phrase/PhraseSuggestionBuilder.java | 6 ++++-- .../search/suggest/phrase/PhraseSuggestionContext.java | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java index a5856649bd8f..790ca636587c 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java @@ -346,10 +346,12 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder collateParams) { - this.collateParams = collateParams; + Objects.requireNonNull(collateParams, "collate parameters cannot be null."); + this.collateParams = new HashMap<>(collateParams); return this; } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java index 
95c02d5add85..80ac850a38c4 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java @@ -201,7 +201,7 @@ class PhraseSuggestionContext extends SuggestionContext { } void setCollateScriptParams(Map collateScriptParams) { - this.collateScriptParams = collateScriptParams; + this.collateScriptParams = new HashMap<>(collateScriptParams); } void setCollatePrune(boolean prune) { From 422df6089c9813868dabd8eea0aacd885082be5c Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Fri, 11 Mar 2016 16:35:50 +0100 Subject: [PATCH 185/320] [TEST] Unblock nodes if snapshot/restore test fails --- .../SharedClusterSnapshotRestoreIT.java | 98 ++++++++++--------- 1 file changed, 52 insertions(+), 46 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 51924244f5d8..cf0e37a51b45 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -1862,41 +1862,44 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas } else { waitForBlockOnAnyDataNode("test-repo", TimeValue.timeValueMinutes(1)); } - if (allowPartial) { - // partial snapshots allow close / delete operations - if (randomBoolean()) { - logger.info("--> delete index while partial snapshot is running"); - client.admin().indices().prepareDelete("test-idx-1").get(); - } else { - logger.info("--> close index while partial snapshot is running"); - client.admin().indices().prepareClose("test-idx-1").get(); - } - } else { - // non-partial snapshots do not allow close / delete operations on indices where snapshot has not been completed - if (randomBoolean()) { - try { - logger.info("--> delete index while 
non-partial snapshot is running"); + try { + if (allowPartial) { + // partial snapshots allow close / delete operations + if (randomBoolean()) { + logger.info("--> delete index while partial snapshot is running"); client.admin().indices().prepareDelete("test-idx-1").get(); - fail("Expected deleting index to fail during snapshot"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("Cannot delete indices that are being snapshotted: [test-idx-1]")); + } else { + logger.info("--> close index while partial snapshot is running"); + client.admin().indices().prepareClose("test-idx-1").get(); } } else { - try { - logger.info("--> close index while non-partial snapshot is running"); - client.admin().indices().prepareClose("test-idx-1").get(); - fail("Expected closing index to fail during snapshot"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("Cannot close indices that are being snapshotted: [test-idx-1]")); + // non-partial snapshots do not allow close / delete operations on indices where snapshot has not been completed + if (randomBoolean()) { + try { + logger.info("--> delete index while non-partial snapshot is running"); + client.admin().indices().prepareDelete("test-idx-1").get(); + fail("Expected deleting index to fail during snapshot"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("Cannot delete indices that are being snapshotted: [test-idx-1]")); + } + } else { + try { + logger.info("--> close index while non-partial snapshot is running"); + client.admin().indices().prepareClose("test-idx-1").get(); + fail("Expected closing index to fail during snapshot"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("Cannot close indices that are being snapshotted: [test-idx-1]")); + } } } - } - if (initBlocking) { - logger.info("--> unblock running master node"); - unblockNode(internalCluster().getMasterName()); - } else { - 
logger.info("--> unblock all data nodes"); - unblockAllDataNodes("test-repo"); + } finally { + if (initBlocking) { + logger.info("--> unblock running master node"); + unblockNode(internalCluster().getMasterName()); + } else { + logger.info("--> unblock all data nodes"); + unblockAllDataNodes("test-repo"); + } } logger.info("--> waiting for snapshot to finish"); CreateSnapshotResponse createSnapshotResponse = future.get(); @@ -1946,24 +1949,27 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas blockAllDataNodes("test-repo"); logger.info("--> execution will be blocked on all data nodes"); - logger.info("--> start restore"); - ListenableActionFuture restoreFut = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap") - .setWaitForCompletion(true) - .execute(); - - logger.info("--> waiting for block to kick in"); - waitForBlockOnAnyDataNode("test-repo", TimeValue.timeValueSeconds(60)); - - logger.info("--> close index while restore is running"); + final ListenableActionFuture restoreFut; try { - client.admin().indices().prepareClose("test-idx-1").get(); - fail("Expected closing index to fail during restore"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("Cannot close indices that are being restored: [test-idx-1]")); - } + logger.info("--> start restore"); + restoreFut = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap") + .setWaitForCompletion(true) + .execute(); - logger.info("--> unblocking all data nodes"); - unblockAllDataNodes("test-repo"); + logger.info("--> waiting for block to kick in"); + waitForBlockOnAnyDataNode("test-repo", TimeValue.timeValueSeconds(60)); + + logger.info("--> close index while restore is running"); + try { + client.admin().indices().prepareClose("test-idx-1").get(); + fail("Expected closing index to fail during restore"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("Cannot close indices 
that are being restored: [test-idx-1]")); + } + } finally { + logger.info("--> unblocking all data nodes"); + unblockAllDataNodes("test-repo"); + } logger.info("--> wait for restore to finish"); RestoreSnapshotResponse restoreSnapshotResponse = restoreFut.get(); From 8ab4d001e2053834f8f36383f403d28a25791ac4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 11 Mar 2016 10:30:00 +0100 Subject: [PATCH 186/320] Make ScriptSortBuilder implement NamedWritable This adds methods and tests to ScriptSortBuilder that makes it implement NamedWritable and adds the fromXContent method needed to read itseld from xContent. --- .../search/sort/ScriptSortBuilder.java | 212 +++++++++++++++++- .../search/sort/ScriptSortParser.java | 6 +- .../search/sort/AbstractSortTestCase.java | 1 + .../search/sort/ScriptSortBuilderTests.java | 87 +++++++ 4 files changed, 293 insertions(+), 13 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index e554eb8846bc..a767faa3e0ea 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -19,24 +19,49 @@ package org.elasticsearch.search.sort; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; +import 
org.elasticsearch.script.Script.ScriptField; +import org.elasticsearch.script.ScriptParameterParser; +import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; /** * Script sort builder allows to sort based on a custom script expression. */ -public class ScriptSortBuilder extends SortBuilder { +public class ScriptSortBuilder extends SortBuilder implements NamedWriteable, + SortElementParserTemp { - private Script script; + private static final String NAME = "_script"; + static final ScriptSortBuilder PROTOTYPE = new ScriptSortBuilder(new Script("_na_"), "_na_"); + public static final ParseField TYPE_FIELD = new ParseField("type"); + public static final ParseField SCRIPT_FIELD = new ParseField("script"); + public static final ParseField SORTMODE_FIELD = new ParseField("mode"); + public static final ParseField NESTED_PATH_FIELD = new ParseField("nested_path"); + public static final ParseField NESTED_FILTER_FIELD = new ParseField("nested_filter"); + public static final ParseField PARAMS_FIELD = new ParseField("params"); + private final Script script; + + // TODO make this an enum private final String type; + // TODO make this an enum private String sortMode; - private QueryBuilder nestedFilter; + private QueryBuilder nestedFilter; private String nestedPath; @@ -45,12 +70,40 @@ public class ScriptSortBuilder extends SortBuilder { * * @param script * The script to use. 
+ * @param type + * The type of the script, can be either {@link ScriptSortParser#STRING_SORT_TYPE} or + * {@link ScriptSortParser#NUMBER_SORT_TYPE} */ public ScriptSortBuilder(Script script, String type) { + Objects.requireNonNull(script, "script cannot be null"); + Objects.requireNonNull(type, "type cannot be null"); this.script = script; this.type = type; } + ScriptSortBuilder(ScriptSortBuilder original) { + this.script = original.script; + this.type = original.type; + this.order = original.order; + this.sortMode = original.sortMode; + this.nestedFilter = original.nestedFilter; + this.nestedPath = original.nestedPath; + } + + /** + * Get the script used in this sort. + */ + public Script script() { + return this.script; + } + + /** + * Get the type used in this sort. + */ + public String type() { + return this.type; + } + /** * Defines which distance to use for sorting in the case a document contains multiple geo points. * Possible values: min and max @@ -60,6 +113,13 @@ public class ScriptSortBuilder extends SortBuilder { return this; } + /** + * Get the sort mode. + */ + public String sortMode() { + return this.sortMode; + } + /** * Sets the nested filter that the nested objects should match with in order to be taken into account * for sorting. @@ -69,6 +129,13 @@ public class ScriptSortBuilder extends SortBuilder { return this; } + /** + * Gets the nested filter. + */ + public QueryBuilder getNestedFilter() { + return this.nestedFilter; + } + /** * Sets the nested path if sorting occurs on a field that is inside a nested object. For sorting by script this * needs to be specified. @@ -78,22 +145,149 @@ public class ScriptSortBuilder extends SortBuilder { return this; } + /** + * Gets the nested path. 
+ */ + public String getNestedPath() { + return this.nestedPath; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) throws IOException { - builder.startObject("_script"); - builder.field("script", script); - builder.field("type", type); + builder.startObject(NAME); + builder.field(SCRIPT_FIELD.getPreferredName(), script); + builder.field(TYPE_FIELD.getPreferredName(), type); builder.field(ORDER_FIELD.getPreferredName(), order); if (sortMode != null) { - builder.field("mode", sortMode); + builder.field(SORTMODE_FIELD.getPreferredName(), sortMode); } if (nestedPath != null) { - builder.field("nested_path", nestedPath); + builder.field(NESTED_PATH_FIELD.getPreferredName(), nestedPath); } if (nestedFilter != null) { - builder.field("nested_filter", nestedFilter, builderParams); + builder.field(NESTED_FILTER_FIELD.getPreferredName(), nestedFilter, builderParams); } builder.endObject(); return builder; } + + @Override + public ScriptSortBuilder fromXContent(QueryParseContext context, String elementName) throws IOException { + ScriptParameterParser scriptParameterParser = new ScriptParameterParser(); + XContentParser parser = context.parser(); + ParseFieldMatcher parseField = context.parseFieldMatcher(); + Script script = null; + String type = null; + String sortMode = null; + SortOrder order = null; + QueryBuilder nestedFilter = null; + String nestedPath = null; + Map params = new HashMap<>(); + + XContentParser.Token token; + String currentName = parser.currentName(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentName = parser.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + if (parseField.match(currentName, ScriptField.SCRIPT)) { + script = Script.parse(parser, parseField); + } else if (parseField.match(currentName, PARAMS_FIELD)) { + params = parser.map(); + } else if (parseField.match(currentName, 
NESTED_FILTER_FIELD)) { + nestedFilter = context.parseInnerQueryBuilder(); + } + } else if (token.isValue()) { + if (parseField.match(currentName, ORDER_FIELD)) { + order = SortOrder.fromString(parser.text()); + } else if (scriptParameterParser.token(currentName, token, parser, parseField)) { + // Do Nothing (handled by ScriptParameterParser + } else if (parseField.match(currentName, TYPE_FIELD)) { + type = parser.text(); + } else if (parseField.match(currentName, SORTMODE_FIELD)) { + sortMode = parser.text(); + } else if (parseField.match(currentName, NESTED_PATH_FIELD)) { + nestedPath = parser.text(); + } + } + } + + if (script == null) { // Didn't find anything using the new API so try using the old one instead + ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); + if (scriptValue != null) { + if (params == null) { + params = new HashMap<>(); + } + script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), params); + } + } + + ScriptSortBuilder result = new ScriptSortBuilder(script, type); + if (order != null) { + result.order(order); + } + if (sortMode != null) { + result.sortMode(sortMode); + } + if (nestedFilter != null) { + result.setNestedFilter(nestedFilter); + } + if (nestedPath != null) { + result.setNestedPath(nestedPath); + } + return result; + } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + if (object == null || getClass() != object.getClass()) { + return false; + } + ScriptSortBuilder other = (ScriptSortBuilder) object; + return Objects.equals(script, other.script) && + Objects.equals(type, other.type) && + Objects.equals(order, other.order) && + Objects.equals(sortMode, other.sortMode) && + Objects.equals(nestedFilter, other.nestedFilter) && + Objects.equals(nestedPath, other.nestedPath); + } + + @Override + public int hashCode() { + return Objects.hash(script, type, order, sortMode, nestedFilter, nestedPath); + } + + 
@Override + public void writeTo(StreamOutput out) throws IOException { + script.writeTo(out); + out.writeString(type); + order.writeTo(out); + out.writeOptionalString(sortMode); + out.writeOptionalString(nestedPath); + boolean hasNestedFilter = nestedFilter != null; + out.writeBoolean(hasNestedFilter); + if (hasNestedFilter) { + out.writeQuery(nestedFilter); + } + } + + @Override + public ScriptSortBuilder readFrom(StreamInput in) throws IOException { + ScriptSortBuilder builder = new ScriptSortBuilder(Script.readScript(in), in.readString()); + builder.order(SortOrder.readOrderFrom(in)); + builder.sortMode = in.readOptionalString(); + builder.nestedPath = in.readOptionalString(); + if (in.readBoolean()) { + builder.nestedFilter = in.readQuery(); + } + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } } diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java index c30ea503d80e..9bf4dde71148 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java @@ -48,8 +48,6 @@ import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.MultiValueMode; -import org.elasticsearch.search.SearchParseException; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.Collections; @@ -61,8 +59,8 @@ import java.util.Map; */ public class ScriptSortParser implements SortParser { - private static final String STRING_SORT_TYPE = "string"; - private static final String NUMBER_SORT_TYPE = "number"; + public static final String STRING_SORT_TYPE = "string"; + public static final String NUMBER_SORT_TYPE = "number"; @Override public String[] names() { diff --git 
a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index dc61f0ef34c4..71866c34d2ed 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -55,6 +55,7 @@ public abstract class AbstractSortTestCase { + + @Override + protected ScriptSortBuilder createTestItem() { + ScriptSortBuilder builder = new ScriptSortBuilder(new Script(randomAsciiOfLengthBetween(5, 10)), + randomBoolean() ? ScriptSortParser.NUMBER_SORT_TYPE : ScriptSortParser.STRING_SORT_TYPE); + if (randomBoolean()) { + builder.order(RandomSortDataGenerator.order(builder.order())); + } + if (randomBoolean()) { + builder.sortMode(RandomSortDataGenerator.mode(builder.sortMode())); + } + if (randomBoolean()) { + builder.setNestedFilter(RandomSortDataGenerator.nestedFilter(builder.getNestedFilter())); + } + if (randomBoolean()) { + builder.setNestedPath(RandomSortDataGenerator.randomAscii(builder.getNestedPath())); + } + return builder; + } + + @Override + protected ScriptSortBuilder mutate(ScriptSortBuilder original) throws IOException { + ScriptSortBuilder result; + if (randomBoolean()) { + // change one of the constructor args, copy the rest over + Script script = original.script(); + String type = original.type(); + if (randomBoolean()) { + result = new ScriptSortBuilder(new Script(script.getScript() + "_suffix"), type); + } else { + result = new ScriptSortBuilder(script, type + "_suffix"); + } + result.order(original.order()); + result.sortMode(original.sortMode()); + result.setNestedFilter(original.getNestedFilter()); + result.setNestedPath(original.getNestedPath()); + return result; + } + result = new ScriptSortBuilder(original); + switch (randomIntBetween(0, 3)) { + case 0: + if (original.order() == SortOrder.ASC) { + result.order(SortOrder.DESC); + } else { + result.order(SortOrder.ASC); + 
} + break; + case 1: + result.sortMode(RandomSortDataGenerator.mode(original.sortMode())); + break; + case 2: + result.setNestedFilter(RandomSortDataGenerator.nestedFilter(original.getNestedFilter())); + break; + case 3: + result.setNestedPath(original.getNestedPath() + "_some_suffix"); + break; + } + return result; + } +} From 5107388fe9250978373e3413627dd76a210a432d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 11 Mar 2016 17:32:39 +0100 Subject: [PATCH 187/320] Added enum for script sort type --- .../search/sort/ScriptSortBuilder.java | 59 ++++++++++++++++--- .../search/sort/ScriptSortParser.java | 14 ++--- .../search/sort/SortBuilders.java | 11 ++-- .../aggregations/metrics/TopHitsTests.java | 3 +- .../builder/SearchSourceBuilderTests.java | 3 +- .../search/sort/ScriptSortBuilderTests.java | 23 +++++++- .../messy/tests/SimpleSortTests.java | 7 ++- .../script/groovy/GroovyScriptTests.java | 4 +- 8 files changed, 92 insertions(+), 32 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index a767faa3e0ea..6254d5b1e414 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; @@ -35,6 +36,7 @@ import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; import java.io.IOException; import java.util.HashMap; +import java.util.Locale; import 
java.util.Map; import java.util.Objects; @@ -45,7 +47,7 @@ public class ScriptSortBuilder extends SortBuilder implements SortElementParserTemp { private static final String NAME = "_script"; - static final ScriptSortBuilder PROTOTYPE = new ScriptSortBuilder(new Script("_na_"), "_na_"); + static final ScriptSortBuilder PROTOTYPE = new ScriptSortBuilder(new Script("_na_"), ScriptSortType.STRING); public static final ParseField TYPE_FIELD = new ParseField("type"); public static final ParseField SCRIPT_FIELD = new ParseField("script"); public static final ParseField SORTMODE_FIELD = new ParseField("mode"); @@ -55,8 +57,7 @@ public class ScriptSortBuilder extends SortBuilder implements private final Script script; - // TODO make this an enum - private final String type; + private ScriptSortType type; // TODO make this an enum private String sortMode; @@ -74,7 +75,7 @@ public class ScriptSortBuilder extends SortBuilder implements * The type of the script, can be either {@link ScriptSortParser#STRING_SORT_TYPE} or * {@link ScriptSortParser#NUMBER_SORT_TYPE} */ - public ScriptSortBuilder(Script script, String type) { + public ScriptSortBuilder(Script script, ScriptSortType type) { Objects.requireNonNull(script, "script cannot be null"); Objects.requireNonNull(type, "type cannot be null"); this.script = script; @@ -100,7 +101,7 @@ public class ScriptSortBuilder extends SortBuilder implements /** * Get the type used in this sort. 
*/ - public String type() { + public ScriptSortType type() { return this.type; } @@ -177,7 +178,7 @@ public class ScriptSortBuilder extends SortBuilder implements XContentParser parser = context.parser(); ParseFieldMatcher parseField = context.parseFieldMatcher(); Script script = null; - String type = null; + ScriptSortType type = null; String sortMode = null; SortOrder order = null; QueryBuilder nestedFilter = null; @@ -203,7 +204,7 @@ public class ScriptSortBuilder extends SortBuilder implements } else if (scriptParameterParser.token(currentName, token, parser, parseField)) { // Do Nothing (handled by ScriptParameterParser } else if (parseField.match(currentName, TYPE_FIELD)) { - type = parser.text(); + type = ScriptSortType.fromString(parser.text()); } else if (parseField.match(currentName, SORTMODE_FIELD)) { sortMode = parser.text(); } else if (parseField.match(currentName, NESTED_PATH_FIELD)) { @@ -263,7 +264,7 @@ public class ScriptSortBuilder extends SortBuilder implements @Override public void writeTo(StreamOutput out) throws IOException { script.writeTo(out); - out.writeString(type); + type.writeTo(out); order.writeTo(out); out.writeOptionalString(sortMode); out.writeOptionalString(nestedPath); @@ -276,7 +277,7 @@ public class ScriptSortBuilder extends SortBuilder implements @Override public ScriptSortBuilder readFrom(StreamInput in) throws IOException { - ScriptSortBuilder builder = new ScriptSortBuilder(Script.readScript(in), in.readString()); + ScriptSortBuilder builder = new ScriptSortBuilder(Script.readScript(in), ScriptSortType.PROTOTYPE.readFrom(in)); builder.order(SortOrder.readOrderFrom(in)); builder.sortMode = in.readOptionalString(); builder.nestedPath = in.readOptionalString(); @@ -290,4 +291,44 @@ public class ScriptSortBuilder extends SortBuilder implements public String getWriteableName() { return NAME; } + + public enum ScriptSortType implements Writeable { + /** script sort for a string value **/ + STRING, + /** script sort for a numeric 
value **/ + NUMBER; + + static ScriptSortType PROTOTYPE = STRING; + + @Override + public void writeTo(final StreamOutput out) throws IOException { + out.writeVInt(ordinal()); + } + + @Override + public ScriptSortType readFrom(final StreamInput in) throws IOException { + int ordinal = in.readVInt(); + if (ordinal < 0 || ordinal >= values().length) { + throw new IOException("Unknown ScriptSortType ordinal [" + ordinal + "]"); + } + return values()[ordinal]; + } + + public static ScriptSortType fromString(final String str) { + Objects.requireNonNull(str, "input string is null"); + switch (str.toLowerCase(Locale.ROOT)) { + case ("string"): + return ScriptSortType.STRING; + case ("number"): + return ScriptSortType.NUMBER; + default: + throw new IllegalArgumentException("Unknown ScriptSortType [" + str + "]"); + } + } + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + } } diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java index 9bf4dde71148..c238ad6ccaf2 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java @@ -48,6 +48,7 @@ import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; import java.io.IOException; import java.util.Collections; @@ -59,9 +60,6 @@ import java.util.Map; */ public class ScriptSortParser implements SortParser { - public static final String STRING_SORT_TYPE = "string"; - public static final String NUMBER_SORT_TYPE = "number"; - @Override public String[] names() { return new String[]{"_script"}; @@ -71,7 +69,7 @@ public class ScriptSortParser implements SortParser { 
public SortField parse(XContentParser parser, QueryShardContext context) throws Exception { ScriptParameterParser scriptParameterParser = new ScriptParameterParser(); Script script = null; - String type = null; + ScriptSortType type = null; Map params = null; boolean reverse = false; MultiValueMode sortMode = null; @@ -101,7 +99,7 @@ public class ScriptSortParser implements SortParser { } else if (scriptParameterParser.token(currentName, token, parser, context.parseFieldMatcher())) { // Do Nothing (handled by ScriptParameterParser } else if ("type".equals(currentName)) { - type = parser.text(); + type = ScriptSortType.fromString(parser.text()); } else if ("mode".equals(currentName)) { sortMode = MultiValueMode.fromString(parser.text()); } else if ("nested_path".equals(currentName) || "nestedPath".equals(currentName)) { @@ -134,7 +132,7 @@ public class ScriptSortParser implements SortParser { final SearchScript searchScript = context.getScriptService().search( context.lookup(), script, ScriptContext.Standard.SEARCH, Collections.emptyMap()); - if (STRING_SORT_TYPE.equals(type) && (sortMode == MultiValueMode.SUM || sortMode == MultiValueMode.AVG)) { + if (ScriptSortType.STRING.equals(type) && (sortMode == MultiValueMode.SUM || sortMode == MultiValueMode.AVG)) { throw new ParsingException(parser.getTokenLocation(), "type [string] doesn't support mode [" + sortMode + "]"); } @@ -160,7 +158,7 @@ public class ScriptSortParser implements SortParser { final IndexFieldData.XFieldComparatorSource fieldComparatorSource; switch (type) { - case STRING_SORT_TYPE: + case STRING: fieldComparatorSource = new BytesRefFieldComparatorSource(null, null, sortMode, nested) { LeafSearchScript leafScript; @Override @@ -183,7 +181,7 @@ public class ScriptSortParser implements SortParser { } }; break; - case NUMBER_SORT_TYPE: + case NUMBER: // TODO: should we rather sort missing values last? 
fieldComparatorSource = new DoubleValuesComparatorSource(null, Double.MAX_VALUE, sortMode, nested) { LeafSearchScript leafScript; diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortBuilders.java b/core/src/main/java/org/elasticsearch/search/sort/SortBuilders.java index f326fee3837a..3eae9b8d0196 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortBuilders.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortBuilders.java @@ -21,8 +21,7 @@ package org.elasticsearch.search.sort; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.script.Script; - -import java.util.Arrays; +import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; /** * A set of static factory methods for {@link SortBuilder}s. @@ -53,7 +52,7 @@ public class SortBuilders { * @param script The script to use. * @param type The type, can either be "string" or "number". */ - public static ScriptSortBuilder scriptSort(Script script, String type) { + public static ScriptSortBuilder scriptSort(Script script, ScriptSortType type) { return new ScriptSortBuilder(script, type); } @@ -63,12 +62,12 @@ public class SortBuilders { * @param fieldName The geo point like field name. * @param lat Latitude of the point to create the range distance facets from. * @param lon Longitude of the point to create the range distance facets from. - * + * */ public static GeoDistanceSortBuilder geoDistanceSort(String fieldName, double lat, double lon) { return new GeoDistanceSortBuilder(fieldName, lat, lon); } - + /** * Constructs a new distance based sort on a geo point like field. * @@ -87,5 +86,5 @@ public class SortBuilders { */ public static GeoDistanceSortBuilder geoDistanceSort(String fieldName, String ... 
geohashes) { return new GeoDistanceSortBuilder(fieldName, geohashes); - } + } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java index cccac925a1f8..05ea6148a56a 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregatorBuilder; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.HighlightBuilderTests; +import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; @@ -132,7 +133,7 @@ public class TopHitsTests extends BaseAggregationTestCase Date: Fri, 11 Mar 2016 16:06:45 +0100 Subject: [PATCH 188/320] Fixing some tests and compile problems in reindex module --- .../messy/tests/IndicesRequestTests.java | 3 +- .../messy/tests/SuggestSearchTests.java | 66 +++++++++---------- .../AbstractBaseReindexRestHandler.java | 10 ++- .../index/reindex/RestReindexAction.java | 33 ++++++---- .../reindex/RestUpdateByQueryAction.java | 21 +++--- 5 files changed, 72 insertions(+), 61 deletions(-) diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java index 60b9460bb12e..626a5d9af90a 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java @@ -93,6 +93,7 @@ import org.elasticsearch.plugins.Plugin; import 
org.elasticsearch.script.Script; import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.search.action.SearchTransportService; +import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -445,7 +446,7 @@ public class IndicesRequestTests extends ESIntegTestCase { String suggestAction = SuggestAction.NAME + "[s]"; interceptTransportActions(suggestAction); - SuggestRequest suggestRequest = new SuggestRequest(randomIndicesOrAliases()); + SuggestRequest suggestRequest = new SuggestRequest(randomIndicesOrAliases()).suggest(new SuggestBuilder()); internalCluster().clientNodeClient().suggest(suggestRequest).actionGet(); clearInterceptedActions(); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java index cdafa3363cc9..94f60f8802c9 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java @@ -20,6 +20,38 @@ package org.elasticsearch.messy.tests; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; +import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.elasticsearch.index.query.QueryBuilders.matchQuery; +import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion; +import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestion; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionPhraseCollateMatchExists; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionSize; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.nullValue; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.concurrent.ExecutionException; + import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -49,38 +81,6 @@ import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; -import java.io.IOException; -import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.concurrent.ExecutionException; - -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; 
-import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion; -import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestion; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionPhraseCollateMatchExists; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionSize; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; -import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.endsWith; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.nullValue; - /** * Integration tests for term and phrase suggestions. Many of these tests many requests that vary only slightly from one another. 
Where * possible these tests should declare for the first request, make the request, modify the configuration for the next request, make that @@ -343,7 +343,7 @@ public class SuggestSearchTests extends ESIntegTestCase { createIndex("test"); ensureGreen(); - index("test", "type1", "1", "foo", "bar"); + index("test", "type1", "1", "text", "bar"); refresh(); TermSuggestionBuilder termSuggest = termSuggestion("text") diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java index 6f50b216c9bf..f5f612f130d7 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java @@ -19,6 +19,8 @@ package org.elasticsearch.index.reindex; +import java.io.IOException; + import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.TransportAction; @@ -33,23 +35,25 @@ import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AggregatorParsers; +import org.elasticsearch.search.suggest.Suggesters; import org.elasticsearch.tasks.LoggingTaskListener; import org.elasticsearch.tasks.Task; -import java.io.IOException; - public abstract class AbstractBaseReindexRestHandler, Response extends BulkIndexByScrollResponse, TA extends TransportAction> extends BaseRestHandler { protected final IndicesQueriesRegistry indicesQueriesRegistry; protected final AggregatorParsers aggParsers; + protected final Suggesters suggesters; private final ClusterService clusterService; private final TA action; protected AbstractBaseReindexRestHandler(Settings settings, Client client, - IndicesQueriesRegistry 
indicesQueriesRegistry, AggregatorParsers aggParsers, ClusterService clusterService, TA action) { + IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, Suggesters suggesters, + ClusterService clusterService, TA action) { super(settings, client); this.indicesQueriesRegistry = indicesQueriesRegistry; this.aggParsers = aggParsers; + this.suggesters = suggesters; this.clusterService = clusterService; this.action = action; } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java index 1130dc7beab2..44d0d8fcb30b 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java @@ -19,6 +19,14 @@ package org.elasticsearch.index.reindex; +import static org.elasticsearch.common.unit.TimeValue.parseTimeValue; +import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.rest.RestStatus.BAD_REQUEST; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; @@ -43,14 +51,7 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregatorParsers; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.common.unit.TimeValue.parseTimeValue; -import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.rest.RestStatus.BAD_REQUEST; +import org.elasticsearch.search.suggest.Suggesters; /** * Expose IndexBySearchRequest over rest. 
@@ -76,7 +77,7 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler destParser = new ObjectParser<>("dest"); @@ -102,9 +103,9 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler { @Inject public RestUpdateByQueryAction(Settings settings, RestController controller, Client client, - IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, ClusterService clusterService, - TransportUpdateByQueryAction action) { - super(settings, client, indicesQueriesRegistry, aggParsers, clusterService, action); + IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, Suggesters suggesters, + ClusterService clusterService, TransportUpdateByQueryAction action) { + super(settings, client, indicesQueriesRegistry, aggParsers, suggesters, clusterService, action); controller.registerHandler(POST, "/{index}/_update_by_query", this); controller.registerHandler(POST, "/{index}/{type}/_update_by_query", this); } @@ -96,7 +97,7 @@ public class RestUpdateByQueryAction extends } } RestSearchAction.parseSearchRequest(internalRequest.getSearchRequest(), indicesQueriesRegistry, request, - parseFieldMatcher, aggParsers, bodyContent); + parseFieldMatcher, aggParsers, suggesters, bodyContent); String conflicts = request.param("conflicts"); if (conflicts != null) { From e91245e25f908c8bd5f2a3ebe439bd2ce56df86a Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 11 Mar 2016 19:20:37 +0100 Subject: [PATCH 189/320] Use a seed node to form multi-node cluster in integ tests Today we use hardcoded ports to form a cluster in the mulit-node case. The hardcoded URIs are passed to the unicast host list which is error prone and might cause problems if those ports are exhausted etc. This commit moves to a less error prone way of forming the cluster where all nodes are started with port `0` and all but the first node wait for the first node to write it's ports file to form a cluster. This seed node is enough to form a cluster. 
--- .../gradle/test/ClusterConfiguration.groovy | 17 +++++++++++ .../gradle/test/ClusterFormationTasks.groovy | 29 ++++++++++++------- 2 files changed, 36 insertions(+), 10 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy index c9db5657ba4a..3e8b62253294 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy @@ -49,6 +49,15 @@ class ClusterConfiguration { @Input String jvmArgs = System.getProperty('tests.jvm.argline', '') + /** + * The seed nodes port file. In the case the cluster has more than one node we use a seed node + * to form the cluster. The file is null if there is no seed node yet available. + * + * Note: this can only be null if the cluster has only one node or if the first node is not yet + * configured. All nodes but the first node should see a non null value. + */ + File seedNodePortsFile + /** * A closure to call before the cluster is considered ready. The closure is passed the node info, * as well as a groovy AntBuilder, to enable running ant condition checks. 
The default wait @@ -119,4 +128,12 @@ class ClusterConfiguration { } extraConfigFiles.put(path, sourceFile) } + + /** Returns an address and port suitable for a uri to connect to this clusters seed node over transport protocol*/ + String seedNodeTransportUri() { + if (seedNodePortsFile != null) { + return seedNodePortsFile.readLines("UTF-8").get(0) + } + return null; + } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index d96ee5110510..59a27ea36bd2 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -58,6 +58,13 @@ class ClusterFormationTasks { List nodes = [] for (int i = 0; i < config.numNodes; ++i) { NodeInfo node = new NodeInfo(config, i, project, task) + if (i == 0) { + if (config.seedNodePortsFile != null) { + // we might allow this in the future to be set but for now we are the only authority to set this! + throw new GradleException("seedNodePortsFile has a non-null value but first node has not been intialized") + } + config.seedNodePortsFile = node.transportPortsFile; + } nodes.add(node) startTasks.add(configureNode(project, task, node)) } @@ -220,20 +227,22 @@ class ClusterFormationTasks { 'node.testattr' : 'test', 'repositories.url.allowed_urls': 'http://snapshot.test*' ] - if (node.config.numNodes == 1) { - esConfig['http.port'] = node.config.httpPort - esConfig['transport.tcp.port'] = node.config.transportPort - } else { - // TODO: fix multi node so it doesn't use hardcoded prots - esConfig['http.port'] = 9400 + node.nodeNum - esConfig['transport.tcp.port'] = 9500 + node.nodeNum - esConfig['discovery.zen.ping.unicast.hosts'] = (0.. 
0) { // multi-node cluster case, we have to wait for the seed node to startup + ant.waitfor(maxwait: '20', maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond') { + resourceexists { + file(file: node.config.seedNodePortsFile.toString()) + } + } + // the seed node is enough to form the cluster - all subsequent nodes will get the seed node as a unicast + // host and join the cluster via that. + esConfig['discovery.zen.ping.unicast.hosts'] = "\"${node.config.seedNodeTransportUri()}\"" + } File configFile = new File(node.confDir, 'elasticsearch.yml') logger.info("Configuring ${configFile}") configFile.setText(esConfig.collect { key, value -> "${key}: ${value}" }.join('\n'), 'UTF-8') From afb54bab4467a4cac38f314580840470d6907e8e Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Fri, 11 Mar 2016 20:19:08 +0100 Subject: [PATCH 190/320] [TEST] Wait on all data nodes to be blocked if blocks active Fixes race condition in MockRepository where unblock happens before block --- .../snapshots/AbstractSnapshotIntegTestCase.java | 8 ++++---- .../snapshots/SharedClusterSnapshotRestoreIT.java | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index dc803a464124..5ab6b5855c4b 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -149,15 +149,15 @@ public abstract class AbstractSnapshotIntegTestCase extends ESIntegTestCase { } } - public void waitForBlockOnAnyDataNode(String repository, TimeValue timeout) throws InterruptedException { + public void waitForBlockOnAllDataNodes(String repository, TimeValue timeout) throws InterruptedException { if (false == awaitBusy(() -> { for(RepositoriesService repositoriesService : 
internalCluster().getDataNodeInstances(RepositoriesService.class)) { MockRepository mockRepository = (MockRepository) repositoriesService.repository(repository); - if (mockRepository.blocked()) { - return true; + if (mockRepository.blocked() == false) { + return false; } } - return false; + return true; }, timeout.millis(), TimeUnit.MILLISECONDS)) { fail("Timeout waiting for repository block on any data node!!!"); } diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index cf0e37a51b45..00e4d5909910 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -1860,7 +1860,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas if (initBlocking) { waitForBlock(internalCluster().getMasterName(), "test-repo", TimeValue.timeValueMinutes(1)); } else { - waitForBlockOnAnyDataNode("test-repo", TimeValue.timeValueMinutes(1)); + waitForBlockOnAllDataNodes("test-repo", TimeValue.timeValueMinutes(1)); } try { if (allowPartial) { @@ -1957,7 +1957,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .execute(); logger.info("--> waiting for block to kick in"); - waitForBlockOnAnyDataNode("test-repo", TimeValue.timeValueSeconds(60)); + waitForBlockOnAllDataNodes("test-repo", TimeValue.timeValueMinutes(1)); logger.info("--> close index while restore is running"); try { From 5bd7da56597391cc484873b413e72fa5757418c7 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 11 Mar 2016 11:46:23 -0800 Subject: [PATCH 191/320] Addressed PR feedback * Fix tests still referring to -E * add comment about missing classes * rename writer constant --- .../main/java/org/elasticsearch/cli/Terminal.java | 14 +++++++------- modules/lang-groovy/build.gradle | 2 ++ 
.../bootstrap/BootstrapCliParserTests.java | 11 +++++++++-- 3 files changed, 18 insertions(+), 9 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cli/Terminal.java b/core/src/main/java/org/elasticsearch/cli/Terminal.java index 00d886aa8abd..d2dc57263dc1 100644 --- a/core/src/main/java/org/elasticsearch/cli/Terminal.java +++ b/core/src/main/java/org/elasticsearch/cli/Terminal.java @@ -89,35 +89,35 @@ public abstract class Terminal { private static class ConsoleTerminal extends Terminal { - private static final Console console = System.console(); + private static final Console CONSOLE = System.console(); ConsoleTerminal() { super(System.lineSeparator()); } static boolean isSupported() { - return console != null; + return CONSOLE != null; } @Override public PrintWriter getWriter() { - return console.writer(); + return CONSOLE.writer(); } @Override public String readText(String prompt) { - return console.readLine("%s", prompt); + return CONSOLE.readLine("%s", prompt); } @Override public char[] readSecret(String prompt) { - return console.readPassword("%s", prompt); + return CONSOLE.readPassword("%s", prompt); } } private static class SystemTerminal extends Terminal { - private static final PrintWriter writer = newWriter(); + private static final PrintWriter WRITER = newWriter(); SystemTerminal() { super(System.lineSeparator()); @@ -130,7 +130,7 @@ public abstract class Terminal { @Override public PrintWriter getWriter() { - return writer; + return WRITER; } @Override diff --git a/modules/lang-groovy/build.gradle b/modules/lang-groovy/build.gradle index 005a7d4be183..2160210ba732 100644 --- a/modules/lang-groovy/build.gradle +++ b/modules/lang-groovy/build.gradle @@ -38,6 +38,8 @@ thirdPartyAudit.excludes = [ // for example we do not need ivy, scripts arent allowed to download code 'com.thoughtworks.xstream.XStream', 'groovyjarjarasm.asm.util.Textifiable', + // commons-cli is referenced by groovy, even though they supposedly + // jarjar it. 
Since we don't use the cli, we don't need the dep. 'org.apache.commons.cli.CommandLine', 'org.apache.commons.cli.CommandLineParser', 'org.apache.commons.cli.GnuParser', diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java index 2fc08f23a064..fc7504fc97f5 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java @@ -133,7 +133,7 @@ public class BootstrapCliParserTests extends CommandTestCase { public void testConfig() throws Exception { registerProperties("es.foo", "es.spam"); - execute("-Efoo=bar", "-Espam=eggs"); + execute("-Dfoo=bar", "-Dspam=eggs"); assertSystemProperty("es.foo", "bar"); assertSystemProperty("es.spam", "eggs"); assertShouldRun(true); @@ -141,11 +141,18 @@ public class BootstrapCliParserTests extends CommandTestCase { public void testConfigMalformed() throws Exception { UserError e = expectThrows(UserError.class, () -> { - execute("-Efoo"); + execute("-Dfoo"); }); assertTrue(e.getMessage(), e.getMessage().contains("Malformed elasticsearch setting")); } + public void testUnknownOption() throws Exception { + OptionException e = expectThrows(OptionException.class, () -> { + execute("--network.host"); + }); + assertTrue(e.getMessage(), e.getMessage().contains("network.host is not a recognized option")); + } + private void registerProperties(String ... 
systemProperties) { propertiesToClear.addAll(Arrays.asList(systemProperties)); } From 3f44e1d429cb9b84f9ff15479ebe232ffdfcfb8b Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 11 Mar 2016 11:52:59 -0800 Subject: [PATCH 192/320] Remove old reference to site plugins example in docs --- docs/plugins/authors.asciidoc | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/plugins/authors.asciidoc b/docs/plugins/authors.asciidoc index d0a606987ae4..af3710f7e2da 100644 --- a/docs/plugins/authors.asciidoc +++ b/docs/plugins/authors.asciidoc @@ -26,9 +26,7 @@ https://github.com/elastic/elasticsearch/blob/master/buildSrc/src/main/resources Either fill in this template yourself (see https://github.com/lmenezes/elasticsearch-kopf/blob/master/plugin-descriptor.properties[elasticsearch-kopf] as an example) or, if you are using Elasticsearch's Gradle build system, you -can fill in the necessary values in the `build.gradle` file for your plugin. For -instance, see -https://github.com/elastic/elasticsearch/blob/master/plugins/site-example/build.gradle[`/plugins/site-example/build.gradle`]. +can fill in the necessary values in the `build.gradle` file for your plugin. 
[float] ==== Mandatory elements for plugins From c9f30f2f3fb9591bf3d5ca36cb9713960d9a527d Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Fri, 11 Mar 2016 13:30:11 -0500 Subject: [PATCH 193/320] scope internal methods in suggest builders --- .../search/suggest/SuggestBuilders.java | 6 +- .../search/suggest/SuggestionBuilder.java | 322 +++++++++--------- .../CompletionSuggestionBuilder.java | 18 +- .../phrase/PhraseSuggestionBuilder.java | 16 +- .../suggest/term/TermSuggestionBuilder.java | 21 +- .../suggest/CustomSuggesterSearchIT.java | 6 +- 6 files changed, 196 insertions(+), 193 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilders.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilders.java index c9111c660f8f..6050d4ffb4e5 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilders.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilders.java @@ -29,7 +29,7 @@ import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; public abstract class SuggestBuilders { /** - * Creates a term suggestion lookup query with the provided fieldname + * Creates a term suggestion lookup query with the provided field * * @return a {@link org.elasticsearch.search.suggest.term.TermSuggestionBuilder} * instance @@ -39,7 +39,7 @@ public abstract class SuggestBuilders { } /** - * Creates a phrase suggestion lookup query with the provided fieldname + * Creates a phrase suggestion lookup query with the provided field * * @return a {@link org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder} * instance @@ -49,7 +49,7 @@ public abstract class SuggestBuilders { } /** - * Creates a completion suggestion lookup query with the provided fieldname + * Creates a completion suggestion lookup query with the provided field * * @return a {@link org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder} * instance diff --git 
a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java index 4ff418bea396..29f50649cb16 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.suggest; import org.apache.lucene.analysis.Analyzer; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; @@ -44,7 +45,7 @@ import java.util.Objects; */ public abstract class SuggestionBuilder> extends ToXContentToBytes implements NamedWriteable { - protected final String fieldname; + protected final String field; protected String text; protected String prefix; protected String regex; @@ -62,21 +63,21 @@ public abstract class SuggestionBuilder> extends /** * Creates a new suggestion. - * @param fieldname field to fetch the candidate suggestions from + * @param field field to execute suggestions on */ - public SuggestionBuilder(String fieldname) { - Objects.requireNonNull(fieldname, "suggestion requires a field name"); - if (fieldname.isEmpty()) { + protected SuggestionBuilder(String field) { + Objects.requireNonNull(field, "suggestion requires a field name"); + if (field.isEmpty()) { throw new IllegalArgumentException("suggestion field name is empty"); } - this.fieldname = fieldname; + this.field = field; } /** * internal copy constructor that copies over all class fields from second SuggestionBuilder except field name. 
*/ - protected SuggestionBuilder(String fieldname, SuggestionBuilder in) { - this(fieldname); + protected SuggestionBuilder(String field, SuggestionBuilder in) { + this(field); text = in.text; prefix = in.prefix; regex = in.regex; @@ -127,157 +128,11 @@ public abstract class SuggestionBuilder> extends return this.regex; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (text != null) { - builder.field(TEXT_FIELD.getPreferredName(), text); - } - if (prefix != null) { - builder.field(PREFIX_FIELD.getPreferredName(), prefix); - } - if (regex != null) { - builder.field(REGEX_FIELD.getPreferredName(), regex); - } - builder.startObject(getSuggesterName()); - if (analyzer != null) { - builder.field(ANALYZER_FIELD.getPreferredName(), analyzer); - } - builder.field(FIELDNAME_FIELD.getPreferredName(), fieldname); - if (size != null) { - builder.field(SIZE_FIELD.getPreferredName(), size); - } - if (shardSize != null) { - builder.field(SHARDSIZE_FIELD.getPreferredName(), shardSize); - } - - builder = innerToXContent(builder, params); - builder.endObject(); - return builder; - } - - protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException; - - public static SuggestionBuilder fromXContent(QueryParseContext parseContext, Suggesters suggesters) - throws IOException { - XContentParser parser = parseContext.parser(); - ParseFieldMatcher parsefieldMatcher = parseContext.parseFieldMatcher(); - XContentParser.Token token; - String currentFieldName = null; - String suggestText = null; - String prefix = null; - String regex = null; - SuggestionBuilder suggestionBuilder = null; - - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if (parsefieldMatcher.match(currentFieldName, TEXT_FIELD)) { - suggestText = parser.text(); - 
} else if (parsefieldMatcher.match(currentFieldName, PREFIX_FIELD)) { - prefix = parser.text(); - } else if (parsefieldMatcher.match(currentFieldName, REGEX_FIELD)) { - regex = parser.text(); - } else { - throw new ParsingException(parser.getTokenLocation(), "suggestion does not support [" + currentFieldName + "]"); - } - } else if (token == XContentParser.Token.START_OBJECT) { - SuggestionBuilder suggestParser = suggesters.getSuggestionPrototype(currentFieldName); - if (suggestParser == null) { - throw new ParsingException(parser.getTokenLocation(), "suggestion [" + currentFieldName + "] not supported"); - } - suggestionBuilder = suggestParser.innerFromXContent(parseContext); - } - } - if (suggestText != null) { - suggestionBuilder.text(suggestText); - } - if (prefix != null) { - suggestionBuilder.prefix(prefix); - } - if (regex != null) { - suggestionBuilder.regex(regex); - } - return suggestionBuilder; - } - - protected abstract SuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException; - - public SuggestionContext build(QueryShardContext context) throws IOException { - SuggestionContext suggestionContext = innerBuild(context); - return suggestionContext; - } - - protected abstract SuggestionContext innerBuild(QueryShardContext context) throws IOException; - - /** - * Transfers the text, prefix, regex, analyzer, fieldname, size and shard size settings from the - * original {@link SuggestionBuilder} to the target {@link SuggestionContext} - */ - protected void populateCommonFields(MapperService mapperService, - SuggestionSearchContext.SuggestionContext suggestionContext) throws IOException { - - Objects.requireNonNull(fieldname, "fieldname must not be null"); - - MappedFieldType fieldType = mapperService.fullName(fieldname); - if (fieldType == null) { - throw new IllegalArgumentException("no mapping found for field [" + fieldname + "]"); - } else if (analyzer == null) { - // no analyzer name passed in, so try the field's analyzer, or 
the default analyzer - if (fieldType.searchAnalyzer() == null) { - suggestionContext.setAnalyzer(mapperService.searchAnalyzer()); - } else { - suggestionContext.setAnalyzer(fieldType.searchAnalyzer()); - } - } else { - Analyzer luceneAnalyzer = mapperService.analysisService().analyzer(analyzer); - if (luceneAnalyzer == null) { - throw new IllegalArgumentException("analyzer [" + analyzer + "] doesn't exists"); - } - suggestionContext.setAnalyzer(luceneAnalyzer); - } - - suggestionContext.setField(fieldname); - - if (size != null) { - suggestionContext.setSize(size); - } - - if (shardSize != null) { - suggestionContext.setShardSize(shardSize); - } else { - // if no shard size is set in builder, use size (or at least 5) - suggestionContext.setShardSize(Math.max(suggestionContext.getSize(), 5)); - } - - if (text != null) { - suggestionContext.setText(BytesRefs.toBytesRef(text)); - } - if (prefix != null) { - suggestionContext.setPrefix(BytesRefs.toBytesRef(prefix)); - } - if (regex != null) { - suggestionContext.setRegex(BytesRefs.toBytesRef(regex)); - } - if (text != null && prefix == null) { - suggestionContext.setPrefix(BytesRefs.toBytesRef(text)); - } else if (text == null && prefix != null) { - suggestionContext.setText(BytesRefs.toBytesRef(prefix)); - } else if (text == null && regex != null) { - suggestionContext.setText(BytesRefs.toBytesRef(regex)); - } - } - - private String getSuggesterName() { - //default impl returns the same as writeable name, but we keep the distinction between the two just to make sure - return getWriteableName(); - } - /** * get the {@link #field()} parameter */ public String field() { - return this.fieldname; + return this.field; } /** @@ -341,11 +196,154 @@ public abstract class SuggestionBuilder> extends return this.shardSize; } + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (text != null) { + builder.field(TEXT_FIELD.getPreferredName(), text); + } + if (prefix != 
null) { + builder.field(PREFIX_FIELD.getPreferredName(), prefix); + } + if (regex != null) { + builder.field(REGEX_FIELD.getPreferredName(), regex); + } + builder.startObject(getSuggesterName()); + if (analyzer != null) { + builder.field(ANALYZER_FIELD.getPreferredName(), analyzer); + } + builder.field(FIELDNAME_FIELD.getPreferredName(), field); + if (size != null) { + builder.field(SIZE_FIELD.getPreferredName(), size); + } + if (shardSize != null) { + builder.field(SHARDSIZE_FIELD.getPreferredName(), shardSize); + } + + builder = innerToXContent(builder, params); + builder.endObject(); + return builder; + } + + protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException; + + static SuggestionBuilder fromXContent(QueryParseContext parseContext, Suggesters suggesters) + throws IOException { + XContentParser parser = parseContext.parser(); + ParseFieldMatcher parsefieldMatcher = parseContext.parseFieldMatcher(); + XContentParser.Token token; + String currentFieldName = null; + String suggestText = null; + String prefix = null; + String regex = null; + SuggestionBuilder suggestionBuilder = null; + + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (parsefieldMatcher.match(currentFieldName, TEXT_FIELD)) { + suggestText = parser.text(); + } else if (parsefieldMatcher.match(currentFieldName, PREFIX_FIELD)) { + prefix = parser.text(); + } else if (parsefieldMatcher.match(currentFieldName, REGEX_FIELD)) { + regex = parser.text(); + } else { + throw new ParsingException(parser.getTokenLocation(), "suggestion does not support [" + currentFieldName + "]"); + } + } else if (token == XContentParser.Token.START_OBJECT) { + SuggestionBuilder suggestParser = suggesters.getSuggestionPrototype(currentFieldName); + if (suggestParser == null) { + throw new 
ParsingException(parser.getTokenLocation(), "suggestion [" + currentFieldName + "] not supported"); + } + suggestionBuilder = suggestParser.innerFromXContent(parseContext); + } + } + if (suggestionBuilder == null) { + throw new ElasticsearchParseException("missing suggestion object"); + } + if (suggestText != null) { + suggestionBuilder.text(suggestText); + } + if (prefix != null) { + suggestionBuilder.prefix(prefix); + } + if (regex != null) { + suggestionBuilder.regex(regex); + } + return suggestionBuilder; + } + + protected abstract SuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException; + + protected abstract SuggestionContext build(QueryShardContext context) throws IOException; + + /** + * Transfers the text, prefix, regex, analyzer, field, size and shard size settings from the + * original {@link SuggestionBuilder} to the target {@link SuggestionContext} + */ + protected void populateCommonFields(MapperService mapperService, + SuggestionSearchContext.SuggestionContext suggestionContext) throws IOException { + + Objects.requireNonNull(field, "field must not be null"); + + MappedFieldType fieldType = mapperService.fullName(field); + if (fieldType == null) { + throw new IllegalArgumentException("no mapping found for field [" + field + "]"); + } else if (analyzer == null) { + // no analyzer name passed in, so try the field's analyzer, or the default analyzer + if (fieldType.searchAnalyzer() == null) { + suggestionContext.setAnalyzer(mapperService.searchAnalyzer()); + } else { + suggestionContext.setAnalyzer(fieldType.searchAnalyzer()); + } + } else { + Analyzer luceneAnalyzer = mapperService.analysisService().analyzer(analyzer); + if (luceneAnalyzer == null) { + throw new IllegalArgumentException("analyzer [" + analyzer + "] doesn't exists"); + } + suggestionContext.setAnalyzer(luceneAnalyzer); + } + + suggestionContext.setField(field); + + if (size != null) { + suggestionContext.setSize(size); + } + + if (shardSize != null) { + 
suggestionContext.setShardSize(shardSize); + } else { + // if no shard size is set in builder, use size (or at least 5) + suggestionContext.setShardSize(Math.max(suggestionContext.getSize(), 5)); + } + + if (text != null) { + suggestionContext.setText(BytesRefs.toBytesRef(text)); + } + if (prefix != null) { + suggestionContext.setPrefix(BytesRefs.toBytesRef(prefix)); + } + if (regex != null) { + suggestionContext.setRegex(BytesRefs.toBytesRef(regex)); + } + if (text != null && prefix == null) { + suggestionContext.setPrefix(BytesRefs.toBytesRef(text)); + } else if (text == null && prefix != null) { + suggestionContext.setText(BytesRefs.toBytesRef(prefix)); + } else if (text == null && regex != null) { + suggestionContext.setText(BytesRefs.toBytesRef(regex)); + } + } + + private String getSuggesterName() { + //default impl returns the same as writeable name, but we keep the distinction between the two just to make sure + return getWriteableName(); + } @Override public final T readFrom(StreamInput in) throws IOException { - String fieldname = in.readString(); - T suggestionBuilder = doReadFrom(in, fieldname); + String field = in.readString(); + T suggestionBuilder = doReadFrom(in, field); suggestionBuilder.text = in.readOptionalString(); suggestionBuilder.prefix = in.readOptionalString(); suggestionBuilder.regex = in.readOptionalString(); @@ -358,13 +356,13 @@ public abstract class SuggestionBuilder> extends /** * Subclass should return a new instance, reading itself from the input string * @param in the input string to read from - * @param fieldname the fieldname needed for ctor or concrete suggestion + * @param field the field needed for ctor or concrete suggestion */ - protected abstract T doReadFrom(StreamInput in, String fieldname) throws IOException; + protected abstract T doReadFrom(StreamInput in, String field) throws IOException; @Override public final void writeTo(StreamOutput out) throws IOException { - out.writeString(fieldname); + out.writeString(field); 
doWriteTo(out); out.writeOptionalString(text); out.writeOptionalString(prefix); @@ -389,7 +387,7 @@ public abstract class SuggestionBuilder> extends return Objects.equals(text, other.text()) && Objects.equals(prefix, other.prefix()) && Objects.equals(regex, other.regex()) && - Objects.equals(fieldname, other.field()) && + Objects.equals(field, other.field()) && Objects.equals(analyzer, other.analyzer()) && Objects.equals(size, other.size()) && Objects.equals(shardSize, other.shardSize()) && @@ -403,7 +401,7 @@ public abstract class SuggestionBuilder> extends @Override public final int hashCode() { - return Objects.hash(text, prefix, regex, fieldname, analyzer, size, shardSize, doHashCode()); + return Objects.hash(text, prefix, regex, field, analyzer, size, shardSize, doHashCode()); } /** diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 00592e6c9841..7244c544cf2f 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -98,12 +98,12 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder payloadFields = Collections.emptyList(); - public CompletionSuggestionBuilder(String fieldname) { - super(fieldname); + public CompletionSuggestionBuilder(String field) { + super(field); } /** - * internal copy constructor that copies over all class fields except for the fieldname which is + * internal copy constructor that copies over all class fields except for the field which is * set to the one provided in the first argument */ private CompletionSuggestionBuilder(String fieldname, CompletionSuggestionBuilder in) { @@ -205,8 +205,8 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder payloadFields = new ArrayList<>(numPayloadField); 
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java index 790ca636587c..a4793dfbdaab 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.suggest.phrase; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; @@ -94,12 +95,12 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder> generators = new HashMap<>(); - public PhraseSuggestionBuilder(String fieldname) { - super(fieldname); + public PhraseSuggestionBuilder(String field) { + super(field); } /** - * internal copy constructor that copies over all class fields except for the fieldname which is + * internal copy constructor that copies over all class fields except for the field which is * set to the one provided in the first argument */ private PhraseSuggestionBuilder(String fieldname, PhraseSuggestionBuilder in) { @@ -529,14 +530,15 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder options = new HashMap<>(); options.put(FIELDNAME_FIELD.getPreferredName(), field()); options.put(RANDOM_SUFFIX_FIELD.getPreferredName(), randomSuffix); From 97e2bab4cd1214a3fa69d914ae1d3becb2de1bbd Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Fri, 11 Mar 2016 13:47:45 -0500 Subject: [PATCH 194/320] clarify parsing logic in SuggestBuilder --- .../org/elasticsearch/search/suggest/SuggestBuilder.java | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java index 
9306cb4cbdee..aed44c426d3f 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestBuilder.java @@ -118,11 +118,12 @@ public class SuggestBuilder extends ToXContentToBytes implements Writeable Date: Fri, 11 Mar 2016 16:27:33 -0500 Subject: [PATCH 195/320] nuke SuggestParseElement --- .../elasticsearch/search/SearchService.java | 5 + .../search/suggest/SuggestContextParser.java | 29 -- .../search/suggest/SuggestParseElement.java | 136 ------- .../search/suggest/SuggestPhase.java | 16 +- .../search/suggest/Suggester.java | 7 - .../search/suggest/Suggesters.java | 6 +- .../completion/CompletionSuggestParser.java | 143 ------- .../completion/CompletionSuggester.java | 6 - .../CompletionSuggestionBuilder.java | 52 +-- .../suggest/completion/FuzzyOptions.java | 10 + .../suggest/completion/RegexOptions.java | 6 + .../phrase/DirectCandidateGenerator.java | 2 +- .../suggest/phrase/PhraseSuggestParser.java | 356 ------------------ .../suggest/phrase/PhraseSuggester.java | 6 - .../search/suggest/phrase/WordScorer.java | 14 +- .../suggest/term/TermSuggestParser.java | 68 ---- .../search/suggest/term/TermSuggester.java | 7 - .../AbstractSuggestionBuilderTestCase.java | 130 ------- .../search/suggest/CustomSuggester.java | 10 - .../CompletionSuggesterBuilderTests.java | 28 -- .../phrase/DirectCandidateGeneratorTests.java | 69 ---- .../phrase/PhraseSuggestionBuilderTests.java | 34 -- .../term/TermSuggestionBuilderTests.java | 10 - 23 files changed, 63 insertions(+), 1087 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/search/suggest/SuggestContextParser.java delete mode 100644 core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java delete mode 100644 core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java delete mode 100644 core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java 
delete mode 100644 core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 463e6d3e5577..2e4dc1502133 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -794,6 +794,11 @@ public class SearchService extends AbstractLifecycleComponent imp } else { SearchParseElement parseElement = this.elementParsers.get(currentFieldName); if (parseElement == null) { + if (currentFieldName != null && currentFieldName.equals("suggest")) { + throw new SearchParseException(context, + "suggest is not supported in [ext], please use SearchSourceBuilder#suggest(SuggestBuilder) instead", + extParser.getTokenLocation()); + } throw new SearchParseException(context, "Unknown element [" + currentFieldName + "] in [ext]", extParser.getTokenLocation()); } else { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestContextParser.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestContextParser.java deleted file mode 100644 index 53d510bf5305..000000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestContextParser.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.search.suggest; - -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.QueryShardContext; - -import java.io.IOException; - -public interface SuggestContextParser { - SuggestionSearchContext.SuggestionContext parse(XContentParser parser, QueryShardContext shardContext) throws IOException; - -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java deleted file mode 100644 index b9454dc264aa..000000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.search.suggest; - -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.search.SearchParseElement; -import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -/** - * - */ -public final class SuggestParseElement implements SearchParseElement { - private Suggesters suggesters; - - @Inject - public SuggestParseElement(Suggesters suggesters) { - this.suggesters = suggesters; - } - - @Override - public void parse(XContentParser parser, SearchContext context) throws Exception { - SuggestionSearchContext suggestionSearchContext = parseInternal(parser, context.getQueryShardContext()); - context.suggest(suggestionSearchContext); - } - - public SuggestionSearchContext parseInternal(XContentParser parser, QueryShardContext shardContext) throws IOException { - SuggestionSearchContext suggestionSearchContext = new SuggestionSearchContext(); - MapperService mapperService = shardContext.getMapperService(); - - BytesRef globalText = null; - String fieldName = null; - Map suggestionContexts = new HashMap<>(); - - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); - } else if (token.isValue()) { - if ("text".equals(fieldName)) { - globalText = parser.utf8Bytes(); - } else { - throw new IllegalArgumentException("[suggest] does not support [" + fieldName + "]"); - } - } else if (token == XContentParser.Token.START_OBJECT) { - String suggestionName = fieldName; - BytesRef suggestText = null; - BytesRef prefix = null; - BytesRef 
regex = null; - SuggestionContext suggestionContext = null; - - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); - } else if (token.isValue()) { - if ("text".equals(fieldName)) { - suggestText = parser.utf8Bytes(); - } else if ("prefix".equals(fieldName)) { - prefix = parser.utf8Bytes(); - } else if ("regex".equals(fieldName)) { - regex = parser.utf8Bytes(); - } else { - throw new IllegalArgumentException("[suggest] does not support [" + fieldName + "]"); - } - } else if (token == XContentParser.Token.START_OBJECT) { - if (suggestionName == null) { - throw new IllegalArgumentException("Suggestion must have name"); - } - if (suggesters.get(fieldName) == null) { - throw new IllegalArgumentException("Suggester[" + fieldName + "] not supported"); - } - final SuggestContextParser contextParser = suggesters.get(fieldName).getContextParser(); - suggestionContext = contextParser.parse(parser, shardContext); - } - } - if (suggestionContext != null) { - if (suggestText != null) { - suggestionContext.setText(suggestText); - } - if (prefix != null) { - suggestionContext.setPrefix(prefix); - } - if (regex != null) { - suggestionContext.setRegex(regex); - } - - if (suggestText != null && prefix == null) { - suggestionContext.setPrefix(suggestText); - suggestionContext.setText(suggestText); - } else if (suggestText == null && prefix != null) { - suggestionContext.setPrefix(prefix); - suggestionContext.setText(prefix); - } else if (regex != null) { - suggestionContext.setRegex(regex); - suggestionContext.setText(regex); - } - suggestionContexts.put(suggestionName, suggestionContext); - } else { - throw new IllegalArgumentException("suggestion context could not be parsed correctly"); - } - } - } - - for (Map.Entry entry : suggestionContexts.entrySet()) { - String suggestionName = entry.getKey(); - SuggestionContext suggestionContext = entry.getValue(); - 
SuggestUtils.verifySuggestion(mapperService, globalText, suggestionContext); - suggestionSearchContext.addSuggestion(suggestionName, suggestionContext); - } - return suggestionSearchContext; - } -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java index 541efa7ef437..29c7343f047f 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java @@ -37,28 +37,22 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import static java.util.Collections.singletonMap; +import static java.util.Collections.emptyMap; /** */ public class SuggestPhase extends AbstractComponent implements SearchPhase { - private final Map parseElements; - private final SuggestParseElement parseElement; @Inject - public SuggestPhase(Settings settings, SuggestParseElement suggestParseElement) { + public SuggestPhase(Settings settings) { super(settings); - this.parseElement = suggestParseElement; - parseElements = singletonMap("suggest", parseElement); } @Override public Map parseElements() { - return parseElements; - } - - public SuggestParseElement parseElement() { - return parseElement; + // this is used to parse SearchSourceBuilder.ext() bytes + // we don't allow any suggestion parsing for the extension + return emptyMap(); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/suggest/Suggester.java b/core/src/main/java/org/elasticsearch/search/suggest/Suggester.java index b9bf603f1c54..5772f2b55d38 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/Suggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/Suggester.java @@ -29,13 +29,6 @@ public abstract class Suggester> innerExecute(String name, T suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException; - /** - * link the suggester to its corresponding 
{@link SuggestContextParser} - * TODO: This method should eventually be removed by {@link #getBuilderPrototype()} once - * we don't directly parse from xContent to the SuggestionContext any more - */ - public abstract SuggestContextParser getContextParser(); - /** * link the suggester to its corresponding {@link SuggestionBuilder} */ diff --git a/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java b/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java index 0bce98c72079..644d9239b8c6 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/Suggesters.java @@ -42,7 +42,7 @@ public final class Suggesters extends ExtensionPoint.ClassMap { @Inject public Suggesters(Map suggesters) { - super("suggester", Suggester.class, new HashSet<>(Arrays.asList("phrase", "term", "completion")), Suggesters.class, SuggestParseElement.class, SuggestPhase.class); + super("suggester", Suggester.class, new HashSet<>(Arrays.asList("phrase", "term", "completion")), Suggesters.class, SuggestPhase.class); this.parsers = Collections.unmodifiableMap(addBuildIns(suggesters)); } @@ -55,10 +55,6 @@ public final class Suggesters extends ExtensionPoint.ClassMap { return map; } - public Suggester get(String type) { - return parsers.get(type); - } - public SuggestionBuilder getSuggestionPrototype(String suggesterName) { Suggester suggester = parsers.get(suggesterName); if (suggester == null) { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java deleted file mode 100644 index e5b70db6999e..000000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.search.suggest.completion; - -import org.apache.lucene.analysis.Analyzer; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.xcontent.ObjectParser; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.core.CompletionFieldMapper; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.query.RegexpFlag; -import org.elasticsearch.search.suggest.SuggestContextParser; -import org.elasticsearch.search.suggest.SuggestUtils.Fields; -import org.elasticsearch.search.suggest.SuggestionSearchContext; - -import java.io.IOException; - -/** - * Parses query options for {@link CompletionSuggester} - * - * Acceptable input: - * { - * "field" : STRING - * "size" : INT - * "fuzzy" : BOOLEAN | FUZZY_OBJECT - * "contexts" : QUERY_CONTEXTS - * "regex" : REGEX_OBJECT - * } - * - * FUZZY_OBJECT : { - * "edit_distance" : STRING | INT - * "transpositions" : BOOLEAN - * 
"min_length" : INT - * "prefix_length" : INT - * "unicode_aware" : BOOLEAN - * "max_determinized_states" : INT - * } - * - * REGEX_OBJECT: { - * "flags" : REGEX_FLAGS - * "max_determinized_states" : INT - * } - * - * see {@link RegexpFlag} for REGEX_FLAGS - */ -public class CompletionSuggestParser implements SuggestContextParser { - - private static ObjectParser TLP_PARSER = new ObjectParser<>(CompletionSuggestionBuilder.SUGGESTION_NAME, null); - static { - TLP_PARSER.declareStringArray(CompletionSuggestionContext::setPayloadFields, CompletionSuggestionBuilder.PAYLOAD_FIELD); - TLP_PARSER.declareField((parser, completionSuggestionContext, context) -> { - if (parser.currentToken() == XContentParser.Token.VALUE_BOOLEAN) { - if (parser.booleanValue()) { - completionSuggestionContext.setFuzzyOptions(new FuzzyOptions.Builder().build()); - } - } else { - completionSuggestionContext.setFuzzyOptions(FuzzyOptions.parse(parser)); - } - }, - FuzzyOptions.FUZZY_OPTIONS, ObjectParser.ValueType.OBJECT_OR_BOOLEAN); - TLP_PARSER.declareField((parser, completionSuggestionContext, context) -> completionSuggestionContext.setRegexOptions(RegexOptions.parse(parser)), - RegexOptions.REGEX_OPTIONS, ObjectParser.ValueType.OBJECT); - TLP_PARSER.declareString(SuggestionSearchContext.SuggestionContext::setField, Fields.FIELD); - TLP_PARSER.declareField((p, v, c) -> { - String analyzerName = p.text(); - Analyzer analyzer = c.mapperService.analysisService().analyzer(analyzerName); - if (analyzer == null) { - throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists"); - } - v.setAnalyzer(analyzer); - }, Fields.ANALYZER, ObjectParser.ValueType.STRING); - TLP_PARSER.declareInt(SuggestionSearchContext.SuggestionContext::setSize, Fields.SIZE); - TLP_PARSER.declareInt(SuggestionSearchContext.SuggestionContext::setShardSize, Fields.SHARD_SIZE); - TLP_PARSER.declareField((p, v, c) -> { - // Copy the current structure. 
We will parse, once the mapping is provided - XContentBuilder builder = XContentFactory.contentBuilder(p.contentType()); - builder.copyCurrentStructure(p); - BytesReference bytes = builder.bytes(); - c.contextParser = XContentFactory.xContent(bytes).createParser(bytes); - p.skipChildren(); - }, CompletionSuggestionBuilder.CONTEXTS_FIELD, ObjectParser.ValueType.OBJECT); // context is deprecated - } - - private static class ContextAndSuggest { - XContentParser contextParser; - final MapperService mapperService; - - ContextAndSuggest(MapperService mapperService) { - this.mapperService = mapperService; - } - } - - private final CompletionSuggester completionSuggester; - - public CompletionSuggestParser(CompletionSuggester completionSuggester) { - this.completionSuggester = completionSuggester; - } - - @Override - public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, QueryShardContext shardContext) throws IOException { - MapperService mapperService = shardContext.getMapperService(); - final CompletionSuggestionContext suggestion = new CompletionSuggestionContext(shardContext); - final ContextAndSuggest contextAndSuggest = new ContextAndSuggest(mapperService); - TLP_PARSER.parse(parser, suggestion, contextAndSuggest); - final XContentParser contextParser = contextAndSuggest.contextParser; - MappedFieldType mappedFieldType = mapperService.fullName(suggestion.getField()); - if (mappedFieldType == null) { - throw new ElasticsearchException("Field [" + suggestion.getField() + "] is not a completion suggest field"); - } else if (mappedFieldType instanceof CompletionFieldMapper.CompletionFieldType) { - CompletionFieldMapper.CompletionFieldType type = (CompletionFieldMapper.CompletionFieldType) mappedFieldType; - if (type.hasContextMappings() == false && contextParser != null) { - throw new IllegalArgumentException("suggester [" + type.name() + "] doesn't expect any context"); - } - 
suggestion.setQueryContexts(CompletionSuggestionBuilder.parseQueryContexts(contextParser, type)); - suggestion.setFieldType(type); - return suggestion; - } else { - throw new IllegalArgumentException("Field [" + suggestion.getField() + "] is not a completion suggest field"); - } - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index e3953c8e0b49..cef0a33fddb6 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -38,7 +38,6 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.CompletionFieldMapper; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.suggest.Suggest; -import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.SuggestionBuilder; @@ -55,11 +54,6 @@ public class CompletionSuggester extends Suggester public static final CompletionSuggester PROTOTYPE = new CompletionSuggester(); - @Override - public SuggestContextParser getContextParser() { - return new CompletionSuggestParser(this); - } - @Override protected Suggest.Suggestion> innerExecute(String name, final CompletionSuggestionContext suggestionContext, final IndexSearcher searcher, CharsRefBuilder spare) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 7244c544cf2f..ca8aad7c8acd 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -63,6 +63,16 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder TLP_PARSER = new ObjectParser<>(SUGGESTION_NAME, null); static { @@ -261,8 +271,24 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder> queryContexts = new HashMap<>(contextMappings.size()); + assert contextParser.currentToken() == XContentParser.Token.START_OBJECT; + XContentParser.Token currentToken; + String currentFieldName; + while ((currentToken = contextParser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (currentToken == XContentParser.Token.FIELD_NAME) { + currentFieldName = contextParser.currentName(); + final ContextMapping mapping = contextMappings.get(currentFieldName); + queryContexts.put(currentFieldName, mapping.parseQueryContext(contextParser)); + } + } + suggestionContext.setQueryContexts(queryContexts); + } + } } else if (contextBytes != null) { throw new IllegalArgumentException("suggester [" + type.name() + "] doesn't expect any context"); } @@ -335,26 +361,4 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder> parseQueryContexts( - XContentParser contextParser, CompletionFieldMapper.CompletionFieldType type) throws IOException { - Map> queryContexts = Collections.emptyMap(); - if (type.hasContextMappings() && contextParser != null) { - ContextMappings contextMappings = type.getContextMappings(); - contextParser.nextToken(); - queryContexts = new HashMap<>(contextMappings.size()); - assert contextParser.currentToken() == XContentParser.Token.START_OBJECT; - XContentParser.Token currentToken; - String currentFieldName; - while ((currentToken = contextParser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (currentToken == XContentParser.Token.FIELD_NAME) { - currentFieldName = contextParser.currentName(); - final ContextMapping mapping = contextMappings.get(currentFieldName); - queryContexts.put(currentFieldName, 
mapping.parseQueryContext(contextParser)); - } - } - contextParser.close(); - } - return queryContexts; - } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java index 709124443bd1..8f05be04699c 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java @@ -46,6 +46,16 @@ public class FuzzyOptions implements ToXContent, Writeable { private static final ParseField UNICODE_AWARE_FIELD = new ParseField("unicode_aware"); private static final ParseField MAX_DETERMINIZED_STATES_FIELD = new ParseField("max_determinized_states"); + /** + * fuzzy : { + * "edit_distance" : STRING | INT + * "transpositions" : BOOLEAN + * "min_length" : INT + * "prefix_length" : INT + * "unicode_aware" : BOOLEAN + * "max_determinized_states" : INT + * } + */ private static ObjectParser PARSER = new ObjectParser<>(FUZZY_OPTIONS.getPreferredName(), Builder::new); static { PARSER.declareInt(Builder::setFuzzyMinLength, MIN_LENGTH_FIELD); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java index 81e524d6e3f5..8503dbdf46d2 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java @@ -42,6 +42,12 @@ public class RegexOptions implements ToXContent, Writeable { private static final ParseField FLAGS_VALUE = new ParseField("flags", "flags_value"); private static final ParseField MAX_DETERMINIZED_STATES = new ParseField("max_determinized_states"); + /** + * regex: { + * "flags" : STRING | INT + * "max_determinized_states" : INT + * } + */ private static ObjectParser PARSER = new 
ObjectParser<>(REGEX_OPTIONS.getPreferredName(), Builder::new); static { PARSER.declareInt(Builder::setMaxDeterminizedStates, MAX_DETERMINIZED_STATES); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java index 5b937500d6b6..a454735ae1cb 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java @@ -178,7 +178,7 @@ public final class DirectCandidateGenerator extends CandidateGenerator { protected long thresholdFrequency(long termFrequency, long dictionarySize) { if (termFrequency > 0) { - return (long) Math.max(0, Math.round(termFrequency * (Math.log10(termFrequency - frequencyPlateau) * (1.0 / Math.log10(logBase))) + 1)); + return Math.max(0, Math.round(termFrequency * (Math.log10(termFrequency - frequencyPlateau) * (1.0 / Math.log10(logBase))) + 1)); } return 0; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java deleted file mode 100644 index e4400fb5cd20..000000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java +++ /dev/null @@ -1,356 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.search.suggest.phrase; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.Terms; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.script.CompiledScript; -import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.script.Template; -import org.elasticsearch.search.suggest.SuggestContextParser; -import org.elasticsearch.search.suggest.SuggestUtils; -import org.elasticsearch.search.suggest.SuggestionSearchContext; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator; - -import java.io.IOException; -import java.util.Collections; - -public final class PhraseSuggestParser implements SuggestContextParser { - - private PhraseSuggester suggester; - - public PhraseSuggestParser(PhraseSuggester suggester) { - this.suggester = suggester; - } - - @Override - public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, QueryShardContext shardContext) throws IOException { - MapperService mapperService = 
shardContext.getMapperService(); - ScriptService scriptService = shardContext.getScriptService(); - PhraseSuggestionContext suggestion = new PhraseSuggestionContext(shardContext); - ParseFieldMatcher parseFieldMatcher = mapperService.getIndexSettings().getParseFieldMatcher(); - XContentParser.Token token; - String fieldName = null; - boolean gramSizeSet = false; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); - } else if (token.isValue()) { - if (!SuggestUtils.parseSuggestContext(parser, mapperService, fieldName, suggestion, parseFieldMatcher)) { - if ("real_word_error_likelihood".equals(fieldName) || "realWorldErrorLikelihood".equals(fieldName)) { - suggestion.setRealWordErrorLikelihood(parser.floatValue()); - if (suggestion.realworldErrorLikelyhood() <= 0.0) { - throw new IllegalArgumentException("real_word_error_likelihood must be > 0.0"); - } - } else if ("confidence".equals(fieldName)) { - suggestion.setConfidence(parser.floatValue()); - if (suggestion.confidence() < 0.0) { - throw new IllegalArgumentException("confidence must be >= 0.0"); - } - } else if ("separator".equals(fieldName)) { - suggestion.setSeparator(new BytesRef(parser.text())); - } else if ("max_errors".equals(fieldName) || "maxErrors".equals(fieldName)) { - suggestion.setMaxErrors(parser.floatValue()); - if (suggestion.maxErrors() <= 0.0) { - throw new IllegalArgumentException("max_error must be > 0.0"); - } - } else if ("gram_size".equals(fieldName) || "gramSize".equals(fieldName)) { - suggestion.setGramSize(parser.intValue()); - if (suggestion.gramSize() < 1) { - throw new IllegalArgumentException("gram_size must be >= 1"); - } - gramSizeSet = true; - } else if ("force_unigrams".equals(fieldName) || "forceUnigrams".equals(fieldName)) { - suggestion.setRequireUnigram(parser.booleanValue()); - } else if ("token_limit".equals(fieldName) || "tokenLimit".equals(fieldName)) { - int 
tokenLimit = parser.intValue(); - if (tokenLimit <= 0) { - throw new IllegalArgumentException("token_limit must be >= 1"); - } - suggestion.setTokenLimit(tokenLimit); - } else { - throw new IllegalArgumentException("suggester[phrase] doesn't support field [" + fieldName + "]"); - } - } - } else if (token == Token.START_ARRAY) { - if (parseFieldMatcher.match(fieldName, DirectCandidateGeneratorBuilder.DIRECT_GENERATOR_FIELD)) { - // for now we only have a single type of generators - while ((token = parser.nextToken()) == Token.START_OBJECT) { - PhraseSuggestionContext.DirectCandidateGenerator generator = parseCandidateGenerator(parser, mapperService, parseFieldMatcher); - verifyGenerator(generator); - suggestion.addGenerator(generator); - } - } else { - throw new IllegalArgumentException("suggester[phrase] doesn't support array field [" + fieldName + "]"); - } - } else if (token == Token.START_OBJECT) { - if ("smoothing".equals(fieldName)) { - parseSmoothingModel(parser, suggestion, fieldName); - } else if ("highlight".equals(fieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); - } else if (token.isValue()) { - if ("pre_tag".equals(fieldName) || "preTag".equals(fieldName)) { - suggestion.setPreTag(parser.utf8Bytes()); - } else if ("post_tag".equals(fieldName) || "postTag".equals(fieldName)) { - suggestion.setPostTag(parser.utf8Bytes()); - } else { - throw new IllegalArgumentException( - "suggester[phrase][highlight] doesn't support field [" + fieldName + "]"); - } - } - } - } else if ("collate".equals(fieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); - } else if ("query".equals(fieldName)) { - if (suggestion.getCollateQueryScript() != null) { - throw new IllegalArgumentException("suggester[phrase][collate] query already 
set, doesn't support additional [" + fieldName + "]"); - } - Template template = Template.parse(parser, parseFieldMatcher); - CompiledScript compiledScript = scriptService.compile(template, ScriptContext.Standard.SEARCH, Collections.emptyMap()); - suggestion.setCollateQueryScript(compiledScript); - } else if ("params".equals(fieldName)) { - suggestion.setCollateScriptParams(parser.map()); - } else if ("prune".equals(fieldName)) { - if (parser.isBooleanValue()) { - suggestion.setCollatePrune(parser.booleanValue()); - } else { - throw new IllegalArgumentException("suggester[phrase][collate] prune must be either 'true' or 'false'"); - } - } else { - throw new IllegalArgumentException( - "suggester[phrase][collate] doesn't support field [" + fieldName + "]"); - } - } - } else { - throw new IllegalArgumentException("suggester[phrase] doesn't support array field [" + fieldName + "]"); - } - } else { - throw new IllegalArgumentException("suggester[phrase] doesn't support field [" + fieldName + "]"); - } - } - - if (suggestion.getField() == null) { - throw new IllegalArgumentException("The required field option is missing"); - } - - MappedFieldType fieldType = mapperService.fullName(suggestion.getField()); - if (fieldType == null) { - throw new IllegalArgumentException("No mapping found for field [" + suggestion.getField() + "]"); - } else if (suggestion.getAnalyzer() == null) { - // no analyzer name passed in, so try the field's analyzer, or the default analyzer - if (fieldType.searchAnalyzer() == null) { - suggestion.setAnalyzer(mapperService.searchAnalyzer()); - } else { - suggestion.setAnalyzer(fieldType.searchAnalyzer()); - } - } - - if (suggestion.model() == null) { - suggestion.setModel(StupidBackoffScorer.FACTORY); - } - - if (!gramSizeSet || suggestion.generators().isEmpty()) { - final ShingleTokenFilterFactory.Factory shingleFilterFactory = SuggestUtils.getShingleFilterFactory(suggestion.getAnalyzer()); - if (!gramSizeSet) { - // try to detect the shingle size - 
if (shingleFilterFactory != null) { - suggestion.setGramSize(shingleFilterFactory.getMaxShingleSize()); - if (suggestion.getAnalyzer() == null && shingleFilterFactory.getMinShingleSize() > 1 && !shingleFilterFactory.getOutputUnigrams()) { - throw new IllegalArgumentException("The default analyzer for field: [" + suggestion.getField() + "] doesn't emit unigrams. If this is intentional try to set the analyzer explicitly"); - } - } - } - if (suggestion.generators().isEmpty()) { - if (shingleFilterFactory != null && shingleFilterFactory.getMinShingleSize() > 1 && !shingleFilterFactory.getOutputUnigrams() && suggestion.getRequireUnigram()) { - throw new IllegalArgumentException("The default candidate generator for phrase suggest can't operate on field: [" + suggestion.getField() + "] since it doesn't emit unigrams. If this is intentional try to set the candidate generator field explicitly"); - } - // use a default generator on the same field - DirectCandidateGenerator generator = new DirectCandidateGenerator(); - generator.setField(suggestion.getField()); - suggestion.addGenerator(generator); - } - } - return suggestion; - } - - public void parseSmoothingModel(XContentParser parser, PhraseSuggestionContext suggestion, String fieldName) throws IOException { - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); - if ("linear".equals(fieldName)) { - ensureNoSmoothing(suggestion); - final double[] lambdas = new double[3]; - while ((token = parser.nextToken()) != Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); - } - if (token.isValue()) { - if ("trigram_lambda".equals(fieldName) || "trigramLambda".equals(fieldName)) { - lambdas[0] = parser.doubleValue(); - if (lambdas[0] < 0) { - throw new IllegalArgumentException("trigram_lambda must be positive"); - } - } else if 
("bigram_lambda".equals(fieldName) || "bigramLambda".equals(fieldName)) { - lambdas[1] = parser.doubleValue(); - if (lambdas[1] < 0) { - throw new IllegalArgumentException("bigram_lambda must be positive"); - } - } else if ("unigram_lambda".equals(fieldName) || "unigramLambda".equals(fieldName)) { - lambdas[2] = parser.doubleValue(); - if (lambdas[2] < 0) { - throw new IllegalArgumentException("unigram_lambda must be positive"); - } - } else { - throw new IllegalArgumentException( - "suggester[phrase][smoothing][linear] doesn't support field [" + fieldName + "]"); - } - } - } - double sum = 0.0d; - for (int i = 0; i < lambdas.length; i++) { - sum += lambdas[i]; - } - if (Math.abs(sum - 1.0) > 0.001) { - throw new IllegalArgumentException("linear smoothing lambdas must sum to 1"); - } - suggestion.setModel(new WordScorer.WordScorerFactory() { - @Override - public WordScorer newScorer(IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) - throws IOException { - return new LinearInterpoatingScorer(reader, terms, field, realWordLikelyhood, separator, lambdas[0], lambdas[1], - lambdas[2]); - } - }); - } else if ("laplace".equals(fieldName)) { - ensureNoSmoothing(suggestion); - double theAlpha = Laplace.DEFAULT_LAPLACE_ALPHA; - - while ((token = parser.nextToken()) != Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); - } - if (token.isValue() && "alpha".equals(fieldName)) { - theAlpha = parser.doubleValue(); - } - } - final double alpha = theAlpha; - suggestion.setModel(new WordScorer.WordScorerFactory() { - @Override - public WordScorer newScorer(IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) - throws IOException { - return new LaplaceScorer(reader, terms, field, realWordLikelyhood, separator, alpha); - } - }); - - } else if ("stupid_backoff".equals(fieldName) || "stupidBackoff".equals(fieldName)) { - 
ensureNoSmoothing(suggestion); - double theDiscount = StupidBackoff.DEFAULT_BACKOFF_DISCOUNT; - while ((token = parser.nextToken()) != Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); - } - if (token.isValue() && "discount".equals(fieldName)) { - theDiscount = parser.doubleValue(); - } - } - final double discount = theDiscount; - suggestion.setModel(new WordScorer.WordScorerFactory() { - @Override - public WordScorer newScorer(IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) - throws IOException { - return new StupidBackoffScorer(reader, terms, field, realWordLikelyhood, separator, discount); - } - }); - - } else { - throw new IllegalArgumentException("suggester[phrase] doesn't support object field [" + fieldName + "]"); - } - } - } - } - - private void ensureNoSmoothing(PhraseSuggestionContext suggestion) { - if (suggestion.model() != null) { - throw new IllegalArgumentException("only one smoothing model supported"); - } - } - - private void verifyGenerator(PhraseSuggestionContext.DirectCandidateGenerator suggestion) { - // Verify options and set defaults - if (suggestion.field() == null) { - throw new IllegalArgumentException("The required field option is missing"); - } - } - - static PhraseSuggestionContext.DirectCandidateGenerator parseCandidateGenerator(XContentParser parser, MapperService mapperService, - ParseFieldMatcher parseFieldMatcher) throws IOException { - XContentParser.Token token; - String fieldName = null; - PhraseSuggestionContext.DirectCandidateGenerator generator = new PhraseSuggestionContext.DirectCandidateGenerator(); - while ((token = parser.nextToken()) != Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); - } - if (token.isValue()) { - if (!SuggestUtils.parseDirectSpellcheckerSettings(parser, fieldName, generator, parseFieldMatcher)) { - if ("field".equals(fieldName)) { - 
generator.setField(parser.text()); - if (mapperService.fullName(generator.field()) == null) { - throw new IllegalArgumentException("No mapping found for field [" + generator.field() + "]"); - } - } else if ("size".equals(fieldName)) { - generator.size(parser.intValue()); - } else if ("pre_filter".equals(fieldName) || "preFilter".equals(fieldName)) { - String analyzerName = parser.text(); - Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName); - if (analyzer == null) { - throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists"); - } - generator.preFilter(analyzer); - } else if ("post_filter".equals(fieldName) || "postFilter".equals(fieldName)) { - String analyzerName = parser.text(); - Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName); - if (analyzer == null) { - throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists"); - } - generator.postFilter(analyzer); - } else { - throw new IllegalArgumentException("CandidateGenerator doesn't support [" + fieldName + "]"); - } - } - } - } - return generator; - } -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java index 8f3e5164e407..14bced639f25 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java @@ -38,7 +38,6 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.suggest.Suggest.Suggestion; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option; -import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.SuggestUtils; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.SuggestionBuilder; 
@@ -144,11 +143,6 @@ public final class PhraseSuggester extends Suggester { return new PhraseSuggestion.Entry(new Text(spare.toString()), 0, spare.length(), cutoffScore); } - @Override - public SuggestContextParser getContextParser() { - return new PhraseSuggestParser(this); - } - @Override public SuggestionBuilder getBuilderPrototype() { return PhraseSuggestionBuilder.PROTOTYPE; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/WordScorer.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/WordScorer.java index 9557715bcb94..69e62c1a1759 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/WordScorer.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/WordScorer.java @@ -77,7 +77,7 @@ public abstract class WordScorer { } return candidate.stringDistance; } - + public double score(Candidate[] path, CandidateSet[] candidateSet, int at, int gramSize) throws IOException { if (at == 0 || gramSize == 1) { return Math.log10(channelScore(path[at], candidateSet[at].originalTerm) * scoreUnigram(path[at])); @@ -87,21 +87,21 @@ public abstract class WordScorer { return Math.log10(channelScore(path[at], candidateSet[at].originalTerm) * scoreTrigram(path[at], path[at - 1], path[at - 2])); } } - + protected double scoreUnigram(Candidate word) throws IOException { return (1.0 + frequency(word.term)) / (vocabluarySize + numTerms); } - + protected double scoreBigram(Candidate word, Candidate w_1) throws IOException { return scoreUnigram(word); } - + protected double scoreTrigram(Candidate word, Candidate w_1, Candidate w_2) throws IOException { return scoreBigram(word, w_1); } - public static interface WordScorerFactory { - public WordScorer newScorer(IndexReader reader, Terms terms, - String field, double realWordLikelyhood, BytesRef separator) throws IOException; + public interface WordScorerFactory { + WordScorer newScorer(IndexReader reader, Terms terms, + String field, double realWordLikelyhood, BytesRef 
separator) throws IOException; } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java deleted file mode 100644 index 7e75976d3a39..000000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.search.suggest.term; - -import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.search.suggest.DirectSpellcheckerSettings; -import org.elasticsearch.search.suggest.SuggestContextParser; -import org.elasticsearch.search.suggest.SuggestUtils; -import org.elasticsearch.search.suggest.SuggestionSearchContext; - -import java.io.IOException; - -public final class TermSuggestParser implements SuggestContextParser { - - private TermSuggester suggester; - - public TermSuggestParser(TermSuggester suggester) { - this.suggester = suggester; - } - - @Override - public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, QueryShardContext shardContext) throws IOException { - MapperService mapperService = shardContext.getMapperService(); - XContentParser.Token token; - String fieldName = null; - TermSuggestionContext suggestion = new TermSuggestionContext(shardContext); - DirectSpellcheckerSettings settings = suggestion.getDirectSpellCheckerSettings(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); - } else if (token.isValue()) { - parseTokenValue(parser, mapperService, fieldName, suggestion, settings, mapperService.getIndexSettings().getParseFieldMatcher()); - } else { - throw new IllegalArgumentException("suggester[term] doesn't support field [" + fieldName + "]"); - } - } - return suggestion; - } - - private void parseTokenValue(XContentParser parser, MapperService mapperService, String fieldName, TermSuggestionContext suggestion, - DirectSpellcheckerSettings settings, ParseFieldMatcher parseFieldMatcher) throws IOException { - if (!(SuggestUtils.parseSuggestContext(parser, mapperService, fieldName, suggestion, 
parseFieldMatcher) || SuggestUtils.parseDirectSpellcheckerSettings( - parser, fieldName, settings, parseFieldMatcher))) { - throw new IllegalArgumentException("suggester[term] doesn't support [" + fieldName + "]"); - - } - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java index 78ed8be6a28f..4bffb2dfe86e 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java @@ -28,7 +28,6 @@ import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.text.Text; -import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.SuggestUtils; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.SuggestionBuilder; @@ -67,12 +66,6 @@ public final class TermSuggester extends Suggester { return response; } - @Override - public SuggestContextParser getContextParser() { - return new TermSuggestParser(this); - } - - private List queryTerms(SuggestionContext suggestion, CharsRefBuilder spare) throws IOException { final List result = new ArrayList<>(); final String field = suggestion.getField(); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index 2837716e3c68..dc2b6081bd6a 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -19,9 +19,7 @@ package org.elasticsearch.search.suggest; -import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import 
org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -34,43 +32,23 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.analysis.AnalysisService; -import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.index.mapper.ContentPath; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.core.StringFieldMapper; -import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.script.CompiledScript; -import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContextRegistry; import org.elasticsearch.script.ScriptEngineRegistry; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptServiceTests.TestEngineService; import org.elasticsearch.script.ScriptSettings; import org.elasticsearch.search.SearchModule; -import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; import 
org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.IndexSettingsModule; -import org.elasticsearch.watcher.ResourceWatcherService; import org.junit.AfterClass; import org.junit.BeforeClass; import java.io.IOException; import java.nio.file.Path; import java.util.Collections; -import java.util.Iterator; -import java.util.Map; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.hamcrest.Matchers.equalTo; @@ -83,8 +61,6 @@ public abstract class AbstractSuggestionBuilderTestCase params) { - return new CompiledScript(ScriptType.INLINE, "mockName", "mocklang", script); - } - }; suggesters = new Suggesters(Collections.emptyMap()); - parseElement = new SuggestParseElement(suggesters); namedWriteableRegistry = new NamedWriteableRegistry(); namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, TermSuggestionBuilder.PROTOTYPE); @@ -224,104 +192,6 @@ public abstract class AbstractSuggestionBuilderTestCase randomFieldTypeAndSuggestionBuilder() { - StringFieldType type = new StringFieldType(); - if (randomBoolean()) { - type.setSearchAnalyzer(new NamedAnalyzer("foo", new WhitespaceAnalyzer())); - } - return new Tuple<>(type, randomTestBuilder()); - } - - /** - * parses random suggestion builder via old parseElement method and via - * build, comparing the results for equality - */ - public void testBuild() throws IOException { - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), Settings.EMPTY); - - AnalysisService mockAnalysisService = new AnalysisService(idxSettings, Collections.emptyMap(), Collections.emptyMap(), - Collections.emptyMap(), Collections.emptyMap()) { - @Override - public NamedAnalyzer analyzer(String name) { - return new NamedAnalyzer(name, new WhitespaceAnalyzer()); - } - }; - - for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { - SuggestBuilder suggestBuilder = new 
SuggestBuilder(); - final Tuple mappedFieldTypeSBTuple = randomFieldTypeAndSuggestionBuilder(); - final MapperService mapperService = new MapperService(idxSettings, mockAnalysisService, null, - new IndicesModule().getMapperRegistry(), null) { - @Override - public MappedFieldType fullName(String fullName) { - return mappedFieldTypeSBTuple.v1(); - } - }; - SB suggestionBuilder = mappedFieldTypeSBTuple.v2(); - QueryShardContext mockShardContext = new QueryShardContext(idxSettings, - null, null, mapperService, null, scriptService, null) { - @Override - public MappedFieldType fieldMapper(String name) { - StringFieldMapper.Builder builder = new StringFieldMapper.Builder(name); - return builder.build(new Mapper.BuilderContext(idxSettings.getSettings(), new ContentPath(1))).fieldType(); - } - }; - mockShardContext.setMapUnmappedFieldAsString(true); - suggestBuilder.addSuggestion(randomAsciiOfLength(10), suggestionBuilder); - - if (suggestionBuilder.text() == null) { - // we either need suggestion text or global text - suggestBuilder.setGlobalText(randomAsciiOfLengthBetween(5, 50)); - } - if (suggestionBuilder.text() != null && suggestionBuilder.prefix() != null) { - suggestionBuilder.prefix(null); - } - - XContentBuilder xContentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); - if (randomBoolean()) { - xContentBuilder.prettyPrint(); - } - suggestBuilder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); - - XContentParser parser = XContentHelper.createParser(xContentBuilder.bytes()); - parser.nextToken(); // set cursor to START_OBJECT - SuggestionSearchContext parsedSuggestionSearchContext = parseElement.parseInternal(parser, mockShardContext); - - SuggestionSearchContext buildSuggestSearchContext = suggestBuilder.build(mockShardContext); - assertEquals(parsedSuggestionSearchContext.suggestions().size(), buildSuggestSearchContext.suggestions().size()); - Iterator> iterator = buildSuggestSearchContext.suggestions().entrySet().iterator(); - 
for (Map.Entry entry : parsedSuggestionSearchContext.suggestions().entrySet()) { - Map.Entry other = iterator.next(); - assertEquals(entry.getKey(), other.getKey()); - - SuggestionContext oldSchoolContext = entry.getValue(); - SuggestionContext newSchoolContext = other.getValue(); - assertNotSame(oldSchoolContext, newSchoolContext); - // deep comparison of analyzers is difficult here, but we check they are set or not set - if (oldSchoolContext.getAnalyzer() != null) { - assertNotNull(newSchoolContext.getAnalyzer()); - } else { - assertNull(newSchoolContext.getAnalyzer()); - } - assertEquals(oldSchoolContext.getField(), newSchoolContext.getField()); - assertEquals(oldSchoolContext.getPrefix(), newSchoolContext.getPrefix()); - assertEquals(oldSchoolContext.getRegex(), newSchoolContext.getRegex()); - assertEquals(oldSchoolContext.getShardSize(), newSchoolContext.getShardSize()); - assertEquals(oldSchoolContext.getSize(), newSchoolContext.getSize()); - assertEquals(oldSchoolContext.getSuggester().getClass(), newSchoolContext.getSuggester().getClass()); - assertEquals(oldSchoolContext.getText(), newSchoolContext.getText()); - assertEquals(oldSchoolContext.getClass(), newSchoolContext.getClass()); - - assertSuggestionContext(oldSchoolContext, newSchoolContext); - } - } - } - - /** - * compare two SuggestionContexte implementations for the special suggestion type under test - */ - protected abstract void assertSuggestionContext(SuggestionContext oldSuggestion, SuggestionContext newSuggestion); - private SB mutate(SB firstBuilder) throws IOException { SB mutation = serializedCopy(firstBuilder); assertNotSame(mutation, firstBuilder); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java index 42eb9fc182f4..150db34ff789 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java +++ 
b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java @@ -54,16 +54,6 @@ public class CustomSuggester extends Suggester { - Map options = parser.map(); - CustomSuggestionsContext suggestionContext = new CustomSuggestionsContext(shardContext, options); - suggestionContext.setField((String) options.get("field")); - return suggestionContext; - }; - } - public static class CustomSuggestionsContext extends SuggestionSearchContext.SuggestionContext { public Map options; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java index 9f854150ed42..6551f2370a71 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java @@ -112,34 +112,6 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe return builderAndInfo; } - @Override - protected void assertSuggestionContext(SuggestionContext oldSuggestion, SuggestionContext newSuggestion) { - assertThat(oldSuggestion, instanceOf(CompletionSuggestionContext.class)); - assertThat(newSuggestion, instanceOf(CompletionSuggestionContext.class)); - CompletionSuggestionContext oldCompletionSuggestion = (CompletionSuggestionContext) oldSuggestion; - CompletionSuggestionContext newCompletionSuggestion = (CompletionSuggestionContext) newSuggestion; - assertEquals(oldCompletionSuggestion.getFieldType(), newCompletionSuggestion.getFieldType()); - assertEquals(oldCompletionSuggestion.getPayloadFields(), newCompletionSuggestion.getPayloadFields()); - assertEquals(oldCompletionSuggestion.getFuzzyOptions(), newCompletionSuggestion.getFuzzyOptions()); - assertEquals(oldCompletionSuggestion.getRegexOptions(), newCompletionSuggestion.getRegexOptions()); - 
assertEquals(oldCompletionSuggestion.getQueryContexts(), newCompletionSuggestion.getQueryContexts()); - - } - - @Override - protected Tuple randomFieldTypeAndSuggestionBuilder() { - final BuilderAndInfo builderAndInfo = randomSuggestionBuilderWithContextInfo(); - CompletionFieldMapper.CompletionFieldType type = new CompletionFieldMapper.CompletionFieldType(); - List contextMappings = builderAndInfo.catContexts.stream() - .map(catContext -> new CategoryContextMapping.Builder(catContext).build()) - .collect(Collectors.toList()); - contextMappings.addAll(builderAndInfo.geoContexts.stream() - .map(geoContext -> new GeoContextMapping.Builder(geoContext).build()) - .collect(Collectors.toList())); - type.setContextMappings(new ContextMappings(contextMappings)); - return new Tuple<>(type, builderAndInfo.builder); - } - @Override protected void mutateSpecificParameters(CompletionSuggestionBuilder builder) throws IOException { switch (randomIntBetween(0, 5)) { diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java index 413ee14224d8..4a47be481e0a 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.suggest.phrase; -import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -31,23 +30,10 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.IndexSettings; -import 
org.elasticsearch.index.analysis.AnalysisService; -import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.index.mapper.ContentPath; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.core.StringFieldMapper; -import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType; -import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.IndexSettingsModule; import java.io.IOException; import java.util.Arrays; @@ -147,61 +133,6 @@ public class DirectCandidateGeneratorTests extends ESTestCase{ } } - /** - * test that build() outputs a {@link DirectCandidateGenerator} that is similar to the one - * we would get when parsing the xContent the test generator is rendering out - */ - public void testBuild() throws IOException { - - long start = System.currentTimeMillis(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), Settings.EMPTY); - - AnalysisService mockAnalysisService = new AnalysisService(idxSettings, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()) { - @Override - public NamedAnalyzer analyzer(String name) { - return new NamedAnalyzer(name, new WhitespaceAnalyzer()); - } - }; - - MapperService mockMapperService = new MapperService(idxSettings, mockAnalysisService , null, new IndicesModule().getMapperRegistry(), null) { - @Override - public MappedFieldType fullName(String fullName) { - return new 
StringFieldType(); - } - }; - - QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, mockMapperService, null, null, null) { - @Override - public MappedFieldType fieldMapper(String name) { - TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name); - return builder.build(new Mapper.BuilderContext(idxSettings.getSettings(), new ContentPath(1))).fieldType(); - } - }; - mockShardContext.setMapUnmappedFieldAsString(true); - - for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) { - DirectCandidateGeneratorBuilder generator = randomCandidateGenerator(); - // first, build via DirectCandidateGenerator#build() - DirectCandidateGenerator contextGenerator = generator.build(mockMapperService); - - // second, render random test generator to xContent and parse using - // PhraseSuggestParser - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); - if (randomBoolean()) { - builder.prettyPrint(); - } - generator.toXContent(builder, ToXContent.EMPTY_PARAMS); - XContentParser parser = XContentHelper.createParser(builder.bytes()); - - DirectCandidateGenerator secondGenerator = PhraseSuggestParser.parseCandidateGenerator(parser, - mockMapperService, ParseFieldMatcher.EMPTY); - - // compare their properties - assertNotSame(contextGenerator, secondGenerator); - assertEqualGenerators(contextGenerator, secondGenerator); - } - } - public static void assertEqualGenerators(DirectCandidateGenerator first, DirectCandidateGenerator second) { assertEquals(first.field(), second.field()); assertEquals(first.accuracy(), second.accuracy(), Float.MIN_VALUE); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java index a34eeb298939..36131c80483a 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java +++ 
b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java @@ -160,40 +160,6 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC } } - @Override - protected void assertSuggestionContext(SuggestionContext oldSuggestion, SuggestionContext newSuggestion) { - assertThat(oldSuggestion, instanceOf(PhraseSuggestionContext.class)); - assertThat(newSuggestion, instanceOf(PhraseSuggestionContext.class)); - PhraseSuggestionContext oldPhraseSuggestion = (PhraseSuggestionContext) oldSuggestion; - PhraseSuggestionContext newPhraseSuggestion = (PhraseSuggestionContext) newSuggestion; - assertEquals(oldPhraseSuggestion.confidence(), newPhraseSuggestion.confidence(), Float.MIN_VALUE); - assertEquals(oldPhraseSuggestion.collatePrune(), newPhraseSuggestion.collatePrune()); - assertEquals(oldPhraseSuggestion.gramSize(), newPhraseSuggestion.gramSize()); - assertEquals(oldPhraseSuggestion.realworldErrorLikelyhood(), newPhraseSuggestion.realworldErrorLikelyhood(), Float.MIN_VALUE); - assertEquals(oldPhraseSuggestion.maxErrors(), newPhraseSuggestion.maxErrors(), Float.MIN_VALUE); - assertEquals(oldPhraseSuggestion.separator(), newPhraseSuggestion.separator()); - assertEquals(oldPhraseSuggestion.getTokenLimit(), newPhraseSuggestion.getTokenLimit()); - assertEquals(oldPhraseSuggestion.getRequireUnigram(), newPhraseSuggestion.getRequireUnigram()); - assertEquals(oldPhraseSuggestion.getPreTag(), newPhraseSuggestion.getPreTag()); - assertEquals(oldPhraseSuggestion.getPostTag(), newPhraseSuggestion.getPostTag()); - if (oldPhraseSuggestion.getCollateQueryScript() != null) { - // only assert that we have a compiled script on the other side - assertNotNull(newPhraseSuggestion.getCollateQueryScript()); - } - if (oldPhraseSuggestion.generators() != null) { - assertNotNull(newPhraseSuggestion.generators()); - assertEquals(oldPhraseSuggestion.generators().size(), newPhraseSuggestion.generators().size()); - Iterator secondList = 
newPhraseSuggestion.generators().iterator(); - for (DirectCandidateGenerator candidateGenerator : newPhraseSuggestion.generators()) { - DirectCandidateGeneratorTests.assertEqualGenerators(candidateGenerator, secondList.next()); - } - } - assertEquals(oldPhraseSuggestion.getCollateScriptParams(), newPhraseSuggestion.getCollateScriptParams()); - if (oldPhraseSuggestion.model() != null) { - assertNotNull(newPhraseSuggestion.model()); - } - } - public void testInvalidParameters() throws IOException { // test missing field name try { diff --git a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java index 419253e79e84..5e910905d40b 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java @@ -302,16 +302,6 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas } } - @Override - protected void assertSuggestionContext(SuggestionContext oldSuggestion, SuggestionContext newSuggestion) { - @SuppressWarnings("unchecked") - TermSuggestionContext oldContext = (TermSuggestionContext) oldSuggestion; - @SuppressWarnings("unchecked") - TermSuggestionContext newContext = (TermSuggestionContext) newSuggestion; - assertSpellcheckerSettings(oldContext.getDirectSpellCheckerSettings(), newContext.getDirectSpellCheckerSettings()); - - } - private void assertSpellcheckerSettings(DirectSpellcheckerSettings oldSettings, DirectSpellcheckerSettings newSettings) { final double delta = 0.0d; // make sure the objects aren't the same From 8b26c260d15107a16fc090e18da5190884b57bd6 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 11 Mar 2016 14:53:14 -0800 Subject: [PATCH 196/320] Plugins: Enforce plugin zip does not contain zip entries outside of the unzip dir When unzipping a plugin zip, the zip entries 
are resolved relative to the directory being unzipped into. However, there are currently no checks that the entry name was not absolute, or relatively points outside of the plugin dir. This change adds a check for those two cases. --- .../elasticsearch/plugins/InstallPluginCommand.java | 9 ++++++++- .../plugins/InstallPluginCommandTests.java | 12 ++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index 32c4bf185073..e72eb2100f61 100644 --- a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -250,7 +250,14 @@ class InstallPluginCommand extends Command { } hasEsDir = true; Path targetFile = target.resolve(entry.getName().substring("elasticsearch/".length())); - // TODO: handle name being an absolute path + + // Using the entry name as a path can result in an entry outside of the plugin dir, either if the + // name starts with the root of the filesystem, or it is a relative entry like ../whatever. + // This check attempts to identify both cases by first normalizing the path (which removes foo/..) + // and ensuring the normalized entry is still rooted with the target plugin directory. + if (targetFile.normalize().startsWith(target) == false) { + throw new IOException("Zip contains entry name '" + entry.getName() + "' resolving outside of plugin directory"); + } // be on the safe side: do not rely on that directories are always extracted // before their children (although this makes sense, but is it guaranteed?) 
diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index 514090d9869d..fb69c817f3a8 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -466,6 +466,18 @@ public class InstallPluginCommandTests extends ESTestCase { assertInstallCleaned(env); } + public void testZipRelativeOutsideEntryName() throws Exception { + Path zip = createTempDir().resolve("broken.zip"); + try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(zip))) { + stream.putNextEntry(new ZipEntry("elasticsearch/../blah")); + } + String pluginZip = zip.toUri().toURL().toString(); + IOException e = expectThrows(IOException.class, () -> { + installPlugin(pluginZip, createEnv()); + }); + assertTrue(e.getMessage(), e.getMessage().contains("resolving outside of plugin directory")); + } + // TODO: test batch flag? // TODO: test checksum (need maven/official below) // TODO: test maven, official, and staging install...need tests with fixtures... From b1cf2b2cb3a4df4fb81683f3c3e9c6c8ec38b8da Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Sat, 12 Mar 2016 14:58:30 +0100 Subject: [PATCH 197/320] Moved CONTRIBUTING.md back to the root directory The CONTRIBUTING.md file can be in the root directory or in the .github directory and will still be used for the contributing guidelines on Github. 
Moved back to the root directory so that it is more visible outside Github --- .github/CONTRIBUTING.md => CONTRIBUTING.md | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .github/CONTRIBUTING.md => CONTRIBUTING.md (100%) diff --git a/.github/CONTRIBUTING.md b/CONTRIBUTING.md similarity index 100% rename from .github/CONTRIBUTING.md rename to CONTRIBUTING.md From 121e7c8ca4f7bb1cd3fd887d63d900503307647a Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 10 Mar 2016 16:19:12 +0100 Subject: [PATCH 198/320] Add infrastructure to run REST tests on a multi-version cluster This change adds the infrastructure to run the rest tests on a multi-node cluster that users 2 different minor versions of elasticsearch. It doesn't implement any dedicated BWC tests but rather leverages the existing REST tests. Since we don't have a real version to test against, the tests uses the current version until the first minor / RC is released to ensure the infrastructure works. Relates to #14406 Closes #17072 --- .../elasticsearch/gradle/BuildPlugin.groovy | 6 ++ .../gradle/test/ClusterConfiguration.groovy | 8 +- .../gradle/test/ClusterFormationTasks.groovy | 89 +++++++++++++------ .../elasticsearch/gradle/test/NodeInfo.groovy | 14 +-- .../admin/cluster/node/info/NodeInfo.java | 3 +- .../admin/cluster/node/stats/NodeStats.java | 2 +- .../common/io/stream/StreamInput.java | 6 +- .../common/io/stream/Writeable.java | 13 +++ .../org/elasticsearch/ingest/IngestStats.java | 38 ++++---- .../elasticsearch/ingest/core/IngestInfo.java | 27 +++--- .../ingest/core/ProcessorInfo.java | 17 ++-- .../ingest/IngestStatsTests.java | 69 ++++++++++++++ .../nodesinfo/NodeInfoStreamingTests.java | 3 +- .../test/ingest_grok/10_basic.yaml | 15 ++++ qa/backwards-5.0/build.gradle | 23 +++++ .../backwards/MultiNodeBackwardsIT.java | 41 +++++++++ .../rest-api-spec/test/ingest/10_crud.yaml | 25 ------ .../rest-api-spec/test/ingest/70_bulk.yaml | 10 ++- settings.gradle | 1 + 19 files changed, 298 
insertions(+), 112 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java create mode 100644 qa/backwards-5.0/build.gradle create mode 100644 qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/MultiNodeBackwardsIT.java diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index ca78157bcf2e..f80a98174d0d 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -307,6 +307,12 @@ class BuildPlugin implements Plugin { /** Adds repositores used by ES dependencies */ static void configureRepositories(Project project) { RepositoryHandler repos = project.repositories + if (System.getProperty("repos.mavenlocal") != null) { + // with -Drepos.mavenlocal=true we can force checking the local .m2 repo which is + // useful for development ie. bwc tests where we install stuff in the local repository + // such that we don't have to pass hardcoded files to gradle + repos.mavenLocal() + } repos.mavenCentral() repos.maven { name 'sonatype-snapshots' diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy index 3e8b62253294..19b41cc8cdeb 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy @@ -23,8 +23,6 @@ import org.gradle.api.Project import org.gradle.api.file.FileCollection import org.gradle.api.tasks.Input -import java.time.LocalDateTime - /** Configuration for an elasticsearch cluster, used for integration tests. 
*/ class ClusterConfiguration { @@ -34,6 +32,12 @@ class ClusterConfiguration { @Input int numNodes = 1 + @Input + int numBwcNodes = 0 + + @Input + String bwcVersion = null + @Input int httpPort = 0 diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 59a27ea36bd2..97073c67cfe6 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -53,11 +53,47 @@ class ClusterFormationTasks { // no need to add cluster formation tasks if the task won't run! return } - configureDistributionDependency(project, config.distribution) - List startTasks = [] + // first we remove everything in the cluster directory to ensure there are no leftovers in repos or anything + // this also forces unpacking of nodes and wipes logfiles etc. to prevent leftovers along those lines + // in theory this should not be necessary but repositories are only deleted in the cluster-state and not on-disk + // such that snapshots survive failures / test runs and there is no simple way today to fix that. + Task cleanup = project.tasks.create(name: "${task.name}#prepareCluster.clean", type: Delete, dependsOn: task.dependsOn.collect()) { + delete new File(project.buildDir, "cluster"); + } + List startTasks = [cleanup] List nodes = [] + if (config.numNodes < config.numBwcNodes) { + throw new GradleException("numNodes must be >= numBwcNodes [${config.numNodes} < ${config.numBwcNodes}]") + } + if (config.numBwcNodes > 0 && config.bwcVersion == null) { + throw new GradleException("bwcVersion must not be null if numBwcNodes is > 0") + } + // this is our current version distribution configuration we use for all kinds of REST tests etc. 
+ project.configurations { + elasticsearchDistro + } + configureDistributionDependency(project, config.distribution, project.configurations.elasticsearchDistro, VersionProperties.elasticsearch) + if (config.bwcVersion != null && config.numBwcNodes > 0) { + // if we have a cluster that has a BWC cluster we also need to configure a dependency on the BWC version + // this version uses the same distribution etc. and only differs in the version we depend on. + // from here on everything else works the same as if it's the current version, we fetch the BWC version + // from mirrors using gradles built-in mechanism etc. + project.configurations { + elasticsearchBwcDistro + } + configureDistributionDependency(project, config.distribution, project.configurations.elasticsearchBwcDistro, config.bwcVersion) + } + for (int i = 0; i < config.numNodes; ++i) { - NodeInfo node = new NodeInfo(config, i, project, task) + // we start N nodes and out of these N nodes there might be M bwc nodes. + // for each of those nodes we might have a different configuratioon + String elasticsearchVersion = VersionProperties.elasticsearch + Configuration configuration = project.configurations.elasticsearchDistro + if (i < config.numBwcNodes) { + elasticsearchVersion = config.bwcVersion + configuration = project.configurations.elasticsearchBwcDistro + } + NodeInfo node = new NodeInfo(config, i, project, task, elasticsearchVersion) if (i == 0) { if (config.seedNodePortsFile != null) { // we might allow this in the future to be set but for now we are the only authority to set this! 
@@ -66,7 +102,7 @@ class ClusterFormationTasks { config.seedNodePortsFile = node.transportPortsFile; } nodes.add(node) - startTasks.add(configureNode(project, task, node)) + startTasks.add(configureNode(project, task, cleanup, node, configuration)) } Task wait = configureWaitTask("${task.name}#wait", project, nodes, startTasks) @@ -77,20 +113,14 @@ class ClusterFormationTasks { } /** Adds a dependency on the given distribution */ - static void configureDistributionDependency(Project project, String distro) { - String elasticsearchVersion = VersionProperties.elasticsearch + static void configureDistributionDependency(Project project, String distro, Configuration configuration, String elasticsearchVersion) { String packaging = distro if (distro == 'tar') { packaging = 'tar.gz' } else if (distro == 'integ-test-zip') { packaging = 'zip' } - project.configurations { - elasticsearchDistro - } - project.dependencies { - elasticsearchDistro "org.elasticsearch.distribution.${distro}:elasticsearch:${elasticsearchVersion}@${packaging}" - } + project.dependencies.add(configuration.name, "org.elasticsearch.distribution.${distro}:elasticsearch:${elasticsearchVersion}@${packaging}") } /** @@ -110,10 +140,10 @@ class ClusterFormationTasks { * * @return a task which starts the node. 
*/ - static Task configureNode(Project project, Task task, NodeInfo node) { + static Task configureNode(Project project, Task task, Object dependsOn, NodeInfo node, Configuration configuration) { // tasks are chained so their execution order is maintained - Task setup = project.tasks.create(name: taskName(task, node, 'clean'), type: Delete, dependsOn: task.dependsOn.collect()) { + Task setup = project.tasks.create(name: taskName(task, node, 'clean'), type: Delete, dependsOn: dependsOn) { delete node.homeDir delete node.cwd doLast { @@ -122,7 +152,7 @@ class ClusterFormationTasks { } setup = configureCheckPreviousTask(taskName(task, node, 'checkPrevious'), project, setup, node) setup = configureStopTask(taskName(task, node, 'stopPrevious'), project, setup, node) - setup = configureExtractTask(taskName(task, node, 'extract'), project, setup, node) + setup = configureExtractTask(taskName(task, node, 'extract'), project, setup, node, configuration) setup = configureWriteConfigTask(taskName(task, node, 'configure'), project, setup, node) setup = configureExtraConfigFilesTask(taskName(task, node, 'extraConfig'), project, setup, node) setup = configureCopyPluginsTask(taskName(task, node, 'copyPlugins'), project, setup, node) @@ -158,27 +188,28 @@ class ClusterFormationTasks { } /** Adds a task to extract the elasticsearch distribution */ - static Task configureExtractTask(String name, Project project, Task setup, NodeInfo node) { - List extractDependsOn = [project.configurations.elasticsearchDistro, setup] - /* project.configurations.elasticsearchDistro.singleFile will be an - external artifact if this is being run by a plugin not living in the - elasticsearch source tree. If this is a plugin built in the - elasticsearch source tree or this is a distro in the elasticsearch - source tree then this should be the version of elasticsearch built - by the source tree. If it isn't then Bad Things(TM) will happen. 
*/ + static Task configureExtractTask(String name, Project project, Task setup, NodeInfo node, Configuration configuration) { + List extractDependsOn = [configuration, setup] + /* configuration.singleFile will be an external artifact if this is being run by a plugin not living in the + elasticsearch source tree. If this is a plugin built in the elasticsearch source tree or this is a distro in + the elasticsearch source tree then this should be the version of elasticsearch built by the source tree. + If it isn't then Bad Things(TM) will happen. */ Task extract + switch (node.config.distribution) { case 'integ-test-zip': case 'zip': extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) { - from { project.zipTree(project.configurations.elasticsearchDistro.singleFile) } + from { + project.zipTree(configuration.singleFile) + } into node.baseDir } break; case 'tar': extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) { from { - project.tarTree(project.resources.gzip(project.configurations.elasticsearchDistro.singleFile)) + project.tarTree(project.resources.gzip(configuration.singleFile)) } into node.baseDir } @@ -187,7 +218,7 @@ class ClusterFormationTasks { File rpmDatabase = new File(node.baseDir, 'rpm-database') File rpmExtracted = new File(node.baseDir, 'rpm-extracted') /* Delay reading the location of the rpm file until task execution */ - Object rpm = "${ -> project.configurations.elasticsearchDistro.singleFile}" + Object rpm = "${ -> configuration.singleFile}" extract = project.tasks.create(name: name, type: LoggedExec, dependsOn: extractDependsOn) { commandLine 'rpm', '--badreloc', '--nodeps', '--noscripts', '--notriggers', '--dbpath', rpmDatabase, @@ -202,7 +233,7 @@ class ClusterFormationTasks { case 'deb': /* Delay reading the location of the deb file until task execution */ File debExtracted = new File(node.baseDir, 'deb-extracted') - Object deb = "${ -> 
project.configurations.elasticsearchDistro.singleFile}" + Object deb = "${ -> configuration.singleFile}" extract = project.tasks.create(name: name, type: LoggedExec, dependsOn: extractDependsOn) { commandLine 'dpkg-deb', '-x', deb, debExtracted doFirst { @@ -221,8 +252,8 @@ class ClusterFormationTasks { Map esConfig = [ 'cluster.name' : node.clusterName, 'pidfile' : node.pidFile, - 'path.repo' : "${node.homeDir}/repo", - 'path.shared_data' : "${node.homeDir}/../", + 'path.repo' : "${node.baseDir}/../repo", + 'path.shared_data' : "${node.baseDir}/../", // Define a node attribute so we can test that it exists 'node.testattr' : 'test', 'repositories.url.allowed_urls': 'http://snapshot.test*' diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index b41b18220000..5dcdcbed5f81 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -89,14 +89,14 @@ class NodeInfo { ByteArrayOutputStream buffer = new ByteArrayOutputStream() /** Creates a node to run as part of a cluster for the given task */ - NodeInfo(ClusterConfiguration config, int nodeNum, Project project, Task task) { + NodeInfo(ClusterConfiguration config, int nodeNum, Project project, Task task, String nodeVersion) { this.config = config this.nodeNum = nodeNum clusterName = "${task.path.replace(':', '_').substring(1)}" baseDir = new File(project.buildDir, "cluster/${task.name} node${nodeNum}") pidFile = new File(baseDir, 'es.pid') - homeDir = homeDir(baseDir, config.distribution) - confDir = confDir(baseDir, config.distribution) + homeDir = homeDir(baseDir, config.distribution, nodeVersion) + confDir = confDir(baseDir, config.distribution, nodeVersion) configFile = new File(confDir, 'elasticsearch.yml') // even for rpm/deb, the logs are under home because we dont start with real services File logsDir = 
new File(homeDir, 'logs') @@ -181,13 +181,13 @@ class NodeInfo { } /** Returns the directory elasticsearch home is contained in for the given distribution */ - static File homeDir(File baseDir, String distro) { + static File homeDir(File baseDir, String distro, String nodeVersion) { String path switch (distro) { case 'integ-test-zip': case 'zip': case 'tar': - path = "elasticsearch-${VersionProperties.elasticsearch}" + path = "elasticsearch-${nodeVersion}" break case 'rpm': case 'deb': @@ -199,12 +199,12 @@ class NodeInfo { return new File(baseDir, path) } - static File confDir(File baseDir, String distro) { + static File confDir(File baseDir, String distro, String nodeVersion) { switch (distro) { case 'integ-test-zip': case 'zip': case 'tar': - return new File(homeDir(baseDir, distro), 'config') + return new File(homeDir(baseDir, distro, nodeVersion), 'config') case 'rpm': case 'deb': return new File(baseDir, "${distro}-extracted/etc/elasticsearch") diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java index 11c542863b5a..87ec2d052ab8 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java @@ -231,8 +231,7 @@ public class NodeInfo extends BaseNodeResponse { plugins.readFrom(in); } if (in.readBoolean()) { - ingest = new IngestInfo(); - ingest.readFrom(in); + ingest = new IngestInfo(in); } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java index c1d4bb78ba3c..1d62fc06f0fa 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java @@ -235,7 +235,7 @@ public class 
NodeStats extends BaseNodeResponse implements ToXContent { breaker = AllCircuitBreakerStats.readOptionalAllCircuitBreakerStats(in); scriptStats = in.readOptionalStreamable(ScriptStats::new); discoveryStats = in.readOptionalStreamable(() -> new DiscoveryStats(null)); - ingestStats = in.readOptionalWritable(IngestStats.PROTO); + ingestStats = in.readOptionalWritable(IngestStats::new); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index e84766d021bb..3f40a7dbbe7c 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -65,6 +65,7 @@ import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.function.Function; import java.util.function.Supplier; import static org.elasticsearch.ElasticsearchException.readException; @@ -553,9 +554,10 @@ public abstract class StreamInput extends InputStream { } } - public T readOptionalWritable(T prototype) throws IOException { + + public T readOptionalWritable(Writeable.IOFunction supplier) throws IOException { if (readBoolean()) { - return (T) prototype.readFrom(this); + return supplier.apply(this); } else { return null; } diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/Writeable.java b/core/src/main/java/org/elasticsearch/common/io/stream/Writeable.java index 9ff3de736c5c..50cbf301be40 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/Writeable.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/Writeable.java @@ -37,4 +37,17 @@ public interface Writeable extends StreamableReader { * Write this into the {@linkplain StreamOutput}. */ void writeTo(StreamOutput out) throws IOException; + + @FunctionalInterface + interface IOFunction { + + /** + * Applies this function to the given argument. 
+ * + * @param t the function argument + * @return the function result + */ + R apply(T t) throws IOException; + } + } diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestStats.java b/core/src/main/java/org/elasticsearch/ingest/IngestStats.java index a59ddce4fec7..9ccc4126763e 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestStats.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestStats.java @@ -31,12 +31,18 @@ import java.util.Map; import java.util.concurrent.TimeUnit; public class IngestStats implements Writeable, ToXContent { - - public final static IngestStats PROTO = new IngestStats(null, null); - private final Stats totalStats; private final Map statsPerPipeline; + public IngestStats(StreamInput in) throws IOException { + this.totalStats = new Stats(in); + int size = in.readVInt(); + this.statsPerPipeline = new HashMap<>(size); + for (int i = 0; i < size; i++) { + statsPerPipeline.put(in.readString(), new Stats(in)); + } + } + public IngestStats(Stats totalStats, Map statsPerPipeline) { this.totalStats = totalStats; this.statsPerPipeline = statsPerPipeline; @@ -58,16 +64,7 @@ public class IngestStats implements Writeable, ToXContent { @Override public IngestStats readFrom(StreamInput in) throws IOException { - Stats totalStats = Stats.PROTO.readFrom(in); - totalStats.readFrom(in); - int size = in.readVInt(); - Map statsPerPipeline = new HashMap<>(size); - for (int i = 0; i < size; i++) { - Stats stats = Stats.PROTO.readFrom(in); - statsPerPipeline.put(in.readString(), stats); - stats.readFrom(in); - } - return new IngestStats(totalStats, statsPerPipeline); + return new IngestStats(in); } @Override @@ -99,13 +96,18 @@ public class IngestStats implements Writeable, ToXContent { public static class Stats implements Writeable, ToXContent { - private final static Stats PROTO = new Stats(0, 0, 0, 0); - private final long ingestCount; private final long ingestTimeInMillis; private final long ingestCurrent; private final long 
ingestFailedCount; + public Stats(StreamInput in) throws IOException { + ingestCount = in.readVLong(); + ingestTimeInMillis = in.readVLong(); + ingestCurrent = in.readVLong(); + ingestFailedCount = in.readVLong(); + } + public Stats(long ingestCount, long ingestTimeInMillis, long ingestCurrent, long ingestFailedCount) { this.ingestCount = ingestCount; this.ingestTimeInMillis = ingestTimeInMillis; @@ -144,11 +146,7 @@ public class IngestStats implements Writeable, ToXContent { @Override public Stats readFrom(StreamInput in) throws IOException { - long ingestCount = in.readVLong(); - long ingestTimeInMillis = in.readVLong(); - long ingestCurrent = in.readVLong(); - long ingestFailedCount = in.readVLong(); - return new Stats(ingestCount, ingestTimeInMillis, ingestCurrent, ingestFailedCount); + return new Stats(in); } @Override diff --git a/core/src/main/java/org/elasticsearch/ingest/core/IngestInfo.java b/core/src/main/java/org/elasticsearch/ingest/core/IngestInfo.java index 8625e1d8884d..d128732203fe 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/IngestInfo.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/IngestInfo.java @@ -22,6 +22,7 @@ package org.elasticsearch.ingest.core; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -32,17 +33,22 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Objects; import java.util.Set; +import java.util.TreeSet; -public class IngestInfo implements Streamable, ToXContent { +public class IngestInfo implements Writeable, ToXContent { - private Set processors; + private final Set processors; - public IngestInfo() { - processors = Collections.emptySet(); + public IngestInfo(StreamInput in) throws IOException { 
+ this(Collections.emptyList()); + final int size = in.readVInt(); + for (int i = 0; i < size; i++) { + processors.add(new ProcessorInfo(in)); + } } public IngestInfo(List processors) { - this.processors = new LinkedHashSet<>(processors); + this.processors = new TreeSet<>(processors); // we use a treeset here to have a test-able / predictable order } public Iterable getProcessors() { @@ -54,15 +60,8 @@ public class IngestInfo implements Streamable, ToXContent { } @Override - public void readFrom(StreamInput in) throws IOException { - int size = in.readVInt(); - Set processors = new LinkedHashSet<>(size); - for (int i = 0; i < size; i++) { - ProcessorInfo info = new ProcessorInfo(); - info.readFrom(in); - processors.add(info); - } - this.processors = processors; + public IngestInfo readFrom(StreamInput in) throws IOException { + return new IngestInfo(in); } @Override diff --git a/core/src/main/java/org/elasticsearch/ingest/core/ProcessorInfo.java b/core/src/main/java/org/elasticsearch/ingest/core/ProcessorInfo.java index 64c3d19719b6..f652b1829192 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/ProcessorInfo.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/ProcessorInfo.java @@ -22,16 +22,18 @@ package org.elasticsearch.ingest.core; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; -public class ProcessorInfo implements Streamable, ToXContent { +public class ProcessorInfo implements Writeable, ToXContent, Comparable { - private String type; + private final String type; - ProcessorInfo() { + public ProcessorInfo(StreamInput input) throws IOException { + type = input.readString(); } public ProcessorInfo(String type) { @@ -46,8 +48,8 @@ 
public class ProcessorInfo implements Streamable, ToXContent { } @Override - public void readFrom(StreamInput in) throws IOException { - this.type = in.readString(); + public ProcessorInfo readFrom(StreamInput in) throws IOException { + return new ProcessorInfo(in); } @Override @@ -78,4 +80,9 @@ public class ProcessorInfo implements Streamable, ToXContent { public int hashCode() { return type.hashCode(); } + + @Override + public int compareTo(ProcessorInfo o) { + return type.compareTo(o.type); + } } diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java new file mode 100644 index 000000000000..e7064b7e4492 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; + +public class IngestStatsTests extends ESTestCase { + + public void testSerialization() throws IOException { + IngestStats.Stats total = new IngestStats.Stats(5, 10, 20, 30); + IngestStats.Stats foo = new IngestStats.Stats(50, 100, 200, 300); + IngestStats ingestStats = new IngestStats(total, Collections.singletonMap("foo", foo)); + IngestStats serialize = serialize(ingestStats); + assertNotSame(serialize, ingestStats); + assertNotSame(serialize.getTotalStats(), total); + assertEquals(total.getIngestCount(), serialize.getTotalStats().getIngestCount()); + assertEquals(total.getIngestFailedCount(), serialize.getTotalStats().getIngestFailedCount()); + assertEquals(total.getIngestTimeInMillis(), serialize.getTotalStats().getIngestTimeInMillis()); + assertEquals(total.getIngestCurrent(), serialize.getTotalStats().getIngestCurrent()); + + assertEquals(ingestStats.getStatsPerPipeline().size(), 1); + assertTrue(ingestStats.getStatsPerPipeline().containsKey("foo")); + + Map left = ingestStats.getStatsPerPipeline(); + Map right = serialize.getStatsPerPipeline(); + + assertEquals(right.size(), 1); + assertTrue(right.containsKey("foo")); + assertEquals(left.size(), 1); + assertTrue(left.containsKey("foo")); + IngestStats.Stats leftStats = left.get("foo"); + IngestStats.Stats rightStats = right.get("foo"); + assertEquals(leftStats.getIngestCount(), rightStats.getIngestCount()); + assertEquals(leftStats.getIngestFailedCount(), rightStats.getIngestFailedCount()); + assertEquals(leftStats.getIngestTimeInMillis(), rightStats.getIngestTimeInMillis()); + assertEquals(leftStats.getIngestCurrent(), rightStats.getIngestCurrent()); + } + + private 
T serialize(Writeable writeable) throws IOException { + BytesStreamOutput out = new BytesStreamOutput(); + writeable.writeTo(out); + StreamInput in = StreamInput.wrap(out.bytes()); + return writeable.readFrom(in); + } +} diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java index 2a845303675d..bb56d1391118 100644 --- a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java +++ b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java @@ -47,6 +47,7 @@ import org.elasticsearch.transport.TransportInfo; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -137,7 +138,7 @@ public class NodeInfoStreamingTests extends ESTestCase { PluginsAndModules plugins = new PluginsAndModules(); plugins.addModule(DummyPluginInfo.INSTANCE); plugins.addPlugin(DummyPluginInfo.INSTANCE); - IngestInfo ingestInfo = new IngestInfo(); + IngestInfo ingestInfo = new IngestInfo(Collections.emptyList()); return new NodeInfo(VersionUtils.randomVersion(random()), build, node, serviceAttributes, settings, osInfo, process, jvm, threadPoolInfo, transport, htttpInfo, plugins, ingestInfo); } } diff --git a/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/10_basic.yaml b/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/10_basic.yaml index 5c0cca3772e3..ebb310ecf7a2 100644 --- a/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/10_basic.yaml +++ b/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/10_basic.yaml @@ -9,3 +9,18 @@ nodes.info: {} - match: { nodes.$master.modules.0.name: ingest-grok } + - match: { nodes.$master.ingest.processors.0.type: append } + - match: { nodes.$master.ingest.processors.1.type: convert } + - match: { nodes.$master.ingest.processors.2.type: 
date } + - match: { nodes.$master.ingest.processors.3.type: fail } + - match: { nodes.$master.ingest.processors.4.type: foreach } + - match: { nodes.$master.ingest.processors.5.type: grok } + - match: { nodes.$master.ingest.processors.6.type: gsub } + - match: { nodes.$master.ingest.processors.7.type: join } + - match: { nodes.$master.ingest.processors.8.type: lowercase } + - match: { nodes.$master.ingest.processors.9.type: remove } + - match: { nodes.$master.ingest.processors.10.type: rename } + - match: { nodes.$master.ingest.processors.11.type: set } + - match: { nodes.$master.ingest.processors.12.type: split } + - match: { nodes.$master.ingest.processors.13.type: trim } + - match: { nodes.$master.ingest.processors.14.type: uppercase } diff --git a/qa/backwards-5.0/build.gradle b/qa/backwards-5.0/build.gradle new file mode 100644 index 000000000000..93d361c989cb --- /dev/null +++ b/qa/backwards-5.0/build.gradle @@ -0,0 +1,23 @@ +apply plugin: 'elasticsearch.rest-test' + +/* This project runs the core REST tests against a 2 node cluster where one of the nodes has a different minor. + * Since we don't have a version to test against we currently use the hardcoded snapshot to basically run + * against ourselves. 
To test that using a different version works, go into distribution/zip and execute: + * gradle clean install -Dbuild.snapshot=false + * + * This installs the release-build into a local .m2 repository, then change this version here to: + * bwcVersion = "5.0.0" + * + * now you can run the bwc tests with: + * gradle check -Drepos.mavenlocal=true + * + * (-Drepos.mavenlocal=true will force gradle to look for the zip distribution in the local .m2 repository) + */ +integTest { + includePackaged = true + cluster { + numNodes = 2 + numBwcNodes = 1 + bwcVersion = "5.0.0-SNAPSHOT" // this is the same as the current version until we release the first RC + } +} diff --git a/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/MultiNodeBackwardsIT.java b/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/MultiNodeBackwardsIT.java new file mode 100644 index 000000000000..d5094cf09f00 --- /dev/null +++ b/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/MultiNodeBackwardsIT.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.smoketest; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.RestTestCandidate; +import org.elasticsearch.test.rest.parser.RestTestParseException; + +import java.io.IOException; + +public class MultiNodeBackwardsIT extends ESRestTestCase { + + public MultiNodeBackwardsIT(RestTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws IOException, RestTestParseException { + return createParameters(0, 1); + } +} + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml index ced2e9e4850a..b494161aff11 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml @@ -1,28 +1,3 @@ ---- -"Check availability of default processors": - - do: - cluster.state: {} - - - set: {master_node: master} - - - do: - nodes.info: {} - - - match: { nodes.$master.ingest.processors.0.type: date } - - match: { nodes.$master.ingest.processors.1.type: uppercase } - - match: { nodes.$master.ingest.processors.2.type: set } - - match: { nodes.$master.ingest.processors.3.type: lowercase } - - match: { nodes.$master.ingest.processors.4.type: gsub } - - match: { nodes.$master.ingest.processors.5.type: convert } - - match: { nodes.$master.ingest.processors.6.type: remove } - - match: { nodes.$master.ingest.processors.7.type: fail } - - match: { nodes.$master.ingest.processors.8.type: foreach } - - match: { nodes.$master.ingest.processors.9.type: split } - - match: { nodes.$master.ingest.processors.10.type: trim } - - match: { nodes.$master.ingest.processors.11.type: rename } - - match: { nodes.$master.ingest.processors.12.type: 
join } - - match: { nodes.$master.ingest.processors.13.type: append } - --- "Test basic pipeline crud": - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/70_bulk.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/70_bulk.yaml index 63cb42f10352..7a3515b2ed2d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/70_bulk.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/70_bulk.yaml @@ -76,11 +76,12 @@ setup: - do: nodes.stats: metric: [ ingest ] - - gte: {nodes.$master.ingest.total.count: 1} + #we can't assert anything here since we might have more than one node in the cluster + - gte: {nodes.$master.ingest.total.count: 0} - gte: {nodes.$master.ingest.total.failed: 0} - gte: {nodes.$master.ingest.total.time_in_millis: 0} - match: {nodes.$master.ingest.total.current: 0} - - gte: {nodes.$master.ingest.pipelines.pipeline1.count: 1} + - gte: {nodes.$master.ingest.pipelines.pipeline1.count: 0} - match: {nodes.$master.ingest.pipelines.pipeline1.failed: 0} - gte: {nodes.$master.ingest.pipelines.pipeline1.time_in_millis: 0} - match: {nodes.$master.ingest.pipelines.pipeline1.current: 0} @@ -113,11 +114,12 @@ setup: - do: nodes.stats: metric: [ ingest ] - - gte: {nodes.$master.ingest.total.count: 1} + #we can't assert anything here since we might have more than one node in the cluster + - gte: {nodes.$master.ingest.total.count: 0} - gte: {nodes.$master.ingest.total.failed: 0} - gte: {nodes.$master.ingest.total.time_in_millis: 0} - match: {nodes.$master.ingest.total.current: 0} - - match: {nodes.$master.ingest.pipelines.pipeline2.count: 1} + - gte: {nodes.$master.ingest.pipelines.pipeline2.count: 0} - match: {nodes.$master.ingest.pipelines.pipeline2.failed: 0} - gte: {nodes.$master.ingest.pipelines.pipeline2.time_in_millis: 0} - match: {nodes.$master.ingest.pipelines.pipeline2.current: 0} diff --git a/settings.gradle b/settings.gradle index b1bb374fff1d..d03cac653eee 100644 --- a/settings.gradle 
+++ b/settings.gradle @@ -39,6 +39,7 @@ List projects = [ 'plugins:repository-s3', 'plugins:jvm-example', 'plugins:store-smb', + 'qa:backwards-5.0', 'qa:evil-tests', 'qa:smoke-test-client', 'qa:smoke-test-multinode', From 625695a92a3fc4c2850cd5d11ac5e739591beffc Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Sun, 13 Mar 2016 14:06:46 +0100 Subject: [PATCH 199/320] [TEST] MockRepository should also unblock repositories that are not blocked yet --- .../snapshots/AbstractSnapshotIntegTestCase.java | 8 ++++---- .../snapshots/SharedClusterSnapshotRestoreIT.java | 4 ++-- .../snapshots/mockstore/MockRepository.java | 14 ++++++-------- 3 files changed, 12 insertions(+), 14 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 5ab6b5855c4b..dc803a464124 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -149,15 +149,15 @@ public abstract class AbstractSnapshotIntegTestCase extends ESIntegTestCase { } } - public void waitForBlockOnAllDataNodes(String repository, TimeValue timeout) throws InterruptedException { + public void waitForBlockOnAnyDataNode(String repository, TimeValue timeout) throws InterruptedException { if (false == awaitBusy(() -> { for(RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) { MockRepository mockRepository = (MockRepository) repositoriesService.repository(repository); - if (mockRepository.blocked() == false) { - return false; + if (mockRepository.blocked()) { + return true; } } - return true; + return false; }, timeout.millis(), TimeUnit.MILLISECONDS)) { fail("Timeout waiting for repository block on any data node!!!"); } diff --git 
a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 00e4d5909910..5dc6d59692bc 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -1860,7 +1860,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas if (initBlocking) { waitForBlock(internalCluster().getMasterName(), "test-repo", TimeValue.timeValueMinutes(1)); } else { - waitForBlockOnAllDataNodes("test-repo", TimeValue.timeValueMinutes(1)); + waitForBlockOnAnyDataNode("test-repo", TimeValue.timeValueMinutes(1)); } try { if (allowPartial) { @@ -1957,7 +1957,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .execute(); logger.info("--> waiting for block to kick in"); - waitForBlockOnAllDataNodes("test-repo", TimeValue.timeValueMinutes(1)); + waitForBlockOnAnyDataNode("test-repo", TimeValue.timeValueMinutes(1)); logger.info("--> close index while restore is running"); try { diff --git a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java index 7a6b327ff899..3b5bde99f55f 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java +++ b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java @@ -176,14 +176,12 @@ public class MockRepository extends FsRepository { } public synchronized void unblockExecution() { - if (blocked) { - blocked = false; - // Clean blocking flags, so we wouldn't try to block again - blockOnDataFiles = false; - blockOnControlFiles = false; - blockOnInitialization = false; - this.notifyAll(); - } + blocked = false; + // Clean blocking flags, so we wouldn't try to block again + blockOnDataFiles = false; + 
blockOnControlFiles = false; + blockOnInitialization = false; + this.notifyAll(); } public boolean blocked() { From e9e1e2599845e7ebfa13d73e510890c97d704858 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Sun, 13 Mar 2016 15:01:09 +0100 Subject: [PATCH 200/320] Fix after merge with master --- .../org/elasticsearch/cluster/NodeConnectionsService.java | 6 +++++- .../elasticsearch/cluster/node/DiscoveryNodeService.java | 3 ++- .../elasticsearch/common/settings/IndexScopedSettings.java | 4 ++-- .../java/org/elasticsearch/common/settings/Setting.java | 6 +++--- 4 files changed, 12 insertions(+), 7 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/NodeConnectionsService.java b/core/src/main/java/org/elasticsearch/cluster/NodeConnectionsService.java index cce25652ed76..698f9d1090cc 100644 --- a/core/src/main/java/org/elasticsearch/cluster/NodeConnectionsService.java +++ b/core/src/main/java/org/elasticsearch/cluster/NodeConnectionsService.java @@ -35,6 +35,10 @@ import org.elasticsearch.transport.TransportService; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ScheduledFuture; +import static org.elasticsearch.common.settings.Setting.Property; +import static org.elasticsearch.common.settings.Setting.positiveTimeSetting; + + /** * This component is responsible for connecting to nodes once they are added to the cluster state, and disconnect when they are * removed. Also, it periodically checks that all connections are still open and if needed restores them. 
@@ -45,7 +49,7 @@ import java.util.concurrent.ScheduledFuture; public class NodeConnectionsService extends AbstractLifecycleComponent { public static final Setting CLUSTER_NODE_RECONNECT_INTERVAL_SETTING = - Setting.positiveTimeSetting("cluster.nodes.reconnect_interval", TimeValue.timeValueSeconds(10), false, Setting.Scope.CLUSTER); + positiveTimeSetting("cluster.nodes.reconnect_interval", TimeValue.timeValueSeconds(10), Property.NodeScope); private final ThreadPool threadPool; private final TransportService transportService; diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodeService.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodeService.java index 47c0e0052d3b..ccd30a99e9c6 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodeService.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodeService.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ -40,7 +41,7 @@ public class DiscoveryNodeService extends AbstractComponent { public static final Setting NODE_ID_SEED_SETTING = // don't use node.id.seed so it won't be seen as an attribute - Setting.longSetting("node_id.seed", 0L, Long.MIN_VALUE, false, Setting.Scope.CLUSTER); + Setting.longSetting("node_id.seed", 0L, Long.MIN_VALUE, Property.NodeScope); private final List customAttributesProviders = new CopyOnWriteArrayList<>(); private final Version version; diff --git a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index 983bb27f5908..da6c34bdf4ae 100644 --- 
a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -137,14 +137,14 @@ public final class IndexScopedSettings extends AbstractScopedSettings { EngineConfig.INDEX_CODEC_SETTING, IndexWarmer.INDEX_NORMS_LOADING_SETTING, // validate that built-in similarities don't get redefined - Setting.groupSetting("index.similarity.", Property.IndexScope, (s) -> { + Setting.groupSetting("index.similarity.", (s) -> { Map groups = s.getAsGroups(); for (String key : SimilarityService.BUILT_IN.keySet()) { if (groups.containsKey(key)) { throw new IllegalArgumentException("illegal value for [index.similarity."+ key + "] cannot redefine built-in similarity"); } } - }), // this allows similarity settings to be passed + }, Property.IndexScope), // this allows similarity settings to be passed Setting.groupSetting("index.analysis.", Property.IndexScope) // this allows analysis settings to be passed ))); diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 6c97091fb6f0..f6f2d28b9211 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -586,10 +586,10 @@ public class Setting extends ToXContentToBytes { throw new ElasticsearchException(ex); } } - public static Setting groupSetting(String key, boolean dynamic, Scope scope) { - return groupSetting(key, dynamic, scope, (s) -> {}); - } public static Setting groupSetting(String key, Property... properties) { + return groupSetting(key, (s) -> {}, properties); + } + public static Setting groupSetting(String key, Consumer validator, Property... 
properties) { return new Setting(new GroupKey(key), (s) -> "", (s) -> null, properties) { @Override public boolean isGroupSetting() { From 25531b7299ff19b56ed939478332b75a4c574dd9 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Sun, 13 Mar 2016 15:27:54 +0100 Subject: [PATCH 201/320] Update after last review We check for null. Test added as well. --- .../org/elasticsearch/common/settings/Setting.java | 7 ++++++- .../elasticsearch/common/settings/SettingTests.java | 12 ++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index f6f2d28b9211..f0e1b2e64ea9 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -113,6 +113,8 @@ public class Setting extends ToXContentToBytes { private final Function parser; private final EnumSet properties; + private static final EnumSet EMPTY_PROPERTIES = EnumSet.noneOf(Property.class); + /** * Creates a new Setting instance. When no scope is provided, we default to {@link Property#NodeScope}. * @param key the settings key for this setting. 
@@ -125,8 +127,11 @@ public class Setting extends ToXContentToBytes { this.key = key; this.defaultValue = defaultValue; this.parser = parser; + if (properties == null) { + throw new IllegalArgumentException("properties can not be null for setting [" + key + "]"); + } if (properties.length == 0) { - this.properties = EnumSet.noneOf(Property.class); + this.properties = EMPTY_PROPERTIES; } else { this.properties = EnumSet.copyOf(Arrays.asList(properties)); } diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 1c1f06f5914f..14fdcb1e0ac4 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -445,4 +445,16 @@ public class SettingTests extends ESTestCase { assertThat(setting.hasIndexScope(), is(true)); assertThat(setting.hasNodeScope(), is(true)); } + + /** + * We can't have Null properties + */ + public void testRejectNullProperties() { + try { + Setting.simpleString("foo.bar", (Property[]) null); + fail(); + } catch (IllegalArgumentException ex) { + assertThat(ex.getMessage(), containsString("properties can not be null for setting")); + } + } } From 5c845f8bb557db280086fe016b5b141b0ae34f2c Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Sun, 13 Mar 2016 21:17:48 +0100 Subject: [PATCH 202/320] Reworked 5.0 breaking changes docs --- docs/reference/migration/migrate_5_0.asciidoc | 887 +----------------- .../migration/migrate_5_0/allocation.asciidoc | 54 ++ .../migration/migrate_5_0/cat.asciidoc | 33 + .../migration/migrate_5_0/index-apis.asciidoc | 48 + .../migration/migrate_5_0/java.asciidoc | 213 +++++ .../migration/migrate_5_0/mapping.asciidoc | 82 ++ .../migration/migrate_5_0/packaging.asciidoc | 24 + .../migration/migrate_5_0/percolator.asciidoc | 41 + .../migration/migrate_5_0/plugins.asciidoc | 99 ++ .../migration/migrate_5_0/rest.asciidoc | 17 
+ .../migration/migrate_5_0/search.asciidoc | 141 +++ .../migration/migrate_5_0/settings.asciidoc | 174 ++++ 12 files changed, 957 insertions(+), 856 deletions(-) create mode 100644 docs/reference/migration/migrate_5_0/allocation.asciidoc create mode 100644 docs/reference/migration/migrate_5_0/cat.asciidoc create mode 100644 docs/reference/migration/migrate_5_0/index-apis.asciidoc create mode 100644 docs/reference/migration/migrate_5_0/java.asciidoc create mode 100644 docs/reference/migration/migrate_5_0/mapping.asciidoc create mode 100644 docs/reference/migration/migrate_5_0/packaging.asciidoc create mode 100644 docs/reference/migration/migrate_5_0/percolator.asciidoc create mode 100644 docs/reference/migration/migrate_5_0/plugins.asciidoc create mode 100644 docs/reference/migration/migrate_5_0/rest.asciidoc create mode 100644 docs/reference/migration/migrate_5_0/search.asciidoc create mode 100644 docs/reference/migration/migrate_5_0/settings.asciidoc diff --git a/docs/reference/migration/migrate_5_0.asciidoc b/docs/reference/migration/migrate_5_0.asciidoc index 8e082a1e426d..23cadbbd9edc 100644 --- a/docs/reference/migration/migrate_5_0.asciidoc +++ b/docs/reference/migration/migrate_5_0.asciidoc @@ -4,877 +4,52 @@ This section discusses the changes that you need to be aware of when migrating your application to Elasticsearch 5.0. +[IMPORTANT] +.Reindex indices from Elasticsearch 1.x or before +========================================= + +Indices created in Elasticsearch 1.x or before will need to be reindexed with +Elasticsearch 2.x in order to be readable by Elasticsearch 5.x. The easiest +way to do this is to upgrade to Elasticsearch 2.3 or later and to use the +`reindex` API. 
+ +========================================= + +[float] +=== Also see: + * <> +* <> +* <> +* <> +* <> +* <> * <> * <> -* <> -* <> -* <> -* <> * <> -* <> -* <> -* <> -* <> -* <> * <> -* <> -* <> -* <> -* <> +* <> -[[breaking_50_search_changes]] -=== Warmers +include::migrate_5_0/search.asciidoc[] -Thanks to several changes like doc values by default or disk-based norms, -warmers have become quite useless. As a consequence, warmers and the warmer -API have been removed: it is not possible anymore to register queries that -will run before a new IndexSearcher is published. +include::migrate_5_0/mapping.asciidoc[] -Don't worry if you have warmers defined on your indices, they will simply be -ignored when upgrading to 5.0. +include::migrate_5_0/percolator.asciidoc[] -=== Search changes +include::migrate_5_0/index-apis.asciidoc[] -==== `search_type=count` removed +include::migrate_5_0/settings.asciidoc[] -The `count` search type was deprecated since version 2.0.0 and is now removed. -In order to get the same benefits, you just need to set the value of the `size` -parameter to `0`. +include::migrate_5_0/allocation.asciidoc[] -For instance, the following request: +include::migrate_5_0/rest.asciidoc[] -[source,sh] ---------------- -GET /my_index/_search?search_type=count -{ - "aggs": { - "my_terms": { - "terms": { - "field": "foo" - } - } - } -} ---------------- +include::migrate_5_0/cat.asciidoc[] -can be replaced with: +include::migrate_5_0/java.asciidoc[] -[source,sh] ---------------- -GET /my_index/_search -{ - "size": 0, - "aggs": { - "my_terms": { - "terms": { - "field": "foo" - } - } - } -} ---------------- +include::migrate_5_0/packaging.asciidoc[] -==== `search_type=scan` removed +include::migrate_5_0/plugins.asciidoc[] -The `scan` search type was deprecated since version 2.1.0 and is now removed. 
-All benefits from this search type can now be achieved by doing a scroll -request that sorts documents in `_doc` order, for instance: -[source,sh] ---------------- -GET /my_index/_search?scroll=2m -{ - "sort": [ - "_doc" - ] -} ---------------- - -Scroll requests sorted by `_doc` have been optimized to more efficiently resume -from where the previous request stopped, so this will have the same performance -characteristics as the former `scan` search type. - -==== Boost accuracy for queries on `_all` - -Per-field boosts on the `_all` are now compressed on a single byte instead of -4 bytes previously. While this will make the index more space-efficient, this -also means that the boosts will be less accurately encoded. - -[[breaking_50_rest_api_changes]] -=== REST API changes - -==== id values longer than 512 bytes are rejected - -When specifying an `_id` value longer than 512 bytes, the request will be -rejected. - -==== search exists api removed - -The search exists api has been removed in favour of using the search api with -`size` set to `0` and `terminate_after` set to `1`. - -==== `/_optimize` endpoint removed - -The deprecated `/_optimize` endpoint has been removed. The `/_forcemerge` -endpoint should be used in lieu of optimize. - -The `GET` HTTP verb for `/_forcemerge` is no longer supported, please use the -`POST` HTTP verb. - -==== Deprecated queries removed - -The following deprecated queries have been removed: - -* `filtered`: use `bool` query instead, which supports `filter` clauses too -* `and`: use `must` clauses in a `bool` query instead -* `or`: use should clauses in a `bool` query instead -* `limit`: use `terminate_after` parameter instead -* `fquery`: obsolete after filters and queries have been merged -* `query`: obsolete after filters and queries have been merged - -==== Unified fuzziness parameter - -* Removed support for the deprecated `min_similarity` parameter in `fuzzy query`, in favour of `similarity`. 
-* Removed support for the deprecated `fuzzy_min_sim` parameter in `query_string` query, in favour of `similarity`. -* Removed support for the deprecated `edit_distance` parameter in completion suggester, in favour of `similarity`. - -==== indices query - -Removed support for the deprecated `filter` and `no_match_filter` fields in `indices` query, -in favour of `query` and `no_match_query`. - -==== nested query - -Removed support for the deprecated `filter` fields in `nested` query, in favour of `query`. - -==== terms query - -Removed support for the deprecated `minimum_should_match` and `disable_coord` in `terms` query, use `bool` query instead. -Removed also support for the deprecated `execution` parameter. - -==== function_score query - -Removed support for the top level `filter` element in `function_score` query, replaced by `query`. - -==== highlighters - -Removed support for multiple highlighter names, the only supported ones are: `plain`, `fvh` and `postings`. - -==== top level filter - -Removed support for the deprecated top level `filter` in the search api, replaced by `post_filter`. - -==== `query_binary` and `filter_binary` removed - -Removed support for the undocumented `query_binary` and `filter_binary` sections of a search request. - -==== `span_near`'s' `collect_payloads` deprecated - -Payloads are now loaded when needed. - -[[breaking_50_cat_api]] -=== CAT API changes - -==== Use Accept header for specifying response media type - -Previous versions of Elasticsearch accepted the Content-type header -field for controlling the media type of the response in the cat API. -This is in opposition to the HTTP spec which specifies the Accept -header field for this purpose. Elasticsearch now uses the Accept header -field and support for using the Content-Type header field for this -purpose has been removed. 
- -==== Host field removed from the cat nodes API - -The `host` field has been removed from the cat nodes API as its value -is always equal to the `ip` field. The `name` field is available in the -cat nodes API and should be used instead of the `host` field. - -==== Changes to cat recovery API - -The fields `bytes_recovered` and `files_recovered` have been added to -the cat recovery API. These fields, respectively, indicate the total -number of bytes and files that have been recovered. - -The fields `total_files` and `total_bytes` have been renamed to -`files_total` and `bytes_total`, respectively. - -Additionally, the field `translog` has been renamed to -`translog_ops_recovered`, the field `translog_total` to -`translog_ops` and the field `translog_percent` to -`translog_ops_percent`. The short aliases for these fields are `tor`, -`to`, and `top`, respectively. - -[[breaking_50_parent_child_changes]] -=== Parent/Child changes - -The `children` aggregation, parent child inner hits and `has_child` and `has_parent` queries will not work on indices -with `_parent` field mapping created before version `2.0.0`. The data of these indices need to be re-indexed into a new index. - -The format of the join between parent and child documents have changed with the `2.0.0` release. The old -format can't read from version `5.0.0` and onwards. The new format allows for a much more efficient and -scalable join between parent and child documents and the join data structures are stored on disk -data structures as opposed as before the join data structures were stored in the jvm heap space. - -==== `score_type` has been removed - -The `score_type` option has been removed from the `has_child` and `has_parent` queries in favour of the `score_mode` option -which does the exact same thing. - -==== `sum` score mode removed - -The `sum` score mode has been removed in favour of the `total` mode which does the same and is already available in -previous versions. 
- -==== `max_children` option - -When `max_children` was set to `0` on the `has_child` query then there was no upper limit on how many children documents -are allowed to match. This has changed and `0` now really means to zero child documents are allowed. If no upper limit -is needed then the `max_children` option shouldn't be defined at all on the `has_child` query. - -==== `_parent` field no longer indexed - -The join between parent and child documents no longer relies on indexed fields and therefor from `5.0.0` onwards -the `_parent` indexed field won't be indexed. In order to find documents that referrer to a specific parent id -the new `parent_id` query can be used. The get response and hits inside the search response remain to include -the parent id under the `_parent` key. - -[[breaking_50_settings_changes]] -=== Settings changes - -From Elasticsearch 5.0 on all settings are validated before they are applied. Node level and default index -level settings are validated on node startup, dynamic cluster and index setting are validated before they are updated/added -to the cluster state. Every setting must be a _known_ setting or in other words all settings must be registered with the -node or transport client they are used with. This implies that plugins that define custom settings must register all of their -settings during pluging loading using the `SettingsModule#registerSettings(Setting)` method. - -==== Node settings - -The `name` setting has been removed and is replaced by `node.name`. Usage of `-Dname=some_node_name` is not supported -anymore. - -==== Transport Settings - -All settings with a `netty` infix have been replaced by their already existing `transport` synonyms. For instance `transport.netty.bind_host` is -no longer supported and should be replaced by the superseding setting `transport.bind_host`. - -==== Analysis settings - -The `index.analysis.analyzer.default_index` analyzer is not supported anymore. 
-If you wish to change the analyzer to use for indexing, change the -`index.analysis.analyzer.default` analyzer instead. - -==== Ping timeout settings - -Previously, there were three settings for the ping timeout: `discovery.zen.initial_ping_timeout`, -`discovery.zen.ping.timeout` and `discovery.zen.ping_timeout`. The former two have been removed and -the only setting key for the ping timeout is now `discovery.zen.ping_timeout`. The default value for -ping timeouts remains at three seconds. - -==== Recovery settings - -Recovery settings deprecated in 1.x have been removed: - - * `index.shard.recovery.translog_size` is superseded by `indices.recovery.translog_size` - * `index.shard.recovery.translog_ops` is superseded by `indices.recovery.translog_ops` - * `index.shard.recovery.file_chunk_size` is superseded by `indices.recovery.file_chunk_size` - * `index.shard.recovery.concurrent_streams` is superseded by `indices.recovery.concurrent_streams` - * `index.shard.recovery.concurrent_small_file_streams` is superseded by `indices.recovery.concurrent_small_file_streams` - * `indices.recovery.max_size_per_sec` is superseded by `indices.recovery.max_bytes_per_sec` - -If you are using any of these settings please take the time and review their purpose. All of the settings above are considered -_expert settings_ and should only be used if absolutely necessary. If you have set any of the above setting as persistent -cluster settings please use the settings update API and set their superseded keys accordingly. - -The following settings have been removed without replacement - - * `indices.recovery.concurrent_small_file_streams` - recoveries are now single threaded. The number of concurrent outgoing recoveries are throttled via allocation deciders - * `indices.recovery.concurrent_file_streams` - recoveries are now single threaded. 
The number of concurrent outgoing recoveries are throttled via allocation deciders - -==== Translog settings - -The `index.translog.flush_threshold_ops` setting is not supported anymore. In order to control flushes based on the transaction log -growth use `index.translog.flush_threshold_size` instead. Changing the translog type with `index.translog.fs.type` is not supported -anymore, the `buffered` implementation is now the only available option and uses a fixed `8kb` buffer. - -The translog by default is fsynced on a request basis such that the ability to fsync on every operation is not necessary anymore. In-fact it can -be a performance bottleneck and it's trappy since it enabled by a special value set on `index.translog.sync_interval`. `index.translog.sync_interval` -now doesn't accept a value less than `100ms` which prevents fsyncing too often if async durability is enabled. The special value `0` is not supported anymore. - -==== Request Cache Settings - -The deprecated settings `index.cache.query.enable` and `indices.cache.query.size` have been removed and are replaced with -`index.requests.cache.enable` and `indices.requests.cache.size` respectively. - -`indices.requests.cache.clean_interval` has been replaced with `indices.cache.clean_interval` and is no longer supported. - -==== Field Data Cache Settings - -`indices.fielddata.cache.clean_interval` has been replaced with `indices.cache.clean_interval` and is no longer supported. - -==== Allocation settings - -Allocation settings deprecated in 1.x have been removed: - - * `cluster.routing.allocation.concurrent_recoveries` is superseded by `cluster.routing.allocation.node_concurrent_recoveries` - -Please change the setting in your configuration files or in the clusterstate to use the new settings instead. - -==== Similarity settings - -The 'default' similarity has been renamed to 'classic'. 
- -==== Indexing settings - -`indices.memory.min_shard_index_buffer_size` and `indices.memory.max_shard_index_buffer_size` are removed since Elasticsearch now allows any one shard to any -amount of heap as long as the total indexing buffer heap used across all shards is below the node's `indices.memory.index_buffer_size` (default: 10% of the JVM heap) - -==== Removed es.max-open-files - -Setting the system property es.max-open-files to true to get -Elasticsearch to print the number of maximum open files for the -Elasticsearch process has been removed. This same information can be -obtained from the <> API, and a warning is logged -on startup if it is set too low. - -==== Removed es.netty.gathering - -Disabling Netty from using NIO gathering could be done via the escape -hatch of setting the system property "es.netty.gathering" to "false". -Time has proven enabling gathering by default is a non-issue and this -non-documented setting has been removed. - -==== Removed es.useLinkedTransferQueue - -The system property `es.useLinkedTransferQueue` could be used to -control the queue implementation used in the cluster service and the -handling of ping responses during discovery. This was an undocumented -setting and has been removed. - -[[breaking_50_mapping_changes]] -=== Mapping changes - -==== Default doc values settings - -Doc values are now also on by default on numeric and boolean fields that are -not indexed. - -==== Transform removed - -The `transform` feature from mappings has been removed. It made issues very hard to debug. - -==== Default number mappings - -When a floating-point number is encountered, it is now dynamically mapped as a -float by default instead of a double. The reasoning is that floats should be -more than enough for most cases but would decrease storage requirements -significantly. - -==== `index` property - -On all types but `string`, the `index` property now only accepts `true`/`false` -instead of `not_analyzed`/`no`. 
The `string` field still accepts -`analyzed`/`not_analyzed`/`no`. - -==== ++_source++'s `format` option - -The `_source` mapping does not support the `format` option anymore. This option -will still be accepted for indices created before the upgrade to 5.0 for backward -compatibility, but it will have no effect. Indices created on or after 5.0 will -reject this option. - -==== Object notation - -Core types don't support the object notation anymore, which allowed to provide -values as follows: - -[source,json] ---------------- -{ - "value": "field_value", - "boost": 42 -} ---------------- - -==== `fielddata.format` - -Setting `fielddata.format: doc_values` in the mappings used to implicitly -enable doc values on a field. This no longer works: the only way to enable or -disable doc values is by using the `doc_values` property of mappings. - - -[[breaking_50_plugins]] -=== Plugin changes - -The command `bin/plugin` has been renamed to `bin/elasticsearch-plugin`. -The structure of the plugin has changed. All the plugin files must be contained in a directory called `elasticsearch`. -If you use the gradle build, this structure is automatically generated. - -==== Site plugins removed - -Site plugins have been removed. It is recommended to migrate site plugins to Kibana plugins. - -==== Multicast plugin removed - -Multicast has been removed. Use unicast discovery, or one of the cloud discovery plugins. - -==== Plugins with custom query implementations - -Plugins implementing custom queries need to implement the `fromXContent(QueryParseContext)` method in their -`QueryParser` subclass rather than `parse`. This method will take care of parsing the query from `XContent` format -into an intermediate query representation that can be streamed between the nodes in binary format, effectively the -query object used in the java api. 
Also, the query parser needs to implement the `getBuilderPrototype` method that -returns a prototype of the `NamedWriteable` query, which allows to deserialize an incoming query by calling -`readFrom(StreamInput)` against it, which will create a new object, see usages of `Writeable`. The `QueryParser` -also needs to declare the generic type of the query that it supports and it's able to parse. -The query object can then transform itself into a lucene query through the new `toQuery(QueryShardContext)` method, -which returns a lucene query to be executed on the data node. - -Similarly, plugins implementing custom score functions need to implement the `fromXContent(QueryParseContext)` -method in their `ScoreFunctionParser` subclass rather than `parse`. This method will take care of parsing -the function from `XContent` format into an intermediate function representation that can be streamed between -the nodes in binary format, effectively the function object used in the java api. Also, the query parser needs -to implement the `getBuilderPrototype` method that returns a prototype of the `NamedWriteable` function, which -allows to deserialize an incoming function by calling `readFrom(StreamInput)` against it, which will create a -new object, see usages of `Writeable`. The `ScoreFunctionParser` also needs to declare the generic type of the -function that it supports and it's able to parse. The function object can then transform itself into a lucene -function through the new `toFunction(QueryShardContext)` method, which returns a lucene function to be executed -on the data node. 
- -==== Cloud AWS plugin changes - -Cloud AWS plugin has been split in two plugins: - -* {plugins}/discovery-ec2.html[Discovery EC2 plugin] -* {plugins}/repository-s3.html[Repository S3 plugin] - -Proxy settings for both plugins have been renamed: - -* from `cloud.aws.proxy_host` to `cloud.aws.proxy.host` -* from `cloud.aws.ec2.proxy_host` to `cloud.aws.ec2.proxy.host` -* from `cloud.aws.s3.proxy_host` to `cloud.aws.s3.proxy.host` -* from `cloud.aws.proxy_port` to `cloud.aws.proxy.port` -* from `cloud.aws.ec2.proxy_port` to `cloud.aws.ec2.proxy.port` -* from `cloud.aws.s3.proxy_port` to `cloud.aws.s3.proxy.port` - -==== Cloud Azure plugin changes - -Cloud Azure plugin has been split in three plugins: - -* {plugins}/discovery-azure.html[Discovery Azure plugin] -* {plugins}/repository-azure.html[Repository Azure plugin] -* {plugins}/store-smb.html[Store SMB plugin] - -If you were using the `cloud-azure` plugin for snapshot and restore, you had in `elasticsearch.yml`: - -[source,yaml] ------ -cloud: - azure: - storage: - account: your_azure_storage_account - key: your_azure_storage_key ------ - -You need to give a unique id to the storage details now as you can define multiple storage accounts: - -[source,yaml] ------ -cloud: - azure: - storage: - my_account: - account: your_azure_storage_account - key: your_azure_storage_key ------ - - -==== Cloud GCE plugin changes - -Cloud GCE plugin has been renamed to {plugins}/discovery-gce.html[Discovery GCE plugin]. - - -==== Mapper Attachments plugin deprecated - -Mapper attachments has been deprecated. Users should use now the {plugins}/ingest-attachment.html[`ingest-attachment`] -plugin. - - -[[breaking_50_java_api_changes]] -=== Java API changes - -==== Count api has been removed - -The deprecated count api has been removed from the Java api, use the search api instead and set size to 0. 
- -The following call - -[source,java] ------ -client.prepareCount(indices).setQuery(query).get(); ------ - -can be replaced with - -[source,java] ------ -client.prepareSearch(indices).setSource(new SearchSourceBuilder().size(0).query(query)).get(); ------ - -==== BoostingQueryBuilder - -Removed setters for mandatory positive/negative query. Both arguments now have -to be supplied at construction time already and have to be non-null. - -==== SpanContainingQueryBuilder - -Removed setters for mandatory big/little inner span queries. Both arguments now have -to be supplied at construction time already and have to be non-null. Updated -static factory methods in QueryBuilders accordingly. - -==== SpanOrQueryBuilder - -Making sure that query contains at least one clause by making initial clause mandatory -in constructor. - -==== SpanNearQueryBuilder - -Removed setter for mandatory slop parameter, needs to be set in constructor now. Also -making sure that query contains at least one clause by making initial clause mandatory -in constructor. Updated the static factory methods in QueryBuilders accordingly. - -==== SpanNotQueryBuilder - -Removed setter for mandatory include/exclude span query clause, needs to be set in constructor now. -Updated the static factory methods in QueryBuilders and tests accordingly. - -==== SpanWithinQueryBuilder - -Removed setters for mandatory big/little inner span queries. Both arguments now have -to be supplied at construction time already and have to be non-null. Updated -static factory methods in QueryBuilders accordingly. - -==== QueryFilterBuilder - -Removed the setter `queryName(String queryName)` since this field is not supported -in this type of query. Use `FQueryFilterBuilder.queryName(String queryName)` instead -when in need to wrap a named query as a filter. - -==== WrapperQueryBuilder - -Removed `wrapperQueryBuilder(byte[] source, int offset, int length)`. Instead simply -use `wrapperQueryBuilder(byte[] source)`. 
Updated the static factory methods in -QueryBuilders accordingly. - -==== QueryStringQueryBuilder - -Removed ability to pass in boost value using `field(String field)` method in form e.g. `field^2`. -Use the `field(String, float)` method instead. - -==== Operator - -Removed the enums called `Operator` from `MatchQueryBuilder`, `QueryStringQueryBuilder`, -`SimpleQueryStringBuilder`, and `CommonTermsQueryBuilder` in favour of using the enum -defined in `org.elasticsearch.index.query.Operator` in an effort to consolidate the -codebase and avoid duplication. - -==== queryName and boost support - -Support for `queryName` and `boost` has been streamlined to all of the queries. That is -a breaking change till queries get sent over the network as serialized json rather -than in `Streamable` format. In fact whenever additional fields are added to the json -representation of the query, older nodes might throw error when they find unknown fields. - -==== InnerHitsBuilder - -InnerHitsBuilder now has a dedicated addParentChildInnerHits and addNestedInnerHits methods -to differentiate between inner hits for nested vs. parent / child documents. This change -makes the type / path parameter mandatory. - -==== MatchQueryBuilder - -Moving MatchQueryBuilder.Type and MatchQueryBuilder.ZeroTermsQuery enum to MatchQuery.Type. -Also reusing new Operator enum. - -==== MoreLikeThisQueryBuilder - -Removed `MoreLikeThisQueryBuilder.Item#id(String id)`, `Item#doc(BytesReference doc)`, -`Item#doc(XContentBuilder doc)`. Use provided constructors instead. - -Removed `MoreLikeThisQueryBuilder#addLike` in favor of texts and/or items being provided -at construction time. Using arrays there instead of lists now. - -Removed `MoreLikeThisQueryBuilder#addUnlike` in favor to using the `unlike` methods -which take arrays as arguments now rather than the lists used before. - -The deprecated `docs(Item... docs)`, `ignoreLike(Item... docs)`, -`ignoreLike(String... likeText)`, `addItem(Item... 
likeItems)` have been removed. - -==== GeoDistanceQueryBuilder - -Removing individual setters for lon() and lat() values, both values should be set together - using point(lon, lat). - -==== GeoDistanceRangeQueryBuilder - -Removing setters for to(Object ...) and from(Object ...) in favour of the only two allowed input -arguments (String, Number). Removing setter for center point (point(), geohash()) because parameter -is mandatory and should already be set in constructor. -Also removing setters for lt(), lte(), gt(), gte() since they can all be replaced by equivalent -calls to to/from() and inludeLower()/includeUpper(). - -==== GeoPolygonQueryBuilder - -Require shell of polygon already to be specified in constructor instead of adding it pointwise. -This enables validation, but makes it necessary to remove the addPoint() methods. - -==== MultiMatchQueryBuilder - -Moving MultiMatchQueryBuilder.ZeroTermsQuery enum to MatchQuery.ZeroTermsQuery. -Also reusing new Operator enum. - -Removed ability to pass in boost value using `field(String field)` method in form e.g. `field^2`. -Use the `field(String, float)` method instead. - -==== MissingQueryBuilder - -The MissingQueryBuilder which was deprecated in 2.2.0 is removed. As a replacement use ExistsQueryBuilder -inside a mustNot() clause. So instead of using `new ExistsQueryBuilder(name)` now use -`new BoolQueryBuilder().mustNot(new ExistsQueryBuilder(name))`. - -==== NotQueryBuilder - -The NotQueryBuilder which was deprecated in 2.1.0 is removed. As a replacement use BoolQueryBuilder -with added mustNot() clause. So instead of using `new NotQueryBuilder(filter)` now use -`new BoolQueryBuilder().mustNot(filter)`. - -==== TermsQueryBuilder - -Remove the setter for `termsLookup()`, making it only possible to either use a TermsLookup object or -individual values at construction time. 
Also moving individual settings for the TermsLookup (lookupIndex, -lookupType, lookupId, lookupPath) to the separate TermsLookup class, using constructor only and moving -checks for validation there. Removed `TermsLookupQueryBuilder` in favour of `TermsQueryBuilder`. - -==== FunctionScoreQueryBuilder - -`add` methods have been removed, all filters and functions must be provided as constructor arguments by -creating an array of `FunctionScoreQueryBuilder.FilterFunctionBuilder` objects, containing one element -for each filter/function pair. - -`scoreMode` and `boostMode` can only be provided using corresponding enum members instead -of string values: see `FilterFunctionScoreQuery.ScoreMode` and `CombineFunction`. - -`CombineFunction.MULT` has been renamed to `MULTIPLY`. - -==== IdsQueryBuilder - -For simplicity, only one way of adding the ids to the existing list (empty by default) is left: `addIds(String...)` - -==== DocumentAlreadyExistsException removed - -`DocumentAlreadyExistsException` is removed and a `VersionConflictException` is thrown instead (with a better -error description). This will influence code that use the `IndexRequest.opType()` or `IndexRequest.create()` -to index a document only if it doesn't already exist. - -==== ShapeBuilders - -`InternalLineStringBuilder` is removed in favour of `LineStringBuilder`, `InternalPolygonBuilder` in favour of PolygonBuilder` and `Ring` has been replaced with `LineStringBuilder`. Also the abstract base classes `BaseLineStringBuilder` and `BasePolygonBuilder` haven been merged with their corresponding implementations. - -==== RescoreBuilder - -`RecoreBuilder.Rescorer` was merged with `RescoreBuilder`, which now is an abstract superclass. QueryRescoreBuilder currently is its only implementation. - -==== PhraseSuggestionBuilder - -The inner DirectCandidateGenerator class has been moved out to its own class called DirectCandidateGeneratorBuilder. 
- -==== Elasticsearch will no longer detect logging implementations - -Elasticsearch now logs only to log4j 1.2. Previously if log4j wasn't on the classpath it made some effort to degrade to -slf4j or java.util.logging. Now it'll fail to work without the log4j 1.2 api. The log4j-over-slf4j bridge ought to work -when using the java client. As should log4j 2's log4j-1.2-api. The Elasticsearch server now only supports log4j as -configured by logging.yml and it no longer makes any effort to work if log4j isn't present. - -[[breaking_50_cache_concurrency]] -=== Cache concurrency level settings removed - -Two cache concurrency level settings `indices.requests.cache.concurrency_level` and -`indices.fielddata.cache.concurrency_level` because they no longer apply to the cache implementation used for the -request cache and the field data cache. - -[[breaking_50_non_loopback]] -=== Remove bind option of `non_loopback` - -This setting would arbitrarily pick the first interface not marked as loopback. Instead, specify by address -scope (e.g. `_local_,_site_` for all loopback and private network addresses) or by explicit interface names, -hostnames, or addresses. - -[[breaking_50_thread_pool]] -=== Forbid changing of thread pool types - -Previously, <> could be dynamically adjusted. The thread pool type effectively -controls the backing queue for the thread pool and modifying this is an expert setting with minimal practical benefits -and high risk of being misused. The ability to change the thread pool type for any thread pool has been removed; do note -that it is still possible to adjust relevant thread pool parameters for each of the thread pools (e.g., depending on -the thread pool type, `keep_alive`, `queue_size`, etc.). - -[[breaking_50_cpu_stats]] -=== System CPU stats - -The recent CPU usage (as a percent) has been added to the OS stats -reported under the node stats API and the cat nodes API. 
The breaking -change here is that there is a new object in the `os` object in the node -stats response. This object is called `cpu` and includes "percent" and -`load_average` as fields. This moves the `load_average` field that was -previously a top-level field in the `os` object to the `cpu` object. The -format of the `load_average` field has changed to an object with fields -`1m`, `5m`, and `15m` representing the one-minute, five-minute and -fifteen-minute loads respectively. If any of these fields are not present, -it indicates that the corresponding value is not available. - -In the cat nodes API response, the `cpu` field is output by default. The -previous `load` field has been removed and is replaced by `load_1m`, -`load_5m`, and `load_15m` which represent the one-minute, five-minute -and fifteen-minute loads respectively. The field will be null if the -corresponding value is not available. - -Finally, the API for `org.elasticsearch.monitor.os.OsStats` has -changed. The `getLoadAverage` method has been removed. The value for -this can now be obtained from `OsStats.Cpu#getLoadAverage` but it is no -longer a double and is instead an object encapsulating the one-minute, -five-minute and fifteen-minute load averages. Additionally, the recent -CPU usage can be obtained from `OsStats.Cpu#getPercent`. - -=== Fields option -Only stored fields are retrievable with this option. -The fields option won't be able to load non stored fields from _source anymore. - -[[breaking_50_allocation]] -=== Primary shard allocation - -Previously, primary shards were only assigned if a quorum of shard copies were found (configurable using -`index.recovery.initial_shards`, now deprecated). In case where a primary had only a single replica, quorum was defined -to be a single shard. This meant that any shard copy of an index with replication factor 1 could become primary, even it -was a stale copy of the data on disk. This is now fixed by using allocation IDs. 
- -Allocation IDs assign unique identifiers to shard copies. This allows the cluster to differentiate between multiple -copies of the same data and track which shards have been active, so that after a cluster restart, shard copies -containing only the most recent data can become primaries. - -=== Indices Shard Stores command - -By using allocation IDs instead of version numbers to identify shard copies for primary shard allocation, the former versioning scheme -has become obsolete. This is reflected in the indices-shards-stores.html[Indices Shard Stores API]. A new field `allocation_id` replaces the -former `version` field in the result of the Indices Shard Stores command. This field is available for all shard copies that have been either -created with the current version of Elasticsearch or have been active in a cluster running a current version of Elasticsearch. For legacy -shard copies that have not been active in a current version of Elasticsearch, a `legacy_version` field is available instead (equivalent to -the former `version` field). - -=== Reroute commands - -The reroute command `allocate` has been split into two distinct commands `allocate_replica` and `allocate_empty_primary`. -This was done as we introduced a new `allocate_stale_primary` command. The new `allocate_replica` command corresponds to the -old `allocate` command with `allow_primary` set to false. The new `allocate_empty_primary` command corresponds to the old -`allocate` command with `allow_primary` set to true. - -==== `index.shared_filesystem.recover_on_any_node` changes - -The behavior of `index.shared_filesystem.recover_on_any_node = true` has been changed. Previously, in the case where no -shard copies could be found, an arbitrary node was chosen by potentially ignoring allocation deciders. Now, we take -balancing into account but don't assign the shard if the allocation deciders are not satisfied. The behavior has also changed -in the case where shard copies can be found. 
Previously, a node not holding the shard copy was chosen if none of the nodes -holding shard copies were satisfying the allocation deciders. Now, the shard will be assigned to a node having a shard copy, -even if none of the nodes holding a shard copy satisfy the allocation deciders. - -[[breaking_50_percolator]] -=== Percolator - -Adding percolator queries and modifications to existing percolator queries are no longer visible in immediately -to the percolator. A refresh is required to run before the changes are visible to the percolator. - -The reason that this has changed is that on newly created indices the percolator automatically indexes the query terms -and these query terms are used at percolate time to reduce the amount of queries the percolate API needs evaluate. -This optimization didn't work in the percolate API mode where modifications to queries are immediately visible. - -The percolator by defaults sets the `size` option to `10` whereas before this was set to unlimited. - -The percolate api can no longer accept documents that have fields that don't exist in the mapping. - -When percolating an existing document then specifying a document in the source of the percolate request is not allowed -any more. - -The percolate api no longer modifies the mappings. Before the percolate api could be used to dynamically introduce new -fields to the mappings based on the fields in the document being percolated. This no longer works, because these -unmapped fields are not persisted in the mapping. - -Percolator documents are no longer excluded from the search response. - -[[breaking_50_packaging]] -=== Packaging - -==== Default logging using systemd (since Elasticsearch 2.2.0) - -In previous versions of Elasticsearch, the default logging -configuration routed standard output to /dev/null and standard error to -the journal. 
However, there are often critical error messages at -startup that are logged to standard output rather than standard error -and these error messages would be lost to the nether. The default has -changed to now route standard output to the journal and standard error -to inherit this setting (these are the defaults for systemd). These -settings can be modified by editing the elasticsearch.service file. - -==== Longer startup times - -In Elasticsearch 5.0.0 the `-XX:+AlwaysPreTouch` flag has been added to the JVM -startup options. This option touches all memory pages used by the JVM heap -during initialization of the HotSpot VM to reduce the chance of having to commit -a memory page during GC time. This will increase the startup time of -Elasticsearch as well as increasing the initial resident memory usage of the -Java process. - -[[breaking_50_scripting]] -=== Scripting - -==== Script mode settings - -Previously script mode settings (e.g., "script.inline: true", -"script.engine.groovy.inline.aggs: false", etc.) accepted the values -`on`, `true`, `1`, and `yes` for enabling a scripting mode, and the -values `off`, `false`, `0`, and `no` for disabling a scripting mode. -The variants `on`, `1`, and `yes ` for enabling and `off`, `0`, -and `no` for disabling are no longer supported. - -==== Groovy dependencies - -In previous versions of Elasticsearch, the Groovy scripting capabilities -depended on the `org.codehaus.groovy:groovy-all` artifact. In addition -to pulling in the Groovy language, this pulls in a very large set of -functionality, none of which is needed for scripting within -Elasticsearch. Aside from the inherent difficulties in managing such a -large set of dependencies, this also increases the surface area for -security issues. This dependency has been reduced to the core Groovy -language `org.codehaus.groovy:groovy` artifact. - -[[breaking_50_term_vectors]] -=== Term vectors - -The term vectors APIs no longer persist unmapped fields in the mappings. 
- -The `dfs` parameter has been removed completely, term vectors don't support -distributed document frequencies anymore. - -[[breaking_50_security]] -=== Security - -The option to disable the security manager `--security.manager.enabled` has been removed. In order to grant special -permissions to elasticsearch users must tweak the local Java Security Policy. - -[[breaking_50_snapshot_restore]] -=== Snapshot/Restore - -==== Closing / deleting indices while running snapshot - -In previous versions of Elasticsearch, closing or deleting an index during a full snapshot would make the snapshot fail. This is now changed -by failing the close/delete index request instead. The behavior for partial snapshots remains unchanged: Closing or deleting an index during -a partial snapshot is still possible. The snapshot result is then marked as partial. diff --git a/docs/reference/migration/migrate_5_0/allocation.asciidoc b/docs/reference/migration/migrate_5_0/allocation.asciidoc new file mode 100644 index 000000000000..1e095831381b --- /dev/null +++ b/docs/reference/migration/migrate_5_0/allocation.asciidoc @@ -0,0 +1,54 @@ +[[breaking_50_allocation]] +=== Allocation changes + +==== Primary shard allocation + +Previously, primary shards were only assigned if a quorum of shard copies were +found (configurable using `index.recovery.initial_shards`, now deprecated). In +case where a primary had only a single replica, quorum was defined to be a +single shard. This meant that any shard copy of an index with replication +factor 1 could become primary, even it was a stale copy of the data on disk. +This is now fixed thanks to shard allocation IDs. + +Allocation IDs assign unique identifiers to shard copies. This allows the +cluster to differentiate between multiple copies of the same data and track +which shards have been active so that, after a cluster restart, only shard +copies containing the most recent data can become primaries. 
+ +==== Indices Shard Stores command + +By using allocation IDs instead of version numbers to identify shard copies +for primary shard allocation, the former versioning scheme has become +obsolete. This is reflected in the +<>. + +A new `allocation_id` field replaces the former `version` field in the result +of the Indices Shard Stores command. This field is available for all shard +copies that have been either created with the current version of Elasticsearch +or have been active in a cluster running a current version of Elasticsearch. +For legacy shard copies that have not been active in a current version of +Elasticsearch, a `legacy_version` field is available instead (equivalent to +the former `version` field). + +==== Reroute commands + +The reroute command `allocate` has been split into two distinct commands +`allocate_replica` and `allocate_empty_primary`. This was done as we +introduced a new `allocate_stale_primary` command. The new `allocate_replica` +command corresponds to the old `allocate` command with `allow_primary` set to +false. The new `allocate_empty_primary` command corresponds to the old +`allocate` command with `allow_primary` set to true. + +==== `index.shared_filesystem.recover_on_any_node` changes + +The behavior of `index.shared_filesystem.recover_on_any_node: true` has been +changed. Previously, in the case where no shard copies could be found, an +arbitrary node was chosen by potentially ignoring allocation deciders. Now, we +take balancing into account but don't assign the shard if the allocation +deciders are not satisfied. + +The behavior has also changed in the case where shard copies can be found. +Previously, a node not holding the shard copy was chosen if none of the nodes +holding shard copies were satisfying the allocation deciders. Now, the shard +will be assigned to a node having a shard copy, even if none of the nodes +holding a shard copy satisfy the allocation deciders. 
diff --git a/docs/reference/migration/migrate_5_0/cat.asciidoc b/docs/reference/migration/migrate_5_0/cat.asciidoc new file mode 100644 index 000000000000..c3b1c84ee8de --- /dev/null +++ b/docs/reference/migration/migrate_5_0/cat.asciidoc @@ -0,0 +1,33 @@ +[[breaking_50_cat_api]] +=== CAT API changes + +==== Use Accept header for specifying response media type + +Previous versions of Elasticsearch accepted the Content-type header +field for controlling the media type of the response in the cat API. +This is in opposition to the HTTP spec which specifies the Accept +header field for this purpose. Elasticsearch now uses the Accept header +field and support for using the Content-Type header field for this +purpose has been removed. + +==== Host field removed from the cat nodes API + +The `host` field has been removed from the cat nodes API as its value +is always equal to the `ip` field. The `name` field is available in the +cat nodes API and should be used instead of the `host` field. + +==== Changes to cat recovery API + +The fields `bytes_recovered` and `files_recovered` have been added to +the cat recovery API. These fields, respectively, indicate the total +number of bytes and files that have been recovered. + +The fields `total_files` and `total_bytes` have been renamed to +`files_total` and `bytes_total`, respectively. + +Additionally, the field `translog` has been renamed to +`translog_ops_recovered`, the field `translog_total` to +`translog_ops` and the field `translog_percent` to +`translog_ops_percent`. The short aliases for these fields are `tor`, +`to`, and `top`, respectively. 
+ diff --git a/docs/reference/migration/migrate_5_0/index-apis.asciidoc b/docs/reference/migration/migrate_5_0/index-apis.asciidoc new file mode 100644 index 000000000000..72651295bbcd --- /dev/null +++ b/docs/reference/migration/migrate_5_0/index-apis.asciidoc @@ -0,0 +1,48 @@ +[[breaking_50_index_apis]] +=== Index APIs changes + +==== Closing / deleting indices while running snapshot + +In previous versions of Elasticsearch, closing or deleting an index during a +full snapshot would make the snapshot fail. In 5.0, the close/delete index +request will fail instead. The behavior for partial snapshots remains +unchanged: Closing or deleting an index during a partial snapshot is still +possible. The snapshot result is then marked as partial. + +==== Warmers + +Thanks to several changes like doc values by default and disk-based norms, +warmers are no longer useful. As a consequence, warmers and the warmer API +have been removed: it is no longer possible to register queries that will run +before a new IndexSearcher is published. + +Don't worry if you have warmers defined on your indices, they will simply be +ignored when upgrading to 5.0. + +==== System CPU stats + +The recent CPU usage (as a percent) has been added to the OS stats +reported under the node stats API and the cat nodes API. The breaking +change here is that there is a new object in the `os` object in the node +stats response. This object is called `cpu` and includes percent` and +`load_average` as fields. This moves the `load_average` field that was +previously a top-level field in the `os` object to the `cpu` object. The +format of the `load_average` field has changed to an object with fields +`1m`, `5m`, and `15m` representing the one-minute, five-minute and +fifteen-minute loads respectively. If any of these fields are not present, +it indicates that the corresponding value is not available. + +In the cat nodes API response, the `cpu` field is output by default. 
The +previous `load` field has been removed and is replaced by `load_1m`, +`load_5m`, and `load_15m` which represent the one-minute, five-minute +and fifteen-minute loads respectively. The field will be null if the +corresponding value is not available. + +Finally, the API for `org.elasticsearch.monitor.os.OsStats` has +changed. The `getLoadAverage` method has been removed. The value for +this can now be obtained from `OsStats.Cpu#getLoadAverage` but it is no +longer a double and is instead an object encapsulating the one-minute, +five-minute and fifteen-minute load averages. Additionally, the recent +CPU usage can be obtained from `OsStats.Cpu#getPercent`. + + diff --git a/docs/reference/migration/migrate_5_0/java.asciidoc b/docs/reference/migration/migrate_5_0/java.asciidoc new file mode 100644 index 000000000000..d1b96eb94461 --- /dev/null +++ b/docs/reference/migration/migrate_5_0/java.asciidoc @@ -0,0 +1,213 @@ + + + +[[breaking_50_java_api_changes]] +=== Java API changes + +==== Count api has been removed + +The deprecated count api has been removed from the Java api, use the search api instead and set size to 0. + +The following call + +[source,java] +----- +client.prepareCount(indices).setQuery(query).get(); +----- + +can be replaced with + +[source,java] +----- +client.prepareSearch(indices).setSource(new SearchSourceBuilder().size(0).query(query)).get(); +----- + +==== Elasticsearch will no longer detect logging implementations + +Elasticsearch now logs only to log4j 1.2. Previously if log4j wasn't on the +classpath it made some effort to degrade to slf4j or java.util.logging. Now it +will fail to work without the log4j 1.2 api. The log4j-over-slf4j bridge ought +to work when using the java client, as should log4j 2's log4j-1.2-api. The +Elasticsearch server now only supports log4j as configured by `logging.yml` +and will fail if log4j isn't present. 
+ +==== Groovy dependencies + +In previous versions of Elasticsearch, the Groovy scripting capabilities +depended on the `org.codehaus.groovy:groovy-all` artifact. In addition +to pulling in the Groovy language, this pulls in a very large set of +functionality, none of which is needed for scripting within +Elasticsearch. Aside from the inherent difficulties in managing such a +large set of dependencies, this also increases the surface area for +security issues. This dependency has been reduced to the core Groovy +language `org.codehaus.groovy:groovy` artifact. + +==== DocumentAlreadyExistsException removed + +`DocumentAlreadyExistsException` is removed and a `VersionConflictException` is thrown instead (with a better +error description). This will influence code that use the `IndexRequest.opType()` or `IndexRequest.create()` +to index a document only if it doesn't already exist. + +==== Changes to Query Builders + +===== BoostingQueryBuilder + +Removed setters for mandatory positive/negative query. Both arguments now have +to be supplied at construction time already and have to be non-null. + +===== SpanContainingQueryBuilder + +Removed setters for mandatory big/little inner span queries. Both arguments now have +to be supplied at construction time already and have to be non-null. Updated +static factory methods in QueryBuilders accordingly. + +===== SpanOrQueryBuilder + +Making sure that query contains at least one clause by making initial clause mandatory +in constructor. + +===== SpanNearQueryBuilder + +Removed setter for mandatory slop parameter, needs to be set in constructor now. Also +making sure that query contains at least one clause by making initial clause mandatory +in constructor. Updated the static factory methods in QueryBuilders accordingly. + +===== SpanNotQueryBuilder + +Removed setter for mandatory include/exclude span query clause, needs to be set in constructor now. +Updated the static factory methods in QueryBuilders and tests accordingly. 
+ +===== SpanWithinQueryBuilder + +Removed setters for mandatory big/little inner span queries. Both arguments now have +to be supplied at construction time already and have to be non-null. Updated +static factory methods in QueryBuilders accordingly. + +===== QueryFilterBuilder + +Removed the setter `queryName(String queryName)` since this field is not supported +in this type of query. Use `FQueryFilterBuilder.queryName(String queryName)` instead +when in need to wrap a named query as a filter. + +===== WrapperQueryBuilder + +Removed `wrapperQueryBuilder(byte[] source, int offset, int length)`. Instead simply +use `wrapperQueryBuilder(byte[] source)`. Updated the static factory methods in +QueryBuilders accordingly. + +===== QueryStringQueryBuilder + +Removed ability to pass in boost value using `field(String field)` method in form e.g. `field^2`. +Use the `field(String, float)` method instead. + +===== Operator + +Removed the enums called `Operator` from `MatchQueryBuilder`, `QueryStringQueryBuilder`, +`SimpleQueryStringBuilder`, and `CommonTermsQueryBuilder` in favour of using the enum +defined in `org.elasticsearch.index.query.Operator` in an effort to consolidate the +codebase and avoid duplication. + +===== queryName and boost support + +Support for `queryName` and `boost` has been streamlined to all of the queries. That is +a breaking change till queries get sent over the network as serialized json rather +than in `Streamable` format. In fact whenever additional fields are added to the json +representation of the query, older nodes might throw error when they find unknown fields. + +===== InnerHitsBuilder + +InnerHitsBuilder now has a dedicated addParentChildInnerHits and addNestedInnerHits methods +to differentiate between inner hits for nested vs. parent / child documents. This change +makes the type / path parameter mandatory. + +===== MatchQueryBuilder + +Moving MatchQueryBuilder.Type and MatchQueryBuilder.ZeroTermsQuery enum to MatchQuery.Type. 
+Also reusing new Operator enum. + +===== MoreLikeThisQueryBuilder + +Removed `MoreLikeThisQueryBuilder.Item#id(String id)`, `Item#doc(BytesReference doc)`, +`Item#doc(XContentBuilder doc)`. Use provided constructors instead. + +Removed `MoreLikeThisQueryBuilder#addLike` in favor of texts and/or items being provided +at construction time. Using arrays there instead of lists now. + +Removed `MoreLikeThisQueryBuilder#addUnlike` in favor to using the `unlike` methods +which take arrays as arguments now rather than the lists used before. + +The deprecated `docs(Item... docs)`, `ignoreLike(Item... docs)`, +`ignoreLike(String... likeText)`, `addItem(Item... likeItems)` have been removed. + +===== GeoDistanceQueryBuilder + +Removing individual setters for lon() and lat() values, both values should be set together + using point(lon, lat). + +===== GeoDistanceRangeQueryBuilder + +Removing setters for to(Object ...) and from(Object ...) in favour of the only two allowed input +arguments (String, Number). Removing setter for center point (point(), geohash()) because parameter +is mandatory and should already be set in constructor. +Also removing setters for lt(), lte(), gt(), gte() since they can all be replaced by equivalent +calls to to/from() and inludeLower()/includeUpper(). + +===== GeoPolygonQueryBuilder + +Require shell of polygon already to be specified in constructor instead of adding it pointwise. +This enables validation, but makes it necessary to remove the addPoint() methods. + +===== MultiMatchQueryBuilder + +Moving MultiMatchQueryBuilder.ZeroTermsQuery enum to MatchQuery.ZeroTermsQuery. +Also reusing new Operator enum. + +Removed ability to pass in boost value using `field(String field)` method in form e.g. `field^2`. +Use the `field(String, float)` method instead. + +===== MissingQueryBuilder + +The MissingQueryBuilder which was deprecated in 2.2.0 is removed. As a replacement use ExistsQueryBuilder +inside a mustNot() clause. 
So instead of using `new ExistsQueryBuilder(name)` now use +`new BoolQueryBuilder().mustNot(new ExistsQueryBuilder(name))`. + +===== NotQueryBuilder + +The NotQueryBuilder which was deprecated in 2.1.0 is removed. As a replacement use BoolQueryBuilder +with added mustNot() clause. So instead of using `new NotQueryBuilder(filter)` now use +`new BoolQueryBuilder().mustNot(filter)`. + +===== TermsQueryBuilder + +Remove the setter for `termsLookup()`, making it only possible to either use a TermsLookup object or +individual values at construction time. Also moving individual settings for the TermsLookup (lookupIndex, +lookupType, lookupId, lookupPath) to the separate TermsLookup class, using constructor only and moving +checks for validation there. Removed `TermsLookupQueryBuilder` in favour of `TermsQueryBuilder`. + +===== FunctionScoreQueryBuilder + +`add` methods have been removed, all filters and functions must be provided as constructor arguments by +creating an array of `FunctionScoreQueryBuilder.FilterFunctionBuilder` objects, containing one element +for each filter/function pair. + +`scoreMode` and `boostMode` can only be provided using corresponding enum members instead +of string values: see `FilterFunctionScoreQuery.ScoreMode` and `CombineFunction`. + +`CombineFunction.MULT` has been renamed to `MULTIPLY`. + +===== IdsQueryBuilder + +For simplicity, only one way of adding the ids to the existing list (empty by default) is left: `addIds(String...)` + +===== ShapeBuilders + +`InternalLineStringBuilder` is removed in favour of `LineStringBuilder`, `InternalPolygonBuilder` in favour of PolygonBuilder` and `Ring` has been replaced with `LineStringBuilder`. Also the abstract base classes `BaseLineStringBuilder` and `BasePolygonBuilder` haven been merged with their corresponding implementations. + +===== RescoreBuilder + +`RecoreBuilder.Rescorer` was merged with `RescoreBuilder`, which now is an abstract superclass. 
QueryRescoreBuilder currently is its only implementation. + +===== PhraseSuggestionBuilder + +The inner DirectCandidateGenerator class has been moved out to its own class called DirectCandidateGeneratorBuilder. + diff --git a/docs/reference/migration/migrate_5_0/mapping.asciidoc b/docs/reference/migration/migrate_5_0/mapping.asciidoc new file mode 100644 index 000000000000..768a2438d3e7 --- /dev/null +++ b/docs/reference/migration/migrate_5_0/mapping.asciidoc @@ -0,0 +1,82 @@ +[[breaking_50_mapping_changes]] +=== Mapping changes + +==== `string` fields replaced by `text`/`keyword` fields + +The `string` field datatype has been replaced by the `text` field for full +text analyzed content, and the `keyword` field for not-analyzed exact string +values. For backwards compatibility purposes, during the 5.x series: + +* `string` fields on pre-5.0 indices will function as before. +* New `string` fields can be added to pre-5.0 indices as before. +* `text` and `keyword` fields can also be added to pre-5.0 indices. +* When adding a `string` field to a new index, the field mapping will be + rewritten as a `text` or `keyword` field if possible, otherwise + an exception will be thrown. Certain configurations that were possible + with `string` fields are no longer possible with `text`/`keyword` fields + such as enabling `term_vectors` on a not-analyzed `keyword` field. + +==== `index` property + +On all field datatypes (except for the deprecated `string` field), the `index` +property now only accepts `true`/`false` instead of `not_analyzed`/`no`. The +`string` field still accepts `analyzed`/`not_analyzed`/`no`. + +==== Doc values on unindexed fields + +Previously, setting a field to `index:no` would also disable doc-values. Now, +doc-values are always enabled on numeric and boolean fields unless +`doc_values` is set to `false`. 
+ +==== Floating points use `float` instead of `double` + +When dynamically mapping a field containing a floating point number, the field +now defaults to using `float` instead of `double`. The reasoning is that +floats should be more than enough for most cases but would decrease storage +requirements significantly. + +==== `fielddata.format` + +Setting `fielddata.format: doc_values` in the mappings used to implicitly +enable doc-values on a field. This no longer works: the only way to enable or +disable doc-values is by using the `doc_values` property of mappings. + +==== Source-transform removed + +The source `transform` feature has been removed. Instead, use an ingest pipeline + +==== `_parent` field no longer indexed + +The join between parent and child documents no longer relies on indexed fields +and therefore from 5.0.0 onwards the `_parent` field is no longer indexed. In +order to find documents that referrer to a specific parent id the new +`parent_id` query can be used. The GET response and hits inside the search +response still include the parent id under the `_parent` key. + +==== Source `format` option + +The `_source` mapping no longer supports the `format` option. It will still be +accepted for indices created before the upgrade to 5.0 for backwards +compatibility, but it will have no effect. Indices created on or after 5.0 +will reject this option. + +==== Object notation + +Core types no longer support the object notation, which was used to provide +per document boosts as follows: + +[source,json] +--------------- +{ + "value": "field_value", + "boost": 42 +} +--------------- + +==== Boost accuracy for queries on `_all` + +Per-field boosts on the `_all` are now compressed into a single byte instead +of the 4 bytes used previously. While this will make the index much more +space-efficient, it also means that index time boosts will be less accurately +encoded. 
+ diff --git a/docs/reference/migration/migrate_5_0/packaging.asciidoc b/docs/reference/migration/migrate_5_0/packaging.asciidoc new file mode 100644 index 000000000000..9be2d4accac2 --- /dev/null +++ b/docs/reference/migration/migrate_5_0/packaging.asciidoc @@ -0,0 +1,24 @@ +[[breaking_50_packaging]] +=== Packaging + +==== Default logging using systemd (since Elasticsearch 2.2.0) + +In previous versions of Elasticsearch, the default logging +configuration routed standard output to /dev/null and standard error to +the journal. However, there are often critical error messages at +startup that are logged to standard output rather than standard error +and these error messages would be lost to the nether. The default has +changed to now route standard output to the journal and standard error +to inherit this setting (these are the defaults for systemd). These +settings can be modified by editing the elasticsearch.service file. + +==== Longer startup times + +In Elasticsearch 5.0.0 the `-XX:+AlwaysPreTouch` flag has been added to the JVM +startup options. This option touches all memory pages used by the JVM heap +during initialization of the HotSpot VM to reduce the chance of having to commit +a memory page during GC time. This will increase the startup time of +Elasticsearch as well as increasing the initial resident memory usage of the +Java process. + + diff --git a/docs/reference/migration/migrate_5_0/percolator.asciidoc b/docs/reference/migration/migrate_5_0/percolator.asciidoc new file mode 100644 index 000000000000..3c560182c87b --- /dev/null +++ b/docs/reference/migration/migrate_5_0/percolator.asciidoc @@ -0,0 +1,41 @@ +[[breaking_50_percolator]] +=== Percolator changes + +==== Percolator is near-real time + +Previously percolators were activated in real-time, i.e. as soon as they were +indexed. Now, changes to the percolator query are visible in near-real time, +as soon as the index has been refreshed. 
This change was required because, in +indices created from 5.0 onwards, the terms used in a percolator query are +automatically indexed to allow for more efficient query selection during +percolation. + +==== Percolator mapping + +The percolate API can no longer accept documents that reference fields that +don't already exist in the mapping. + +The percolate API no longer modifies the mappings. Before the percolate API +could be used to dynamically introduce new fields to the mappings based on the +fields in the document being percolated. This no longer works, because these +unmapped fields are not persisted in the mapping. + +==== Percolator documents returned by search + +Documents with the `.percolate` type were previously excluded from the search +response, unless the `.percolate` type was specified explicitly in the search +request. Now, percolator documents are treated in the same way as any other +document and are returned by search requests. + +==== Percolator `size` default + +The percolator by default sets the `size` option to `10` whereas before this +was unlimited. + +==== Percolate API + +When percolating an existing document then specifying a document in the source +of the percolate request is not allowed any more. + + + diff --git a/docs/reference/migration/migrate_5_0/plugins.asciidoc b/docs/reference/migration/migrate_5_0/plugins.asciidoc new file mode 100644 index 000000000000..10268887417d --- /dev/null +++ b/docs/reference/migration/migrate_5_0/plugins.asciidoc @@ -0,0 +1,99 @@ +[[breaking_50_plugins]] +=== Plugin changes + +The command `bin/plugin` has been renamed to `bin/elasticsearch-plugin`. The +structure of the plugin ZIP archive has changed. All the plugin files must be +contained in a top-level directory called `elasticsearch`. If you use the +gradle build, this structure is automatically generated. + +==== Site plugins removed + +Site plugins have been removed. Site plugins should be reimplemented as Kibana +plugins. 
+ +==== Multicast plugin removed + +Multicast has been removed. Use unicast discovery, or one of the cloud +discovery plugins. + +==== Plugins with custom query implementations + +Plugins implementing custom queries need to implement the `fromXContent(QueryParseContext)` method in their +`QueryParser` subclass rather than `parse`. This method will take care of parsing the query from `XContent` format +into an intermediate query representation that can be streamed between the nodes in binary format, effectively the +query object used in the java api. Also, the query parser needs to implement the `getBuilderPrototype` method that +returns a prototype of the `NamedWriteable` query, which allows to deserialize an incoming query by calling +`readFrom(StreamInput)` against it, which will create a new object, see usages of `Writeable`. The `QueryParser` +also needs to declare the generic type of the query that it supports and it's able to parse. +The query object can then transform itself into a lucene query through the new `toQuery(QueryShardContext)` method, +which returns a lucene query to be executed on the data node. + +Similarly, plugins implementing custom score functions need to implement the `fromXContent(QueryParseContext)` +method in their `ScoreFunctionParser` subclass rather than `parse`. This method will take care of parsing +the function from `XContent` format into an intermediate function representation that can be streamed between +the nodes in binary format, effectively the function object used in the java api. Also, the query parser needs +to implement the `getBuilderPrototype` method that returns a prototype of the `NamedWriteable` function, which +allows to deserialize an incoming function by calling `readFrom(StreamInput)` against it, which will create a +new object, see usages of `Writeable`. The `ScoreFunctionParser` also needs to declare the generic type of the +function that it supports and it's able to parse. 
The function object can then transform itself into a lucene +function through the new `toFunction(QueryShardContext)` method, which returns a lucene function to be executed +on the data node. + +==== Cloud AWS plugin changes + +Cloud AWS plugin has been split in two plugins: + +* {plugins}/discovery-ec2.html[Discovery EC2 plugin] +* {plugins}/repository-s3.html[Repository S3 plugin] + +Proxy settings for both plugins have been renamed: + +* from `cloud.aws.proxy_host` to `cloud.aws.proxy.host` +* from `cloud.aws.ec2.proxy_host` to `cloud.aws.ec2.proxy.host` +* from `cloud.aws.s3.proxy_host` to `cloud.aws.s3.proxy.host` +* from `cloud.aws.proxy_port` to `cloud.aws.proxy.port` +* from `cloud.aws.ec2.proxy_port` to `cloud.aws.ec2.proxy.port` +* from `cloud.aws.s3.proxy_port` to `cloud.aws.s3.proxy.port` + +==== Cloud Azure plugin changes + +Cloud Azure plugin has been split in three plugins: + +* {plugins}/discovery-azure.html[Discovery Azure plugin] +* {plugins}/repository-azure.html[Repository Azure plugin] +* {plugins}/store-smb.html[Store SMB plugin] + +If you were using the `cloud-azure` plugin for snapshot and restore, you had in `elasticsearch.yml`: + +[source,yaml] +----- +cloud: + azure: + storage: + account: your_azure_storage_account + key: your_azure_storage_key +----- + +You need to give a unique id to the storage details now as you can define multiple storage accounts: + +[source,yaml] +----- +cloud: + azure: + storage: + my_account: + account: your_azure_storage_account + key: your_azure_storage_key +----- + + +==== Cloud GCE plugin changes + +Cloud GCE plugin has been renamed to {plugins}/discovery-gce.html[Discovery GCE plugin]. + + +==== Mapper Attachments plugin deprecated + +Mapper attachments has been deprecated. Users should use now the {plugins}/ingest-attachment.html[`ingest-attachment`] +plugin. 
+ diff --git a/docs/reference/migration/migrate_5_0/rest.asciidoc b/docs/reference/migration/migrate_5_0/rest.asciidoc new file mode 100644 index 000000000000..590a097f021d --- /dev/null +++ b/docs/reference/migration/migrate_5_0/rest.asciidoc @@ -0,0 +1,17 @@ + +[[breaking_50_rest_api_changes]] +=== REST API changes + +==== id values longer than 512 bytes are rejected + +When specifying an `_id` value longer than 512 bytes, the request will be +rejected. + +==== `/_optimize` endpoint removed + +The deprecated `/_optimize` endpoint has been removed. The `/_forcemerge` +endpoint should be used in lieu of optimize. + +The `GET` HTTP verb for `/_forcemerge` is no longer supported, please use the +`POST` HTTP verb. + diff --git a/docs/reference/migration/migrate_5_0/search.asciidoc b/docs/reference/migration/migrate_5_0/search.asciidoc new file mode 100644 index 000000000000..48807bf187ac --- /dev/null +++ b/docs/reference/migration/migrate_5_0/search.asciidoc @@ -0,0 +1,141 @@ +[[breaking_50_search_changes]] +=== Search and Query DSL changes + +==== `search_type` + +===== `search_type=count` removed + +The `count` search type was deprecated since version 2.0.0 and is now removed. +In order to get the same benefits, you just need to set the value of the `size` +parameter to `0`. + +For instance, the following request: + +[source,sh] +--------------- +GET /my_index/_search?search_type=count +{ + "aggs": { + "my_terms": { + "terms": { + "field": "foo" + } + } + } +} +--------------- + +can be replaced with: + +[source,sh] +--------------- +GET /my_index/_search +{ + "size": 0, + "aggs": { + "my_terms": { + "terms": { + "field": "foo" + } + } + } +} +--------------- + +===== `search_type=scan` removed + +The `scan` search type was deprecated since version 2.1.0 and is now removed. 
+All benefits from this search type can now be achieved by doing a scroll +request that sorts documents in `_doc` order, for instance: + +[source,sh] +--------------- +GET /my_index/_search?scroll=2m +{ + "sort": [ + "_doc" + ] +} +--------------- + +Scroll requests sorted by `_doc` have been optimized to more efficiently resume +from where the previous request stopped, so this will have the same performance +characteristics as the former `scan` search type. + +==== `fields` parameter + +The `fields` parameter used to try to retrieve field values from stored +fields, and fall back to extracting from the `_source` if a field is not +marked as stored. Now, the `fields` parameter will only return stored fields +-- it will no longer extract values from the `_source`. + +==== search-exists API removed + +The search exists api has been removed in favour of using the search api with +`size` set to `0` and `terminate_after` set to `1`. + + +==== Deprecated queries removed + +The following deprecated queries have been removed: + +`filtered`:: Use `bool` query instead, which supports `filter` clauses too. +`and`:: Use `must` clauses in a `bool` query instead. +`or`:: Use `should` clauses in a `bool` query instead. +`limit`:: Use the `terminate_after` parameter instead. +`fquery`:: Is obsolete after filters and queries have been merged. +`query`:: Is obsolete after filters and queries have been merged. +`query_binary`:: Was undocumented and has been removed. +`filter_binary`:: Was undocumented and has been removed. + + +==== Changes to queries + +* Removed support for the deprecated `min_similarity` parameter in `fuzzy + query`, in favour of `fuzziness`. + +* Removed support for the deprecated `fuzzy_min_sim` parameter in + `query_string` query, in favour of `fuzziness`. + +* Removed support for the deprecated `edit_distance` parameter in completion + suggester, in favour of `fuzziness`. 
+ +* Removed support for the deprecated `filter` and `no_match_filter` fields in `indices` query, +in favour of `query` and `no_match_query`. + +* Removed support for the deprecated `filter` fields in `nested` query, in favour of `query`. + +* Removed support for the deprecated `minimum_should_match` and + `disable_coord` in `terms` query, use `bool` query instead. Also removed + support for the deprecated `execution` parameter. + +* Removed support for the top level `filter` element in `function_score` query, replaced by `query`. + +* The `collect_payloads` parameter of the `span_near` query has been deprecated. Payloads will be loaded when needed. + +* The `score_type` parameter to the `has_child` and `has_parent` queries has been removed in favour of `score_mode`. + Also, the `sum` score mode has been removed in favour of the `total` mode. + +* When the `max_children` parameter was set to `0` on the `has_child` query + then there was no upper limit on how many child documents were allowed to + match. Now, `0` really means that zero child documents are allowed. If no + upper limit is needed then the `max_children` parameter shouldn't be specified + at all. + + +==== Top level `filter` parameter + +Removed support for the deprecated top level `filter` in the search api, +replaced by `post_filter`. + +==== Highlighters + +Removed support for multiple highlighter names, the only supported ones are: +`plain`, `fvh` and `postings`. + +==== Term vectors API + +The term vectors APIs no longer persist unmapped fields in the mappings. + +The `dfs` parameter to the term vectors API has been removed completely. Term +vectors don't support distributed document frequencies anymore. 
diff --git a/docs/reference/migration/migrate_5_0/settings.asciidoc b/docs/reference/migration/migrate_5_0/settings.asciidoc new file mode 100644 index 000000000000..002d6cf05dfc --- /dev/null +++ b/docs/reference/migration/migrate_5_0/settings.asciidoc @@ -0,0 +1,174 @@ +[[breaking_50_settings_changes]] +=== Settings changes + +From Elasticsearch 5.0 on all settings are validated before they are applied. +Node level and default index level settings are validated on node startup, +dynamic cluster and index settings are validated before they are updated/added +to the cluster state. + +Every setting must be a *known* setting. All settings must have been +registered with the node or transport client they are used with. This implies +that plugins that define custom settings must register all of their settings +during plugin loading using the `SettingsModule#registerSettings(Setting)` +method. + +==== Node settings + +The `name` setting has been removed and is replaced by `node.name`. Usage of +`-Dname=some_node_name` is not supported anymore. + +==== Transport Settings + +All settings with a `netty` infix have been replaced by their already existing +`transport` synonyms. For instance `transport.netty.bind_host` is no longer +supported and should be replaced by the superseding setting +`transport.bind_host`. + +==== Script mode settings + +Previously script mode settings (e.g., "script.inline: true", +"script.engine.groovy.inline.aggs: false", etc.) accepted the values +`on`, `true`, `1`, and `yes` for enabling a scripting mode, and the +values `off`, `false`, `0`, and `no` for disabling a scripting mode. +The variants `on`, `1`, and `yes` for enabling and `off`, `0`, +and `no` for disabling are no longer supported. + + +==== Security manager settings + +The option to disable the security manager `security.manager.enabled` has been +removed. In order to grant special permissions to Elasticsearch, users must +edit the local Java Security Policy. 
+ +==== Network settings + +The `_non_loopback_` value for settings like `network.host` would arbitrarily +pick the first interface not marked as loopback. Instead, specify by address +scope (e.g. `_local_,_site_` for all loopback and private network addresses) +or by explicit interface names, hostnames, or addresses. + +==== Forbid changing of thread pool types + +Previously, <> could be dynamically +adjusted. The thread pool type effectively controls the backing queue for the +thread pool and modifying this is an expert setting with minimal practical +benefits and high risk of being misused. The ability to change the thread pool +type for any thread pool has been removed. It is still possible to adjust +relevant thread pool parameters for each of the thread pools (e.g., depending +on the thread pool type, `keep_alive`, `queue_size`, etc.). + + +==== Analysis settings + +The `index.analysis.analyzer.default_index` analyzer is not supported anymore. +If you wish to change the analyzer to use for indexing, change the +`index.analysis.analyzer.default` analyzer instead. + +==== Ping timeout settings + +Previously, there were three settings for the ping timeout: +`discovery.zen.initial_ping_timeout`, `discovery.zen.ping.timeout` and +`discovery.zen.ping_timeout`. The former two have been removed and the only +setting key for the ping timeout is now `discovery.zen.ping_timeout`. The +default value for ping timeouts remains at three seconds. 
+ +==== Recovery settings + +Recovery settings deprecated in 1.x have been removed: + + * `index.shard.recovery.translog_size` is superseded by `indices.recovery.translog_size` + * `index.shard.recovery.translog_ops` is superseded by `indices.recovery.translog_ops` + * `index.shard.recovery.file_chunk_size` is superseded by `indices.recovery.file_chunk_size` + * `index.shard.recovery.concurrent_streams` is superseded by `indices.recovery.concurrent_streams` + * `index.shard.recovery.concurrent_small_file_streams` is superseded by `indices.recovery.concurrent_small_file_streams` + * `indices.recovery.max_size_per_sec` is superseded by `indices.recovery.max_bytes_per_sec` + +If you are using any of these settings please take the time to review their +purpose. All of the settings above are considered _expert settings_ and should +only be used if absolutely necessary. If you have set any of the above setting +as persistent cluster settings please use the settings update API and set +their superseded keys accordingly. + +The following settings have been removed without replacement + + * `indices.recovery.concurrent_small_file_streams` - recoveries are now single threaded. The number of concurrent outgoing recoveries are throttled via allocation deciders + * `indices.recovery.concurrent_file_streams` - recoveries are now single threaded. The number of concurrent outgoing recoveries are throttled via allocation deciders + +==== Translog settings + +The `index.translog.flush_threshold_ops` setting is not supported anymore. In +order to control flushes based on the transaction log growth use +`index.translog.flush_threshold_size` instead. + +Changing the translog type with `index.translog.fs.type` is not supported +anymore, the `buffered` implementation is now the only available option and +uses a fixed `8kb` buffer. + +The translog by default is fsynced after every `index`, `create`, `update`, +`delete`, or `bulk` request. 
The ability to fsync on every operation is not +necessary anymore. In fact, it can be a performance bottleneck and it's trappy +since it is enabled by a special value set on `index.translog.sync_interval`. +Now, `index.translog.sync_interval` doesn't accept a value less than `100ms` +which prevents fsyncing too often if async durability is enabled. The special +value `0` is no longer supported. + +==== Request Cache Settings + +The deprecated settings `index.cache.query.enable` and +`indices.cache.query.size` have been removed and are replaced with +`index.requests.cache.enable` and `indices.requests.cache.size` respectively. + +`indices.requests.cache.clean_interval` has been replaced with +`indices.cache.clean_interval` and is no longer supported. + +==== Field Data Cache Settings + +The `indices.fielddata.cache.clean_interval` setting has been replaced with +`indices.cache.clean_interval`. + +==== Allocation settings + +The `cluster.routing.allocation.concurrent_recoveries` setting has been +replaced with `cluster.routing.allocation.node_concurrent_recoveries`. + +==== Similarity settings + +The 'default' similarity has been renamed to 'classic'. + +==== Indexing settings + +The `indices.memory.min_shard_index_buffer_size` and +`indices.memory.max_shard_index_buffer_size` have been removed as +Elasticsearch now allows any one shard to use any amount of heap as long as the +total indexing buffer heap used across all shards is below the node's +`indices.memory.index_buffer_size` (defaults to 10% of the JVM heap). + +==== Removed es.max-open-files + +Setting the system property es.max-open-files to true to get +Elasticsearch to print the number of maximum open files for the +Elasticsearch process has been removed. This same information can be +obtained from the <> API, and a warning is logged +on startup if it is set too low. 
+ +==== Removed es.netty.gathering + +Disabling Netty from using NIO gathering could be done via the escape +hatch of setting the system property "es.netty.gathering" to "false". +Time has proven enabling gathering by default is a non-issue and this +non-documented setting has been removed. + +==== Removed es.useLinkedTransferQueue + +The system property `es.useLinkedTransferQueue` could be used to +control the queue implementation used in the cluster service and the +handling of ping responses during discovery. This was an undocumented +setting and has been removed. + +==== Cache concurrency level settings removed + +Two cache concurrency level settings +`indices.requests.cache.concurrency_level` and +`indices.fielddata.cache.concurrency_level` have been removed because they no longer apply to +the cache implementation used for the request cache and the field data cache. + From 5f48b9c86a988ac37b7170e6075d0b4d6ee18a84 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Sun, 13 Mar 2016 21:18:44 +0100 Subject: [PATCH 203/320] Removed breaking changes docs for < 5.0 --- docs/reference/migration/migrate_1_0.asciidoc | 372 --------------- docs/reference/migration/migrate_1_4.asciidoc | 92 ---- docs/reference/migration/migrate_1_6.asciidoc | 17 - docs/reference/migration/migrate_2_0.asciidoc | 73 --- .../migration/migrate_2_0/aggs.asciidoc | 70 --- .../migration/migrate_2_0/crud.asciidoc | 130 ------ .../migration/migrate_2_0/index_apis.asciidoc | 43 -- .../migration/migrate_2_0/java.asciidoc | 147 ------ .../migration/migrate_2_0/mapping.asciidoc | 439 ------------------ .../migration/migrate_2_0/network.asciidoc | 39 -- .../migration/migrate_2_0/packaging.asciidoc | 84 ---- .../migrate_2_0/parent_child.asciidoc | 43 -- .../migration/migrate_2_0/query_dsl.asciidoc | 189 -------- .../migration/migrate_2_0/removals.asciidoc | 100 ---- .../migration/migrate_2_0/scripting.asciidoc | 103 ---- .../migration/migrate_2_0/search.asciidoc | 122 ----- .../migration/migrate_2_0/settings.asciidoc 
| 204 -------- .../migrate_2_0/snapshot_restore.asciidoc | 38 -- .../migration/migrate_2_0/stats.asciidoc | 52 --- .../migration/migrate_2_0/striping.asciidoc | 21 - docs/reference/migration/migrate_2_1.asciidoc | 87 ---- docs/reference/migration/migrate_2_2.asciidoc | 80 ---- docs/reference/migration/migrate_2_3.asciidoc | 19 - 23 files changed, 2564 deletions(-) delete mode 100644 docs/reference/migration/migrate_1_0.asciidoc delete mode 100644 docs/reference/migration/migrate_1_4.asciidoc delete mode 100644 docs/reference/migration/migrate_1_6.asciidoc delete mode 100644 docs/reference/migration/migrate_2_0.asciidoc delete mode 100644 docs/reference/migration/migrate_2_0/aggs.asciidoc delete mode 100644 docs/reference/migration/migrate_2_0/crud.asciidoc delete mode 100644 docs/reference/migration/migrate_2_0/index_apis.asciidoc delete mode 100644 docs/reference/migration/migrate_2_0/java.asciidoc delete mode 100644 docs/reference/migration/migrate_2_0/mapping.asciidoc delete mode 100644 docs/reference/migration/migrate_2_0/network.asciidoc delete mode 100644 docs/reference/migration/migrate_2_0/packaging.asciidoc delete mode 100644 docs/reference/migration/migrate_2_0/parent_child.asciidoc delete mode 100644 docs/reference/migration/migrate_2_0/query_dsl.asciidoc delete mode 100644 docs/reference/migration/migrate_2_0/removals.asciidoc delete mode 100644 docs/reference/migration/migrate_2_0/scripting.asciidoc delete mode 100644 docs/reference/migration/migrate_2_0/search.asciidoc delete mode 100644 docs/reference/migration/migrate_2_0/settings.asciidoc delete mode 100644 docs/reference/migration/migrate_2_0/snapshot_restore.asciidoc delete mode 100644 docs/reference/migration/migrate_2_0/stats.asciidoc delete mode 100644 docs/reference/migration/migrate_2_0/striping.asciidoc delete mode 100644 docs/reference/migration/migrate_2_1.asciidoc delete mode 100644 docs/reference/migration/migrate_2_2.asciidoc delete mode 100644 
docs/reference/migration/migrate_2_3.asciidoc diff --git a/docs/reference/migration/migrate_1_0.asciidoc b/docs/reference/migration/migrate_1_0.asciidoc deleted file mode 100644 index 1e917c4a0d9c..000000000000 --- a/docs/reference/migration/migrate_1_0.asciidoc +++ /dev/null @@ -1,372 +0,0 @@ -[[breaking-changes-1.0]] -== Breaking changes in 1.0 - -This section discusses the changes that you need to be aware of when migrating -your application to Elasticsearch 1.0. - -=== System and settings - -* Elasticsearch now runs in the foreground by default. There is no more `-f` - flag on the command line. Instead, to run elasticsearch as a daemon, use - the `-d` flag: - -[source,sh] ---------------- -./bin/elasticsearch -d ---------------- - -* Command line settings can now be passed without the `-Des.` prefix, for - instance: - -[source,sh] ---------------- -./bin/elasticsearch --node.name=search_1 --cluster.name=production ---------------- - -* Elasticsearch on 64 bit Linux now uses <> by default. Make - sure that you set <> to a sufficiently high - number. The RPM and Debian packages default this value to `262144`. - -* The RPM and Debian packages no longer start Elasticsearch by default. - -* The `cluster.routing.allocation` settings (`disable_allocation`, - `disable_new_allocation` and `disable_replica_location`) have been - <>: -+ -[source,yaml] ---------------- -cluster.routing.allocation.enable: all|primaries|new_primaries|none ---------------- - -=== Stats and Info APIs - -The <>, <>, -<> and <> -APIs have all been changed to make their format more RESTful and less clumsy. 
- -For instance, if you just want the `nodes` section of the `cluster_state`, -instead of: - -[source,sh] ---------------- -GET /_cluster/state?filter_metadata&filter_routing_table&filter_blocks ---------------- - -you now use: - -[source,sh] ---------------- -GET /_cluster/state/nodes ---------------- - -Similarly for the `nodes_stats` API, if you want the `transport` and `http` -metrics only, instead of: - -[source,sh] ---------------- -GET /_nodes/stats?clear&transport&http ---------------- - -you now use: - -[source,sh] ---------------- -GET /_nodes/stats/transport,http ---------------- - -See the links above for full details. - - -=== Indices APIs - -The `mapping`, `alias`, `settings`, and `warmer` index APIs are all similar -but there are subtle differences in the order of the URL and the response -body. For instance, adding a mapping and a warmer look slightly different: - -[source,sh] ---------------- -PUT /{index}/{type}/_mapping -PUT /{index}/_warmer/{name} ---------------- - -These URLs have been unified as: - -[source,sh] ---------------- -PUT /{indices}/_mapping/{type} -PUT /{indices}/_alias/{name} -PUT /{indices}/_warmer/{name} - -GET /{indices}/_mapping/{types} -GET /{indices}/_alias/{names} -GET /{indices}/_settings/{names} -GET /{indices}/_warmer/{names} - -DELETE /{indices}/_mapping/{types} -DELETE /{indices}/_alias/{names} -DELETE /{indices}/_warmer/{names} ---------------- - -All of the `{indices}`, `{types}` and `{names}` parameters can be replaced by: - - * `_all`, `*` or blank (ie left out altogether), all of which mean ``all'' - * wildcards like `test*` - * comma-separated lists: `index_1,test_*` - -The only exception is `DELETE` which doesn't accept blank (missing) -parameters. If you want to delete something, you should be specific. - -Similarly, the return values for `GET` have been unified with the following -rules: - -* Only return values that exist. 
If you try to `GET` a mapping which doesn't - exist, then the result will be an empty object: `{}`. We no longer throw a - `404` if the requested mapping/warmer/alias/setting doesn't exist. - -* The response format always has the index name, then the section, then the - element name, for instance: -+ -[source,js] ---------------- -{ - "my_index": { - "mappings": { - "my_type": {...} - } - } -} ---------------- -+ -This is a breaking change for the `get_mapping` API. - -In the future we will also provide plural versions to allow putting multiple mappings etc in a single request. - -See <>, <>, <>, -<>, <>, -`warmers`, and <> for more details. - -=== Index request - -Previously a document could be indexed as itself, or wrapped in an outer -object which specified the `type` name: - -[source,js] ---------------- -PUT /my_index/my_type/1 -{ - "my_type": { - ... doc fields ... - } -} ---------------- - -This led to some ambiguity when a document also included a field with the same -name as the `type`. We no longer accept the outer `type` wrapper, but this -behaviour can be reenabled on an index-by-index basis with the setting: -`index.mapping.allow_type_wrapper`. - -=== Search requests - -While the `search` API takes a top-level `query` parameter, the -<>, `delete-by-query` and -<> requests expected the whole body to be a -query. These now _require_ a top-level `query` parameter: - -[source,js] ---------------- -GET /_count -{ - "query": { - "match": { - "title": "Interesting stuff" - } - } -} ---------------- - -Also, the top-level `filter` parameter in search has been renamed to -<>, to indicate that it should not -be used as the primary way to filter search results (use a -<> instead), but only to filter -results AFTER aggregations have been calculated. 
- -This example counts the top colors in all matching docs, but only returns docs -with color `red`: - -[source,js] ---------------- -GET /_search -{ - "query": { - "match_all": {} - }, - "aggs": { - "colors": { - "terms": { "field": "color" } - } - }, - "post_filter": { - "term": { - "color": "red" - } - } -} ---------------- - -=== Multi-fields - -Multi-fields are dead! Long live multi-fields! Well, the field type -`multi_field` has been removed. Instead, any of the core field types -(excluding `object` and `nested`) now accept a `fields` parameter. It's the -same thing, but nicer. Instead of: - -[source,js] ---------------- -"title": { - "type": "multi_field", - "fields": { - "title": { "type": "string" }, - "raw": { "type": "string", "index": "not_analyzed" } - } -} ---------------- - -you can now write: - -[source,js] ---------------- -"title": { - "type": "string", - "fields": { - "raw": { "type": "string", "index": "not_analyzed" } - } -} ---------------- - -Existing multi-fields will be upgraded to the new format automatically. - -Also, instead of having to use the arcane `path` and `index_name` parameters -in order to index multiple fields into a single ``custom +_all+ field'', you -can now use the <>. - -=== Stopwords - -Previously, the <> and -<> analyzers used the list of English stopwords -by default, which caused some hard to debug indexing issues. Now they are set to -use the empty stopwords list (ie `_none_`) instead. - -=== Dates without years - -When dates are specified without a year, for example: `Dec 15 10:00:00` they -are treated as dates in 2000 during indexing and range searches... except for -the upper included bound `lte` where they were treated as dates in 1970! Now, -all https://github.com/elastic/elasticsearch/issues/4451[dates without years] -use `1970` as the default. - -=== Parameters - -* Geo queries used to use `miles` as the default unit. 
And we - http://en.wikipedia.org/wiki/Mars_Climate_Orbiter[all know what - happened at NASA] because of that decision. The new default unit is - https://github.com/elastic/elasticsearch/issues/4515[`meters`]. - -* For all queries that support _fuzziness_, the `min_similarity`, `fuzziness` - and `edit_distance` parameters have been unified as the single parameter - `fuzziness`. See <> for details of accepted values. - -* The `ignore_missing` parameter has been replaced by the `expand_wildcards`, - `ignore_unavailable` and `allow_no_indices` parameters, all of which have - sensible defaults. See <> for more. - -* An index name (or pattern) is now required for destructive operations like - deleting indices: -+ -[source,sh] ---------------- -# v0.90 - delete all indices: -DELETE / - -# v1.0 - delete all indices: -DELETE /_all -DELETE /* ---------------- -+ -Setting `action.destructive_requires_name` to `true` provides further safety -by disabling wildcard expansion on destructive actions. - -=== Return values - -* The `ok` return value has been removed from all response bodies as it added - no useful information. - -* The `found`, `not_found` and `exists` return values have been unified as - `found` on all relevant APIs. - -* Field values, in response to the <> - parameter, are now always returned as arrays. A field could have single or - multiple values, which meant that sometimes they were returned as scalars - and sometimes as arrays. By always returning arrays, this simplifies user - code. The only exception to this rule is when `fields` is used to retrieve - metadata like the `routing` value, which are always singular. Metadata - fields are always returned as scalars. -+ -The `fields` parameter is intended to be used for retrieving stored fields, -rather than for fields extracted from the `_source`. That means that it can no -longer be used to return whole objects and it no longer accepts the -`_source.fieldname` format. 
For these you should use the -<> -parameters instead. - -* Settings, like `index.analysis.analyzer.default` are now returned as proper - nested JSON objects, which makes them easier to work with programmatically: -+ -[source,js] ---------------- -{ - "index": { - "analysis": { - "analyzer": { - "default": xxx - } - } - } -} ---------------- -+ -You can choose to return them in flattened format by passing `?flat_settings` -in the query string. - -* The <> API no longer supports the text response - format, but does support JSON and YAML. - -=== Deprecations - -* The `text` query has been removed. Use the - <> query instead. - -* The `field` query has been removed. Use the - <> query instead. - -* Per-document boosting with the `_boost` field has - been removed. You can use the - <> instead. - -* The `path` parameter in mappings has been deprecated. Use the - <> parameter instead. - -* The `custom_score` and `custom_boost_score` is no longer supported. You can - use <> instead. - -=== Percolator - -The percolator has been redesigned and because of this the dedicated `_percolator` index is no longer used by the percolator, -but instead the percolator works with a dedicated `.percolator` type. Read the http://www.elastic.co/blog/percolator-redesign-blog-post[redesigned percolator] -blog post for the reasons why the percolator has been redesigned. - -Elasticsearch will *not* delete the `_percolator` index when upgrading, only the percolate api will not use the queries -stored in the `_percolator` index. In order to use the already stored queries, you can just re-index the queries from the -`_percolator` index into any index under the reserved `.percolator` type. The format in which the percolate queries -were stored has *not* been changed. So a simple script that does a scan search to retrieve all the percolator queries -and then does a bulk request into another index should be sufficient. 
diff --git a/docs/reference/migration/migrate_1_4.asciidoc b/docs/reference/migration/migrate_1_4.asciidoc deleted file mode 100644 index c20504bbddfb..000000000000 --- a/docs/reference/migration/migrate_1_4.asciidoc +++ /dev/null @@ -1,92 +0,0 @@ -[[breaking-changes-1.4]] -== Breaking changes in 1.4 - -This section discusses the changes that you need to be aware of when migrating -your application from Elasticsearch 1.x to Elasticsearch 1.4. - -[float] -=== Percolator - -In indices created with version `1.4.0` or later, percolation queries can only -refer to fields that already exist in the mappings in that index. There are -two ways to make sure that a field mapping exist: - -* Add or update a mapping via the <> or - <> apis. -* Percolate a document before registering a query. Percolating a document can - add field mappings dynamically, in the same way as happens when indexing a - document. - -[float] -=== Aliases - -<> can include <> which -are automatically applied to any search performed via the alias. -<> created with version `1.4.0` or later can only -refer to field names which exist in the mappings of the index (or indices) -pointed to by the alias. - -Add or update a mapping via the <> or -<> apis. - -[float] -=== Indices APIs - -The get warmer api will return a section for `warmers` even if there are -no warmers. This ensures that the following two examples are equivalent: - -[source,js] --------------------------------------------------- -curl -XGET 'http://localhost:9200/_all/_warmers' - -curl -XGET 'http://localhost:9200/_warmers' --------------------------------------------------- - -The <> will return a section for `aliases` even if there are -no aliases. 
This ensures that the following two examples are equivalent: - -[source,js] --------------------------------------------------- -curl -XGET 'http://localhost:9200/_all/_aliases' - -curl -XGET 'http://localhost:9200/_aliases' --------------------------------------------------- - -The <> will return a section for `mappings` even if there are -no mappings. This ensures that the following two examples are equivalent: - -[source,js] --------------------------------------------------- -curl -XGET 'http://localhost:9200/_all/_mappings' - -curl -XGET 'http://localhost:9200/_mappings' --------------------------------------------------- - -[float] -=== Bulk UDP - -Bulk UDP has been deprecated and will be removed in 2.0. -You should use <> instead. -Each cluster must have an elected master node in order to be fully operational. Once a node loses its elected master -node it will reject some or all operations. - -[float] -=== Zen discovery - -On versions before `1.4.0.Beta1` all operations are rejected when a node loses its elected master. From `1.4.0.Beta1` -only write operations will be rejected by default. Read operations will still be served based on the information available -to the node, which may result in being partial and possibly also stale. If the default is undesired then the -pre `1.4.0.Beta1` behaviour can be enabled, see: <> - -[float] -=== More Like This Field - -The More Like This Field query has been deprecated in favor of the <> -restrained set to a specific `field`. It will be removed in 2.0. - -[float] -=== MVEL is deprecated - -Groovy is the new default scripting language in Elasticsearch, and is enabled in `sandbox` mode -by default. 
MVEL has been removed from core, but is available as a plugin: -https://github.com/elastic/elasticsearch-lang-mvel diff --git a/docs/reference/migration/migrate_1_6.asciidoc b/docs/reference/migration/migrate_1_6.asciidoc deleted file mode 100644 index 9540d3b67598..000000000000 --- a/docs/reference/migration/migrate_1_6.asciidoc +++ /dev/null @@ -1,17 +0,0 @@ -[[breaking-changes-1.6]] -== Breaking changes in 1.6 - -This section discusses the changes that you need to be aware of when migrating -your application from Elasticsearch 1.x to Elasticsearch 1.6. - -[float] -=== More Like This API - -The More Like This API query has been deprecated and will be removed in 2.0. Instead use the <>. - -[float] -=== `top_children` query - -The `top_children` query has been deprecated and will be removed in 2.0. Instead the `has_child` query should be used. -The `top_children` query isn't always faster than the `has_child` query and the `top_children` query is often inaccurate. -The total hits and any aggregations in the same search request will likely be off. diff --git a/docs/reference/migration/migrate_2_0.asciidoc b/docs/reference/migration/migrate_2_0.asciidoc deleted file mode 100644 index adf12e7da5c5..000000000000 --- a/docs/reference/migration/migrate_2_0.asciidoc +++ /dev/null @@ -1,73 +0,0 @@ -[[breaking-changes-2.0]] -== Breaking changes in 2.0 - -This section discusses the changes that you need to be aware of when migrating -your application to Elasticsearch 2.0. - -[float] -=== Indices created before 0.90 - -Elasticsearch 2.0 can read indices created in version 0.90 and above. If any -of your indices were created before 0.90 you will need to upgrade to the -latest 1.x version of Elasticsearch first, in order to upgrade your indices or -to delete the old indices. Elasticsearch will not start in the presence of old -indices. 
- -[float] -=== Elasticsearch migration plugin - -We have provided the https://github.com/elastic/elasticsearch-migration[Elasticsearch migration plugin] -to help you detect any issues that you may have when upgrading to -Elasticsearch 2.0. Please install and run the plugin *before* upgrading. - -[float] -=== Also see - -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> - -include::migrate_2_0/removals.asciidoc[] - -include::migrate_2_0/network.asciidoc[] - -include::migrate_2_0/striping.asciidoc[] - -include::migrate_2_0/mapping.asciidoc[] - -include::migrate_2_0/crud.asciidoc[] - -include::migrate_2_0/query_dsl.asciidoc[] - -include::migrate_2_0/search.asciidoc[] - -include::migrate_2_0/aggs.asciidoc[] - -include::migrate_2_0/parent_child.asciidoc[] - -include::migrate_2_0/scripting.asciidoc[] - -include::migrate_2_0/index_apis.asciidoc[] - -include::migrate_2_0/snapshot_restore.asciidoc[] - -include::migrate_2_0/packaging.asciidoc[] - -include::migrate_2_0/settings.asciidoc[] - -include::migrate_2_0/stats.asciidoc[] - -include::migrate_2_0/java.asciidoc[] diff --git a/docs/reference/migration/migrate_2_0/aggs.asciidoc b/docs/reference/migration/migrate_2_0/aggs.asciidoc deleted file mode 100644 index 1351b4cb4a39..000000000000 --- a/docs/reference/migration/migrate_2_0/aggs.asciidoc +++ /dev/null @@ -1,70 +0,0 @@ -[[breaking_20_aggregation_changes]] -=== Aggregation changes - -==== Min doc count defaults to zero - -Both the `histogram` and `date_histogram` aggregations now have a default -`min_doc_count` of `0` instead of `1`. - -==== Timezone for date field - -Specifying the `time_zone` parameter in queries or aggregations on fields of -type `date` must now be either an ISO 8601 UTC offset, or a timezone id. For -example, the value `+1:00` must now be written as `+01:00`. 
- -==== Time zones and offsets - -The `histogram` and the `date_histogram` aggregation now support a simplified -`offset` option that replaces the previous `pre_offset` and `post_offset` -rounding options. Instead of having to specify two separate offset shifts of -the underlying buckets, the `offset` option moves the bucket boundaries in -positive or negative direction depending on its argument. - -The `date_histogram` options for `pre_zone` and `post_zone` are replaced by -the `time_zone` option. The behavior of `time_zone` is equivalent to the -former `pre_zone` option. Setting `time_zone` to a value like "+01:00" now -will lead to the bucket calculations being applied in the specified time zone. -The `key` is returned as the timestamp in UTC, but the `key_as_string` is -returned in the time zone specified. - -In addition to this, the `pre_zone_adjust_large_interval` is removed because -we now always return dates and bucket keys in UTC. - -==== Including/excluding terms - -`include`/`exclude` filtering on the `terms` aggregation now uses the same -syntax as <> instead of the Java regular -expression syntax. While simple regexps should still work, more complex ones -might need some rewriting. Also, the `flags` parameter is no longer supported. - -==== Boolean fields - -Aggregations on `boolean` fields will now return `0` and `1` as keys, and -`"true"` and `"false"` as string keys. See <> for more -information. - - -==== Java aggregation classes - -The `date_histogram` aggregation now returns a `Histogram` object in the -response, and the `DateHistogram` class has been removed. Similarly the -`date_range`, `ipv4_range`, and `geo_distance` aggregations all return a -`Range` object in the response, and the `IPV4Range`, `DateRange`, and -`GeoDistance` classes have been removed. - -The motivation for this is to have a single response API for the Range and -Histogram aggregations regardless of the type of data being queried. 
To -support this some changes were made in the `MultiBucketAggregation` interface -which applies to all bucket aggregations: - -* The `getKey()` method now returns `Object` instead of `String`. The actual - object type returned depends on the type of aggregation requested (e.g. the - `date_histogram` will return a `DateTime` object for this method whereas a - `histogram` will return a `Number`). -* A `getKeyAsString()` method has been added to return the String - representation of the key. -* All other `getKeyAsX()` methods have been removed. -* The `getBucketAsKey(String)` methods have been removed on all aggregations - except the `filters` and `terms` aggregations. - - diff --git a/docs/reference/migration/migrate_2_0/crud.asciidoc b/docs/reference/migration/migrate_2_0/crud.asciidoc deleted file mode 100644 index ef3ba93e67ec..000000000000 --- a/docs/reference/migration/migrate_2_0/crud.asciidoc +++ /dev/null @@ -1,130 +0,0 @@ -[[breaking_20_crud_and_routing_changes]] -=== CRUD and routing changes - -==== Explicit custom routing - -Custom `routing` values can no longer be extracted from the document body, but -must be specified explicitly as part of the query string, or in the metadata -line in the <> API. See <> for an -example. - -==== Routing hash function - -The default hash function that is used for routing has been changed from -`djb2` to `murmur3`. This change should be transparent unless you relied on -very specific properties of `djb2`. This will help ensure a better balance of -the document counts between shards. - -In addition, the following routing-related node settings have been deprecated: - -`cluster.routing.operation.hash.type`:: - - This was an undocumented setting that allowed to configure which hash function - to use for routing. `murmur3` is now enforced on new indices. 
- -`cluster.routing.operation.use_type`:: - - This was an undocumented setting that allowed to take the `_type` of the - document into account when computing its shard (default: `false`). `false` is - now enforced on new indices. - -==== Delete API with custom routing - -The delete API used to be broadcast to all shards in the index which meant -that, when using custom routing, the `routing` parameter was optional. Now, -the delete request is forwarded only to the shard holding the document. If you -are using custom routing then you should specify the `routing` value when -deleting a document, just as is already required for the `index`, `create`, -and `update` APIs. - -To make sure that you never forget a routing value, make routing required with -the following mapping: - -[source,js] ---------------------------- -PUT my_index -{ - "mappings": { - "my_type": { - "_routing": { - "required": true - } - } - } -} ---------------------------- - -==== All stored meta-fields returned by default - -Previously, meta-fields like `_routing`, `_timestamp`, etc would only be -included in a GET request if specifically requested with the `fields` -parameter. Now, all meta-fields which have stored values will be returned by -default. Additionally, they are now returned at the top level (along with -`_index`, `_type`, and `_id`) instead of in the `fields` element. - -For instance, the following request: - -[source,sh] ---------------- -GET /my_index/my_type/1 ---------------- - -might return: - -[source,js] ---------------- -{ - "_index": "my_index", - "_type": "my_type", - "_id": "1", - "_timestamp": 10000000, <1> - "_source": { - "foo" : [ "bar" ] - } -} ---------------- -<1> The `_timestamp` is returned by default, and at the top level. - - -==== Async replication - -The `replication` parameter has been removed from all CRUD operations -(`index`, `create`, `update`, `delete`, `bulk`) as it interfered with the -<> feature. 
These operations are now -synchronous only and a request will only return once the changes have been -replicated to all active shards in the shard group. - -Instead, use more client processes to send more requests in parallel. - -==== Documents must be specified without a type wrapper - -Previously, the document body could be wrapped in another object with the name -of the `type`: - -[source,js] --------------------------- -PUT my_index/my_type/1 -{ - "my_type": { <1> - "text": "quick brown fox" - } -} --------------------------- -<1> This `my_type` wrapper is not part of the document itself, but represents the document type. - -This feature was deprecated before but could be reenabled with the -`mapping.allow_type_wrapper` index setting. This setting is no longer -supported. The above document should be indexed as follows: - -[source,js] --------------------------- -PUT my_index/my_type/1 -{ - "text": "quick brown fox" -} --------------------------- - -==== Term Vectors API - -Usage of `/_termvector` is deprecated in favor of `/_termvectors`. - diff --git a/docs/reference/migration/migrate_2_0/index_apis.asciidoc b/docs/reference/migration/migrate_2_0/index_apis.asciidoc deleted file mode 100644 index c177a887866c..000000000000 --- a/docs/reference/migration/migrate_2_0/index_apis.asciidoc +++ /dev/null @@ -1,43 +0,0 @@ -[[breaking_20_index_api_changes]] -=== Index API changes - -==== Index aliases - - -Fields used in alias filters no longer have to exist in the mapping at alias -creation time. Previously, alias filters were parsed at alias creation time -and the parsed form was cached in memory. Now, alias filters are parsed at -request time and the fields in filters are resolved from the current mapping. - -This also means that index aliases now support `has_parent` and `has_child` -queries. - -The <> will now throw an exception if no -matching aliases are found. This change brings the defaults for this API in -line with the other Indices APIs. 
The <> options can be used on a -request to change this behavior. - -==== File based index templates - -Index templates can no longer be configured on disk. Use the -<> API instead. - -==== Analyze API changes - - -The Analyze API now returns the `position` of the first token as `0` -instead of `1`. - -The `prefer_local` parameter has been removed. The `_analyze` API is a light -operation and the caller shouldn't be concerned about whether it executes on -the node that receives the request or another node. - -The `text()` method on `AnalyzeRequest` now returns `String[]` instead of -`String`. - -==== Removed `id_cache` from clear cache api - -The <> API no longer supports the `id_cache` -option. Instead, use the `fielddata` option to clear the cache for the -`_parent` field. - diff --git a/docs/reference/migration/migrate_2_0/java.asciidoc b/docs/reference/migration/migrate_2_0/java.asciidoc deleted file mode 100644 index b2f5ee63e0d5..000000000000 --- a/docs/reference/migration/migrate_2_0/java.asciidoc +++ /dev/null @@ -1,147 +0,0 @@ -[[breaking_20_java_api_changes]] -=== Java API changes - -==== Transport API construction - -The `TransportClient` construction code has changed, it now uses the builder -pattern. Instead of: - -[source,java] --------------------------------------------------- -Settings settings = Settings.settingsBuilder() - .put("cluster.name", "myClusterName").build(); -Client client = new TransportClient(settings); --------------------------------------------------- - -Use the following: - -[source,java] --------------------------------------------------- -Settings settings = Settings.settingsBuilder() - .put("cluster.name", "myClusterName").build(); -Client client = TransportClient.builder().settings(settings).build(); --------------------------------------------------- - -The transport client also no longer supports loading settings from config files. 
-If you have a config file, you can load it into settings yourself before -constructing the transport client: - -[source,java] --------------------------------------------------- -Settings settings = Settings.settingsBuilder() - .loadFromPath(pathToYourSettingsFile).build(); -Client client = TransportClient.builder().settings(settings).build(); --------------------------------------------------- - -==== Exception are only thrown on total failure - -Previously, many APIs would throw an exception if any shard failed to execute -the request. Now the exception is only thrown if all shards fail the request. -The responses for these APIs will always have a `getShardFailures` method that -you can and should check for failures. - - -==== IndexMissingException removed. - -Use `IndexNotFoundException` instead. - - -==== Automatically thread client listeners - -Previously, the user had to set request listener threads to `true` when on the -client side in order not to block IO threads on heavy operations. This proved -to be very trappy for users, and ended up creating problems that are very hard -to debug. - -In 2.0, Elasticsearch automatically threads listeners that are used from the -client when the client is a node client or a transport client. Threading can -no longer be manually set. - - -==== Query/filter refactoring - -`org.elasticsearch.index.queries.FilterBuilders` has been removed as part of the merge of -queries and filters. These filters are now available in `QueryBuilders` with the same name. -All methods that used to accept a `FilterBuilder` now accept a `QueryBuilder` instead. - -In addition some query builders have been removed or renamed: - -* `commonTerms(...)` renamed with `commonTermsQuery(...)` -* `queryString(...)` renamed with `queryStringQuery(...)` -* `simpleQueryString(...)` renamed with `simpleQueryStringQuery(...)` -* `textPhrase(...)` removed -* `textPhrasePrefix(...)` removed -* `textPhrasePrefixQuery(...)` removed -* `filtered(...)` removed. 
Use `filteredQuery(...)` instead. -* `inQuery(...)` removed. - -==== GetIndexRequest - -`GetIndexRequest.features()` now returns an array of Feature Enums instead of an array of String values. - -The following deprecated methods have been removed: - -* `GetIndexRequest.addFeatures(String[])` - Use - `GetIndexRequest.addFeatures(Feature[])` instead - -* `GetIndexRequest.features(String[])` - Use - `GetIndexRequest.features(Feature[])` instead. - -* `GetIndexRequestBuilder.addFeatures(String[])` - Use - `GetIndexRequestBuilder.addFeatures(Feature[])` instead. - -* `GetIndexRequestBuilder.setFeatures(String[])` - Use - `GetIndexRequestBuilder.setFeatures(Feature[])` instead. - - -==== BytesQueryBuilder removed - -The redundant BytesQueryBuilder has been removed in favour of the -WrapperQueryBuilder internally. - -==== TermsQueryBuilder execution removed - -The `TermsQueryBuilder#execution` method has been removed as it has no effect, it is ignored by the - corresponding parser. - -==== ImmutableSettings removed - -Use `Settings.builder()` instead of `ImmutableSettings.builder()`. - -==== InetSocketTransportAddress removed - -Use `InetSocketTransportAddress(InetSocketAddress address)` instead of `InetSocketTransportAddress(String, int)`. -You can create an InetSocketAddress instance with `InetSocketAddress(String, int)`. For example: - -[source,java] ------------------------------ -new InetSocketTransportAddress(new InetSocketAddress("127.0.0.1", 0)); ------------------------------ - -==== Request Builders refactoring - -An `action` parameter has been added to various request builders: - -* Instead of `new SnapshotsStatusRequestBuilder(elasticSearchClient)` use `new SnapshotsStatusRequestBuilder(elasticSearchClient, SnapshotsStatusAction.INSTANCE)`. - -* Instead of `new CreateSnapshotRequestBuilder(elasticSearchClient)` use `new CreateSnapshotRequestBuilder(elasticSearchClient, CreateSnapshotAction.INSTANCE)`. 
- -* Instead of `new CreateIndexRequestBuilder(elasticSearchClient, index)` use `new CreateIndexRequestBuilder(elasticSearchClient, CreateIndexAction.INSTANCE, index)`. - -==== Shading and package relocation removed - -Elasticsearch used to shade its dependencies and to relocate packages. We no longer use shading or relocation. -You might need to change your imports to the original package names: - -* `com.google.common` was `org.elasticsearch.common` -* `com.carrotsearch.hppc` was `org.elasticsearch.common.hppc` -* `jsr166e` was `org.elasticsearch.common.util.concurrent.jsr166e` -* `com.fasterxml.jackson` was `org.elasticsearch.common.jackson` -* `org.joda.time` was `org.elasticsearch.common.joda.time` -* `org.joda.convert` was `org.elasticsearch.common.joda.convert` -* `org.jboss.netty` was `org.elasticsearch.common.netty` -* `com.ning.compress` was `org.elasticsearch.common.compress` -* `com.github.mustachejava` was `org.elasticsearch.common.mustache` -* `com.tdunning.math.stats` was `org.elasticsearch.common.stats` -* `org.apache.commons.lang` was `org.elasticsearch.common.lang` -* `org.apache.commons.cli` was `org.elasticsearch.common.cli.commons` diff --git a/docs/reference/migration/migrate_2_0/mapping.asciidoc b/docs/reference/migration/migrate_2_0/mapping.asciidoc deleted file mode 100644 index b4ee0d54412c..000000000000 --- a/docs/reference/migration/migrate_2_0/mapping.asciidoc +++ /dev/null @@ -1,439 +0,0 @@ -[[breaking_20_mapping_changes]] -=== Mapping changes - -A number of changes have been made to mappings to remove ambiguity and to -ensure that conflicting mappings cannot be created. - -One major change is that dynamically added fields must have their mapping -confirmed by the master node before indexing continues. This is to avoid a -problem where different shards in the same index dynamically add different -mappings for the same field. These conflicting mappings can silently return -incorrect results and can lead to index corruption. 
- -This change can make indexing slower when frequently adding many new fields. -We are looking at ways of optimising this process but we chose safety over -performance for this extreme use case. - -==== Conflicting field mappings - -Fields with the same name, in the same index, in different types, must have -the same mapping, with the exception of the <>, <>, -<>, <>, <>, and <> -parameters, which may have different settings per field. - -[source,js] ---------------- -PUT my_index -{ - "mappings": { - "type_one": { - "properties": { - "name": { <1> - "type": "string" - } - } - }, - "type_two": { - "properties": { - "name": { <1> - "type": "string", - "analyzer": "english" - } - } - } - } -} ---------------- -<1> The two `name` fields have conflicting mappings and will prevent Elasticsearch - from starting. - -Elasticsearch will not start in the presence of conflicting field mappings. -These indices must be deleted or reindexed using a new mapping. - -The `ignore_conflicts` option of the put mappings API has been removed. -Conflicts can't be ignored anymore. - -==== Fields cannot be referenced by short name - -A field can no longer be referenced using its short name. Instead, the full -path to the field is required. For instance: - -[source,js] ---------------- -PUT my_index -{ - "mappings": { - "my_type": { - "properties": { - "title": { "type": "string" }, <1> - "name": { - "properties": { - "title": { "type": "string" }, <2> - "first": { "type": "string" }, - "last": { "type": "string" } - } - } - } - } - } -} ---------------- -<1> This field is referred to as `title`. -<2> This field is referred to as `name.title`. - -Previously, the two `title` fields in the example above could have been -confused with each other when using the short name `title`. - -==== Type name prefix removed - -Previously, two fields with the same name in two different types could -sometimes be disambiguated by prepending the type name. 
As a side effect, it -would add a filter on the type name to the relevant query. This feature was -ambiguous -- a type name could be confused with a field name -- and didn't -work everywhere e.g. aggregations. - -Instead, fields should be specified with the full path, but without a type -name prefix. If you wish to filter by the `_type` field, either specify the -type in the URL or add an explicit filter. - -The following example query in 1.x: - -[source,js] ----------------------------- -GET my_index/_search -{ - "query": { - "match": { - "my_type.some_field": "quick brown fox" - } - } -} ----------------------------- - -would be rewritten in 2.0 as: - -[source,js] ----------------------------- -GET my_index/my_type/_search <1> -{ - "query": { - "match": { - "some_field": "quick brown fox" <2> - } - } -} ----------------------------- -<1> The type name can be specified in the URL to act as a filter. -<2> The field name should be specified without the type prefix. - -==== Field names may not contain dots - -In 1.x, it was possible to create fields with dots in their name, for -instance: - -[source,js] ----------------------------- -PUT my_index -{ - "mappings": { - "my_type": { - "properties": { - "foo.bar": { <1> - "type": "string" - }, - "foo": { - "properties": { - "bar": { <1> - "type": "string" - } - } - } - } - } - } -} ----------------------------- -<1> These two fields cannot be distinguished as both are referred to as `foo.bar`. - -You can no longer create fields with dots in the name. - -==== Type names may not start with a dot - -In 1.x, Elasticsearch would issue a warning if a type name included a dot, -e.g. `my.type`. Now that type names are no longer used to distinguish between -fields in different types, this warning has been relaxed: type names may now -contain dots, but they may not *begin* with a dot. The only exception to this -is the special `.percolator` type. 
- -==== Type names may not be longer than 255 characters - -Mapping type names may not be longer than 255 characters. Long type names -will continue to function on indices created before upgrade, but it will not -be possible create types with long names in new indices. - -==== Types may no longer be deleted - -In 1.x it was possible to delete a type mapping, along with all of the -documents of that type, using the delete mapping API. This is no longer -supported, because remnants of the fields in the type could remain in the -index, causing corruption later on. - -Instead, if you need to delete a type mapping, you should reindex to a new -index which does not contain the mapping. If you just need to delete the -documents that belong to that type, then use the delete-by-query plugin -instead. - -[[migration-meta-fields]] -==== Type meta-fields - -The <> associated with had configuration options -removed, to make them more reliable: - -* `_id` configuration can no longer be changed. If you need to sort, use the <> field instead. -* `_type` configuration can no longer be changed. -* `_index` configuration can no longer be changed. -* `_routing` configuration is limited to marking routing as required. -* `_field_names` configuration is limited to disabling the field. -* `_size` configuration is limited to enabling the field. -* `_timestamp` configuration is limited to enabling the field, setting format and default value. -* `_boost` has been removed. -* `_analyzer` has been removed. - -Importantly, *meta-fields can no longer be specified as part of the document -body.* Instead, they must be specified in the query string parameters. 
For -instance, in 1.x, the `routing` could be specified as follows: - -[source,json] ------------------------------ -PUT my_index -{ - "mappings": { - "my_type": { - "_routing": { - "path": "group" <1> - }, - "properties": { - "group": { <1> - "type": "string" - } - } - } - } -} - -PUT my_index/my_type/1 <2> -{ - "group": "foo" -} ------------------------------ -<1> This 1.x mapping tells Elasticsearch to extract the `routing` value from the `group` field in the document body. -<2> This indexing request uses a `routing` value of `foo`. - -In 2.0, the routing must be specified explicitly: - -[source,json] ------------------------------ -PUT my_index -{ - "mappings": { - "my_type": { - "_routing": { - "required": true <1> - }, - "properties": { - "group": { - "type": "string" - } - } - } - } -} - -PUT my_index/my_type/1?routing=bar <2> -{ - "group": "foo" -} ------------------------------ -<1> Routing can be marked as required to ensure it is not forgotten during indexing. -<2> This indexing request uses a `routing` value of `bar`. - -==== `_timestamp` and `_ttl` deprecated - -The `_timestamp` and `_ttl` fields are deprecated, but will remain functional -for the remainder of the 2.x series. - -Instead of the `_timestamp` field, use a normal <> field and set -the value explicitly. - -The current `_ttl` functionality will be replaced in a future version with a -new implementation of TTL, possibly with different semantics, and will not -depend on the `_timestamp` field. - -==== Analyzer mappings - -Previously, `index_analyzer` and `search_analyzer` could be set separately, -while the `analyzer` setting would set both. The `index_analyzer` setting has -been removed in favour of just using the `analyzer` setting. - -If just the `analyzer` is set, it will be used at index time and at search time. To use a different analyzer at search time, specify both the `analyzer` and a `search_analyzer`. 
- -The `index_analyzer`, `search_analyzer`, and `analyzer` type-level settings -have also been removed, as it is no longer possible to select fields based on -the type name. - -The `_analyzer` meta-field, which allowed setting an analyzer per document has -also been removed. It will be ignored on older indices. - -==== Date fields and Unix timestamps - -Previously, `date` fields would first try to parse values as a Unix timestamp --- milliseconds-since-the-epoch -- before trying to use their defined date -`format`. This meant that formats like `yyyyMMdd` could never work, as values -would be interpreted as timestamps. - -In 2.0, we have added two formats: `epoch_millis` and `epoch_second`. Only -date fields that use these formats will be able to parse timestamps. - -These formats cannot be used in dynamic templates, because they are -indistinguishable from long values. - -==== Default date format - -The default date format has changed from `date_optional_time` to -`strict_date_optional_time`, which expects a 4 digit year, and a 2 digit month -and day, (and optionally, 2 digit hour, minute, and second). - -A dynamically added date field, by default, includes the `epoch_millis` -format to support timestamp parsing. For instance: - -[source,js] -------------------------- -PUT my_index/my_type/1 -{ - "date_one": "2015-01-01" <1> -} -------------------------- -<1> Has `format`: `"strict_date_optional_time||epoch_millis"`. - -==== `mapping.date.round_ceil` setting - -The `mapping.date.round_ceil` setting for date math parsing has been removed. - -[[migration-bool-fields]] -==== Boolean fields - -Boolean fields used to have a string fielddata with `F` meaning `false` and `T` -meaning `true`. They have been refactored to use numeric fielddata, with `0` -for `false` and `1` for `true`. 
As a consequence, the format of the responses of -the following APIs changed when applied to boolean fields: `0`/`1` is returned -instead of `F`/`T`: - -* <> -* <> -* <> - -In addition, terms aggregations use a custom formatter for boolean (like for -dates and ip addresses, which are also backed by numbers) in order to return -the user-friendly representation of boolean fields: `false`/`true`: - -[source,js] ---------------- -"buckets": [ - { - "key": 0, - "key_as_string": "false", - "doc_count": 42 - }, - { - "key": 1, - "key_as_string": "true", - "doc_count": 12 - } -] ---------------- - -==== `index_name` and `path` removed - -The `index_name` setting was used to change the name of the Lucene field, -and the `path` setting was used on `object` fields to determine whether the -Lucene field should use the full path (including parent object fields), or -just the final `name`. - -These setting have been removed as their purpose is better served with the -<> parameter. - -==== Murmur3 Fields - -Fields of type `murmur3` can no longer change `doc_values` or `index` setting. -They are always mapped as follows: - -[source,js] ---------------------- -{ - "type": "murmur3", - "index": "no", - "doc_values": true -} ---------------------- - -==== Mappings in config files not supported - -The ability to specify mappings in configuration files has been removed. To -specify default mappings that apply to multiple indexes, use -<> instead. - -Along with this change, the following settings have been removed: - -* `index.mapper.default_mapping_location` -* `index.mapper.default_percolator_mapping_location` - -==== Fielddata formats - -Now that doc values are the default for fielddata, specialized in-memory -formats have become an esoteric option. These fielddata formats have been removed: - -* `fst` on string fields -* `compressed` on geo points - -The default fielddata format will be used instead. 
- -==== Posting and doc-values codecs - -It is no longer possible to specify per-field postings and doc values formats -in the mappings. This setting will be ignored on indices created before 2.0 -and will cause mapping parsing to fail on indices created on or after 2.0. For -old indices, this means that new segments will be written with the default -postings and doc values formats of the current codec. - -It is still possible to change the whole codec by using the `index.codec` -setting. Please however note that using a non-default codec is discouraged as -it could prevent future versions of Elasticsearch from being able to read the -index. - -==== Compress and compress threshold - -The `compress` and `compress_threshold` options have been removed from the -`_source` field and fields of type `binary`. These fields are compressed by -default. If you would like to increase compression levels, use the new -<> setting instead. - -==== position_offset_gap - -The `position_offset_gap` option is renamed to 'position_increment_gap'. This was -done to clear away the confusion. Elasticsearch's 'position_increment_gap' now is -mapped directly to Lucene's 'position_increment_gap' - -The default `position_increment_gap` is now 100. Indexes created in Elasticsearch -2.0.0 will default to using 100 and indexes created before that will continue -to use the old default of 0. This was done to prevent phrase queries from -matching across different values of the same term unexpectedly. Specifically, -100 was chosen to cause phrase queries with slops up to 99 to match only within -a single value of a field. - -==== copy_to and multi fields - -A <> within a <> is ignored from version 2.0 on. With any version after -2.1 or 2.0.1 creating a mapping that has a copy_to within a multi field will result -in an exception. 
- - diff --git a/docs/reference/migration/migrate_2_0/network.asciidoc b/docs/reference/migration/migrate_2_0/network.asciidoc deleted file mode 100644 index d493bff56881..000000000000 --- a/docs/reference/migration/migrate_2_0/network.asciidoc +++ /dev/null @@ -1,39 +0,0 @@ -[[breaking_20_network_changes]] -=== Network changes - -==== Bind to localhost - -Elasticsearch 2.x will only bind to localhost by default. It will try to bind -to both 127.0.0.1 (IPv4) and [::1] (IPv6), but will work happily in -environments where only IPv4 or IPv6 is available. This change prevents -Elasticsearch from trying to connect to other nodes on your network unless you -specifically tell it to do so. When moving to production you should configure -the `network.host` parameter, either in the `elasticsearch.yml` config file or -on the command line: - -[source,sh] --------------------- -bin/elasticsearch --network.host 192.168.1.5 -bin/elasticsearch --network.host _non_loopback_ --------------------- - -The full list of options that network.host accepts can be found in the <>. - -==== Unicast discovery - -When bound to localhost, Elasticsearch will use unicast to contact -the first 5 ports in the `transport.tcp.port` range, which defaults to -`9300-9400`. This preserves the zero-config auto-clustering experience for the developer, -but it means that you will have to provide a list of <> -when moving to production, for instance: - -[source,yaml] ---------------------- -discovery.zen.ping.unicast.hosts: [ 192.168.1.2, 192.168.1.3 ] ---------------------- - -You don’t need to list all of the nodes in your cluster as unicast hosts, but -you should specify at least a quorum (majority) of master-eligible nodes. A -big cluster will typically have three dedicated master nodes, in which case we -recommend listing all three of them as unicast hosts. 
- diff --git a/docs/reference/migration/migrate_2_0/packaging.asciidoc b/docs/reference/migration/migrate_2_0/packaging.asciidoc deleted file mode 100644 index dae87187ba40..000000000000 --- a/docs/reference/migration/migrate_2_0/packaging.asciidoc +++ /dev/null @@ -1,84 +0,0 @@ -[[breaking_20_plugin_and_packaging_changes]] -=== Plugin and packaging changes - -==== Symbolic links and paths - -Elasticsearch 2.0 runs with the Java security manager enabled and is much more -restrictive about which paths it is allowed to access. Various paths can be -configured, e.g. `path.data`, `path.scripts`, `path.repo`. A configured path -may itself be a symbolic link, but no symlinks under that path will be -followed. - -==== Running `bin/elasticsearch` - -The command line parameter parsing has been rewritten to deal properly with -spaces in parameters. All config settings can still be specified on the -command line when starting Elasticsearch, but they must appear after the -built-in "static parameters", such as `-d` (to daemonize) and `-p` (the PID path). - -For instance: - -[source,sh] ------------ -bin/elasticsearch -d -p /tmp/foo.pid --http.cors.enabled=true --http.cors.allow-origin='*' ------------ - -For a list of static parameters, run `bin/elasticsearch -h` - -==== `-f` removed - -The `-f` parameter, which used to indicate that Elasticsearch should be run in -the foreground, was deprecated in 1.0 and removed in 2.0. - -==== `V` for version - -The `-v` parameter now means `--verbose` for both `bin/elasticsearch-plugin` and -`bin/elasticsearch` (although it has no effect on the latter). To output the -version, use `-V` or `--version` instead. - -==== Plugin manager should run as root - -The permissions of the `config`, `bin`, and `plugins` directories in the RPM -and deb packages have been made more restrictive. The plugin manager should -be run as root otherwise it will not be able to install plugins. 
- -==== Support for official plugins - -Almost all of the official Elasticsearch plugins have been moved to the main -`elasticsearch` repository. They will be released at the same time as -Elasticsearch and have the same version number as Elasticsearch. - -Official plugins can be installed as follows: - -[source,sh] ---------------- -sudo bin/elasticsearch-plugin install analysis-icu ---------------- - -Community-provided plugins can be installed as before. - -==== Plugins require descriptor file - -All plugins are now required to have a https://github.com/elastic/elasticsearch/blob/2.0/dev-tools/src/main/resources/plugin-metadata/plugin-descriptor.properties[plugin-descriptor.properties] file. If a node has a plugin installed which lacks this file, it will be unable to start. - -==== Repository naming structure changes - -Elasticsearch 2.0 changes the way the repository URLs are referenced. Instead -of specific repositories for both major and minor versions, the repositories will -use a major version reference only. - -The URL for apt packages now uses the following structure: - -[source,sh] ---------------- -deb http://packages.elastic.co/elasticsearch/2.x/debian stable main ---------------- - -And for yum packages it is: - -[source,sh] ---------------- -baseurl=http://packages.elastic.co/elasticsearch/2.x/centos ---------------- - -The <> page details this change. diff --git a/docs/reference/migration/migrate_2_0/parent_child.asciidoc b/docs/reference/migration/migrate_2_0/parent_child.asciidoc deleted file mode 100644 index 1addf8839738..000000000000 --- a/docs/reference/migration/migrate_2_0/parent_child.asciidoc +++ /dev/null @@ -1,43 +0,0 @@ -[[breaking_20_parent_child_changes]] -=== Parent/Child changes - -Parent/child has been rewritten completely to reduce memory usage and to -execute `has_child` and `has_parent` queries faster and more efficiently. The -`_parent` field uses doc values by default.
The refactored and improved -implementation is only active for indices created on or after version 2.0. - -In order to benefit from all the performance and memory improvements, we -recommend reindexing all existing indices that use the `_parent` field. - -==== Parent type cannot pre-exist - -A mapping type is declared as a child of another mapping type by specifying -the `_parent` meta field: - -[source,js] --------------------------- -DELETE * - -PUT my_index -{ - "mappings": { - "my_parent": {}, - "my_child": { - "_parent": { - "type": "my_parent" <1> - } - } - } -} --------------------------- -<1> The `my_parent` type is the parent of the `my_child` type. - -The mapping for the parent type can be added at the same time as the mapping -for the child type, but cannot be added before the child type. - -==== `top_children` query removed - -The `top_children` query has been removed in favour of the `has_child` query. -It wasn't always faster than the `has_child` query and the results were usually -inaccurate. The total hits and any aggregations in the same search request -would be incorrect if `top_children` was used. diff --git a/docs/reference/migration/migrate_2_0/query_dsl.asciidoc b/docs/reference/migration/migrate_2_0/query_dsl.asciidoc deleted file mode 100644 index a85ade8690b0..000000000000 --- a/docs/reference/migration/migrate_2_0/query_dsl.asciidoc +++ /dev/null @@ -1,189 +0,0 @@ -[[breaking_20_query_dsl_changes]] -=== Query DSL changes - -==== Queries and filters merged - -Queries and filters have been merged -- all filter clauses are now query -clauses. Instead, query clauses can now be used in _query context_ or in -_filter context_: - -Query context:: - -A query used in query context will calculate relevance scores and will not be -cacheable. Query context is used whenever filter context does not apply. - -Filter context:: -+ --- - -A query used in filter context will not calculate relevance scores, and will -be cacheable. 
Filter context is introduced by: - -* the `constant_score` query -* the `must_not` and (newly added) `filter` parameter in the `bool` query -* the `filter` and `filters` parameters in the `function_score` query -* any API called `filter`, such as the `post_filter` search parameter, or in - aggregations or index aliases --- - -==== `terms` query and filter - -The `execution` option of the `terms` filter is now deprecated and is ignored -if provided. Similarly, the `terms` query no longer supports the -`minimum_should_match` parameter. - -==== `or` and `and` now implemented via `bool` - -The `or` and `and` filters previously had a different execution pattern to the -`bool` filter. It used to be important to use `and`/`or` with certain filter -clauses, and `bool` with others. - -This distinction has been removed: the `bool` query is now smart enough to -handle both cases optimally. As a result of this change, the `or` and `and` -filters are now sugar syntax which are executed internally as a `bool` query. -These filters may be removed in the future. - -==== `filtered` query and `query` filter deprecated - -The `query` filter is deprecated as it is no longer needed -- all queries can -be used in query or filter context. - -The `filtered` query is deprecated in favour of the `bool` query.
Instead of -the following: - -[source,js] -------------------------- -GET _search -{ - "query": { - "filtered": { - "query": { - "match": { - "text": "quick brown fox" - } - }, - "filter": { - "term": { - "status": "published" - } - } - } - } -} -------------------------- - -move the query and filter to the `must` and `filter` parameters in the `bool` -query: - -[source,js] -------------------------- -GET _search -{ - "query": { - "bool": { - "must": { - "match": { - "text": "quick brown fox" - } - }, - "filter": { - "term": { - "status": "published" - } - } - } - } -} -------------------------- - -==== Filter auto-caching - -It used to be possible to control which filters were cached with the `_cache` -option and to provide a custom `_cache_key`. These options are deprecated -and, if present, will be ignored. - -Query clauses used in filter context are now auto-cached when it makes sense -to do so. The algorithm takes into account the frequency of use, the cost of -query execution, and the cost of building the filter. - -The `terms` filter lookup mechanism no longer caches the values of the -document containing the terms. It relies on the filesystem cache instead. If -the lookup index is not too large, it is recommended to replicate it to all -nodes by setting `index.auto_expand_replicas: 0-all` in order to remove the -network overhead as well. - -==== Numeric queries use IDF for scoring - -Previously, term queries on numeric fields were deliberately prevented from -using the usual Lucene scoring logic and this behaviour was undocumented and, -to some, unexpected. - -Single `term` queries on numeric fields now score in the same way as string -fields, using IDF and norms (if enabled). - -To query numeric fields without scoring, the query clause should be used in -filter context, e.g. 
in the `filter` parameter of the `bool` query, or wrapped -in a `constant_score` query: - -[source,js] ----------------------------- -GET _search -{ - "query": { - "bool": { - "must": [ - { - "match": { <1> - "numeric_tag": 5 - } - } - ], - "filter": [ - { - "match": { <2> - "count": 5 - } - } - ] - } - } -} ----------------------------- -<1> This clause would include IDF in the relevance score calculation. -<2> This clause would have no effect on the relevance score. - -==== Fuzziness and fuzzy-like-this - -Fuzzy matching used to calculate the score for each fuzzy alternative, meaning -that rare misspellings would have a higher score than the more common correct -spellings. Now, fuzzy matching blends the scores of all the fuzzy alternatives -to use the IDF of the most frequently occurring alternative. - -Fuzziness can no longer be specified using a percentage, but should instead -use the number of allowed edits: - -* `0`, `1`, `2`, or -* `AUTO` (which chooses `0`, `1`, or `2` based on the length of the term) - -The `fuzzy_like_this` and `fuzzy_like_this_field` queries used a very -expensive approach to fuzzy matching and have been removed. - -==== More Like This - -The More Like This (`mlt`) API and the `more_like_this_field` (`mlt_field`) -query have been removed in favor of the -<> query. - -The parameter `percent_terms_to_match` has been removed in favor of -`minimum_should_match`. - -==== `limit` filter deprecated - -The `limit` filter is deprecated and becomes a no-op. You can achieve similar -behaviour using the <> parameter. - -==== Java plugins registering custom queries - -Java plugins that register custom queries can do so by using the -`IndicesQueriesModule#addQuery(Class)` method. Other -ways to register custom queries are not supported anymore. 
diff --git a/docs/reference/migration/migrate_2_0/removals.asciidoc b/docs/reference/migration/migrate_2_0/removals.asciidoc deleted file mode 100644 index 31693c3d3ac8..000000000000 --- a/docs/reference/migration/migrate_2_0/removals.asciidoc +++ /dev/null @@ -1,100 +0,0 @@ -[[breaking_20_removed_features]] -=== Removed features - -==== Rivers have been removed - -Elasticsearch does not support rivers anymore. While we had first planned to -keep them around to ease migration, keeping support for rivers proved to be -challenging as it conflicted with other important changes that we wanted to -bring to 2.0 like synchronous dynamic mappings updates, so we eventually -decided to remove them entirely. See -link:/blog/deprecating_rivers[Deprecating Rivers] for more background about -why we took this decision. - -==== Facets have been removed - -Facets, deprecated since 1.0, have now been removed. Instead, use the much -more powerful and flexible <> framework. -This also means that Kibana 3 will not work with Elasticsearch 2.0. - -==== MVEL has been removed - -The MVEL scripting language has been removed. The default scripting language -is now Groovy. - -==== Delete-by-query is now a plugin - -The old delete-by-query functionality was fast but unsafe. It could lead to -document differences between the primary and replica shards, and could even -produce out of memory exceptions and cause the cluster to crash. - -This feature has been reimplemented using the <> and -<> APIs, which may be slower for queries which match -large numbers of documents, but is safe. - -Currently, a long running delete-by-query job cannot be cancelled, which is -one of the reasons that this functionality is only available as a plugin. You -can install the plugin with: - -[source,sh] ------------------- -./bin/elasticsearch-plugin install delete-by-query ------------------- - -See {plugins}/plugins-delete-by-query.html for more information. 
- -==== Multicast Discovery is now a plugin - -Support for multicast is very patchy. Linux doesn’t allow multicast listening on localhost, -while OS/X sends multicast broadcasts across all interfaces regardless of the configured -bind address. On top of that, some networks have multicast disabled by default. - -This feature has been moved to a plugin. The default discovery mechanism now uses -unicast, with a default setup which looks for the first 5 ports on localhost. If you -still need to use multicast discovery, you can install the plugin with: - -[source,sh] ------------------- -./bin/elasticsearch-plugin install discovery-multicast ------------------- - -==== `_shutdown` API - -The `_shutdown` API has been removed without a replacement. Nodes should be -managed via the operating system and the provided start/stop scripts. - -==== `murmur3` is now a plugin - -The `murmur3` field, which indexes hashes of the field values, has been moved -out of core and is available as a plugin. It can be installed as: - -[source,sh] ------------------- -./bin/elasticsearch-plugin install mapper-murmur3 ------------------- - -==== `_size` is now a plugin - -The `_size` meta-data field, which indexes the size in bytes of the original -JSON document, has been moved out of core and is available as a plugin. It -can be installed as: - -[source,sh] ------------------- -./bin/elasticsearch-plugin install mapper-size ------------------- - -==== Thrift and memcached transport - -The thrift and memcached transport plugins are no longer supported. Instead, use -either the HTTP transport (enabled by default) or the node or transport Java client. - -==== Bulk UDP - -The bulk UDP API has been removed. Instead, use the standard -<> API, or use UDP to send documents to Logstash first. - -==== MergeScheduler pluggability - -The merge scheduler is no longer pluggable. 
- diff --git a/docs/reference/migration/migrate_2_0/scripting.asciidoc b/docs/reference/migration/migrate_2_0/scripting.asciidoc deleted file mode 100644 index 495d2daa2c54..000000000000 --- a/docs/reference/migration/migrate_2_0/scripting.asciidoc +++ /dev/null @@ -1,103 +0,0 @@ -[[breaking_20_scripting_changes]] -=== Scripting changes - -==== Scripting syntax - -The syntax for scripts has been made consistent across all APIs. The accepted -format is as follows: - -Inline/Dynamic scripts:: -+ --- - -[source,js] ---------------- -"script": { - "inline": "doc['foo'].value + val", <1> - "lang": "groovy", <2> - "params": { "val": 3 } <3> -} ---------------- -<1> The inline script to execute. -<2> The optional language of the script. -<3> Any named parameters. --- - -Indexed scripts:: -+ --- -[source,js] ---------------- -"script": { - "id": "my_script_id", <1> - "lang": "groovy", <2> - "params": { "val": 3 } <3> -} ---------------- -<1> The ID of the indexed script. -<2> The optional language of the script. -<3> Any named parameters. --- - -File scripts:: -+ --- -[source,js] ---------------- -"script": { - "file": "my_file", <1> - "lang": "groovy", <2> - "params": { "val": 3 } <3> -} ---------------- -<1> The filename of the script, without the `.lang` suffix. -<2> The optional language of the script. -<3> Any named parameters. 
--- - -For example, an update request might look like this: - -[source,js] ---------------- -POST my_index/my_type/1/_update -{ - "script": { - "inline": "ctx._source.count += val", - "params": { "val": 3 } - }, - "upsert": { - "count": 0 - } -} ---------------- - -A short syntax exists for running inline scripts in the default scripting -language without any parameters: - -[source,js] ----------------- -GET _search -{ - "script_fields": { - "concat_fields": { - "script": "doc['one'].value + ' ' + doc['two'].value" - } - } -} ----------------- - -==== Scripting settings - -The `script.disable_dynamic` node setting has been replaced by fine-grained -script settings described in <>. - -==== Groovy scripts sandbox - -The Groovy sandbox and related settings have been removed. Groovy is now a -non-sandboxed scripting language, without any option to turn the sandbox on. - -==== Plugins making use of scripts - -Plugins that make use of scripts must register their own script context -through `ScriptModule`. Script contexts can be used as part of fine-grained -settings to enable/disable scripts selectively. diff --git a/docs/reference/migration/migrate_2_0/search.asciidoc b/docs/reference/migration/migrate_2_0/search.asciidoc deleted file mode 100644 index 036313077ff2..000000000000 --- a/docs/reference/migration/migrate_2_0/search.asciidoc +++ /dev/null @@ -1,122 +0,0 @@ -[[breaking_20_search_changes]] -=== Search changes - -==== Partial fields - -Partial fields have been removed in favor of <>. - -==== `search_type=count` deprecated - -The `count` search type has been deprecated. All benefits from this search -type can now be achieved by using the (default) `query_then_fetch` search type -and setting `size` to `0`. - -==== The count api internally uses the search api - -The count api is now a shortcut to the search api with `size` set to 0. 
As a -result, a total failure will result in an exception being returned rather -than a normal response with `count` set to `0` and shard failures. - -==== All stored meta-fields returned by default - -Previously, meta-fields like `_routing`, `_timestamp`, etc would only be -included in the search results if specifically requested with the `fields` -parameter. Now, all meta-fields which have stored values will be returned by -default. Additionally, they are now returned at the top level (along with -`_index`, `_type`, and `_id`) instead of in the `fields` element. - -For instance, the following request: - -[source,sh] ---------------- -GET /my_index/_search?fields=foo ---------------- - -might return: - -[source,js] ---------------- -{ - [...] - "hits": { - "total": 1, - "max_score": 1, - "hits": [ - { - "_index": "my_index", - "_type": "my_type", - "_id": "1", - "_score": 1, - "_timestamp": 10000000, <1> - "fields": { - "foo" : [ "bar" ] - } - } - ] - } -} ---------------- -<1> The `_timestamp` is returned by default, and at the top level. - - -==== Script fields - -Script fields in 1.x were only returned as a single value. Even if the return -value of a script was a list, it would be returned as an array containing an -array: - -[source,js] ---------------- -"fields": { - "my_field": [ - [ - "v1", - "v2" - ] - ] -} ---------------- - -In elasticsearch 2.0, scripts that return a list of values are treated as -multivalued fields. The same example would return the following response, with -values in a single array. - -[source,js] ---------------- -"fields": { - "my_field": [ - "v1", - "v2" - ] -} ---------------- - -==== Timezone for date field - -Specifying the `time_zone` parameter in queries or aggregations on fields of -type `date` must now be either an ISO 8601 UTC offset, or a timezone id. For -example, the value `+1:00` must now be written as `+01:00`. 
- -==== Only highlight queried fields - -The default value for the `require_field_match` option has changed from -`false` to `true`, meaning that the highlighters will, by default, only take -the fields that were queried into account. - -This means that, when querying the `_all` field, trying to highlight on any -field other than `_all` will produce no highlighted snippets. Querying the -same fields that need to be highlighted is the cleaner solution to get -highlighted snippets back. Otherwise `require_field_match` option can be set -to `false` to ignore field names completely when highlighting. - -The postings highlighter doesn't support the `require_field_match` option -anymore, it will only highlight fields that were queried. - -==== Postings highlighter doesn't support `match_phrase_prefix` - -The `match` query with type set to `phrase_prefix` (or the -`match_phrase_prefix` query) is not supported by the postings highlighter. No -highlighted snippets will be returned. - - - diff --git a/docs/reference/migration/migrate_2_0/settings.asciidoc b/docs/reference/migration/migrate_2_0/settings.asciidoc deleted file mode 100644 index 06aa743a5d84..000000000000 --- a/docs/reference/migration/migrate_2_0/settings.asciidoc +++ /dev/null @@ -1,204 +0,0 @@ -[[breaking_20_setting_changes]] -=== Setting changes - -==== Command line flags - -Command line flags using single dash notation must be now specified as the first arguments. -For example if previously using: - -[source,sh] ---------------- -./elasticsearch --node.name=test_node -Des.path.conf=/opt/elasticsearch/conf/test_node ---------------- - -This will now need to be changed to: - -[source,sh] ---------------- -./elasticsearch -Des.path.conf=/opt/elasticsearch/conf/test_node --node.name=test_node ---------------- - -for the flag to take effect. 
- -[[migration-script-settings]] -==== Scripting settings - -The `script.disable_dynamic` node setting has been replaced by fine-grained -script settings described in the <>. -The following setting previously used to enable dynamic or inline scripts: - -[source,yaml] ---------------- -script.disable_dynamic: false ---------------- - -It should be replaced with the following two settings in `elasticsearch.yml` that -achieve the same result: - -[source,yaml] ---------------- -script.inline: true -script.indexed: true ---------------- - -==== Units required for time and byte-sized settings - -Any settings which accept time or byte values must now be specified with -units. For instance, it is too easy to set the `refresh_interval` to 1 -*millisecond* instead of 1 second: - -[source,js] ---------------- -PUT _settings -{ - "index.refresh_interval": 1 -} ---------------- - -In 2.0, the above request will throw an exception. Instead the refresh -interval should be set to `"1s"` for one second. - -==== Merge and merge throttling settings - -The tiered merge policy is now the only supported merge policy. These settings -have been removed: - -* `index.merge.policy.type` -* `index.merge.policy.min_merge_size` -* `index.merge.policy.max_merge_size` -* `index.merge.policy.merge_factor` -* `index.merge.policy.max_merge_docs` -* `index.merge.policy.calibrate_size_by_deletes` -* `index.merge.policy.min_merge_docs` -* `index.merge.policy.max_merge_docs` - -Merge throttling now uses a feedback loop to auto-throttle. These settings -have been removed: - -* `indices.store.throttle.type` -* `indices.store.throttle.max_bytes_per_sec` -* `index.store.throttle.type` -* `index.store.throttle.max_bytes_per_sec` - -==== Shadow replica settings - -The `node.enable_custom_paths` setting has been removed and replaced by the -`path.shared_data` setting to allow shadow replicas with custom paths to work -with the security manager. 
For example, if your previous configuration had: - -[source,yaml] ------- -node.enable_custom_paths: true ------- - -And you created an index using shadow replicas with `index.data_path` set to -`/opt/data/my_index` with the following: - -[source,js] --------------------------------------------------- -PUT /my_index -{ - "index": { - "number_of_shards": 1, - "number_of_replicas": 4, - "data_path": "/opt/data/my_index", - "shadow_replicas": true - } -} --------------------------------------------------- - -For 2.0, you will need to set `path.shared_data` to a parent directory of the -index's data_path, so: - -[source,yaml] ------------ -path.shared_data: /opt/data ------------ - -==== Resource watcher settings renamed - -The setting names for configuring the resource watcher have been renamed -to prevent clashes with the watcher plugin - -* `watcher.enabled` is now `resource.reload.enabled` -* `watcher.interval` is now `resource.reload.interval` -* `watcher.interval.low` is now `resource.reload.interval.low` -* `watcher.interval.medium` is now `resource.reload.interval.medium` -* `watcher.interval.high` is now `resource.reload.interval.high` - -==== index.gateway setting renamed - -* `index.gateway.local.sync` is now `index.translog.sync_interval` - -==== Hunspell dictionary configuration - -The parameter `indices.analysis.hunspell.dictionary.location` has been -removed, and `/hunspell` is always used. - -==== CORS allowed origins - -The CORS allowed origins setting, `http.cors.allow-origin`, no longer has a default value. Previously, the default value -was `*`, which would allow CORS requests from any origin and is considered insecure. The `http.cors.allow-origin` setting -should be specified with only the origins that should be allowed, like so: - -[source,yaml] ---------------- -http.cors.allow-origin: /https?:\/\/localhost(:[0-9]+)?/ ---------------- - -==== JSONP support - -JSONP callback support has now been removed. 
CORS should be used to access Elasticsearch -over AJAX instead: - -[source,yaml] ---------------- -http.cors.enabled: true -http.cors.allow-origin: /https?:\/\/localhost(:[0-9]+)?/ ---------------- - -==== In memory indices - -The `memory` / `ram` store (`index.store.type`) option was removed in -Elasticsearch. In-memory indices are no longer supported. - -==== Log messages truncated - -Log messages are now truncated at 10,000 characters. This can be changed in -the `logging.yml` configuration file with the `file.layout.conversionPattern` -setting. - -==== Custom config file - -It is no longer possible to specify a custom config file with the `CONF_FILE` -environment variable, or the `-Des.config`, `-Des.default.config`, or -`-Delasticsearch.config` parameters. - -Instead, the config file must be named `elasticsearch.yml` and must be located -in the default `config/` directory, unless a custom config directory is specified. - -The location of a custom config directory may be specified as follows: - -[source,sh] --------------- -./bin/elasticsearch --path.conf=/path/to/conf/dir -./bin/elasticsearch-plugin -Des.path.conf=/path/to/conf/dir install analysis-icu --------------- - -When using the RPM or debian packages, the plugin script and the -init/service scripts will consult the `CONF_DIR` environment variable -to check for a custom config location. The value of the `CONF_DIR` -variable can be set in the environment config file which is located either in -`/etc/default/elasticsearch` or `/etc/sysconfig/elasticsearch`. - -==== Custom analysis file paths - -It is no longer possible to set custom file path outside `CONF_DIR` for `*_path` settings -in <> or <> filters. -You must specify either relative path to `CONF_DIR` location or absolute path inside `CONF_DIR` location. - -==== `ES_CLASSPATH removed` - -The `ES_CLASSPATH` environment variable is no longer used to set the class -path. 
External libraries should preferably be loaded using the plugin -mechanism or, if you really must, be copied to the `lib/` directory. diff --git a/docs/reference/migration/migrate_2_0/snapshot_restore.asciidoc b/docs/reference/migration/migrate_2_0/snapshot_restore.asciidoc deleted file mode 100644 index c9b222abdc86..000000000000 --- a/docs/reference/migration/migrate_2_0/snapshot_restore.asciidoc +++ /dev/null @@ -1,38 +0,0 @@ -[[breaking_20_snapshot_and_restore_changes]] -=== Snapshot and Restore changes - -==== File-system repositories must be whitelisted - -Locations of the shared file system repositories and the URL repositories with -`file:` URLs now have to be registered before starting Elasticsearch using the -`path.repo` setting. The `path.repo` setting can contain one or more -repository locations: - -[source,yaml] ---------------- -path.repo: ["/mnt/daily", "/mnt/weekly"] ---------------- - -If the repository location is specified as an absolute path it has to start -with one of the locations specified in `path.repo`. If the location is -specified as a relative path, it will be resolved against the first location -specified in the `path.repo` setting. - -==== URL repositories must be whitelisted - -URL repositories with `http:`, `https:`, and `ftp:` URLs have to be -whitelisted before starting Elasticsearch with the -`repositories.url.allowed_urls` setting. This setting supports wildcards in -the place of host, path, query, and fragment. For example: - -[source,yaml] ------------------------------------ -repositories.url.allowed_urls: ["http://www.example.org/root/*", "https://*.mydomain.com/*?*#*"] ------------------------------------ - -==== Wildcard expansion - -The obsolete parameters `expand_wildcards_open` and `expand_wildcards_close` -are no longer supported by the snapshot and restore operations. These -parameters have been replaced by a single `expand_wildcards` parameter. See -<> for more. 
diff --git a/docs/reference/migration/migrate_2_0/stats.asciidoc b/docs/reference/migration/migrate_2_0/stats.asciidoc deleted file mode 100644 index dc80ecd83ec8..000000000000 --- a/docs/reference/migration/migrate_2_0/stats.asciidoc +++ /dev/null @@ -1,52 +0,0 @@ -[[breaking_20_stats_info_and_literal_cat_literal_changes]] -=== Stats, info, and `cat` changes - -==== Sigar removed - -We no longer ship the Sigar library for operating system dependent statistics, -as it no longer seems to be maintained. Instead, we rely on the statistics -provided by the JVM. This has resulted in a number of changes to the node -info, and node stats responses: - -* `network.*` has been removed from nodes info and nodes stats. -* `fs.*.dev` and `fs.*.disk*` have been removed from nodes stats. -* `os.*` has been removed from nodes stats, except for `os.timestamp`, - `os.load_average`, `os.mem.*`, and `os.swap.*`. -* `os.mem.total` and `os.swap.total` have been removed from nodes info. -* `process.mem.resident` and `process.mem.share` have been removed from node stats. - -==== Removed `id_cache` from stats apis - -Removed `id_cache` metric from nodes stats, indices stats and cluster stats -apis. This metric has also been removed from the shards cat, indices cat and -nodes cat apis. Parent/child memory is now reported under fielddata, because -it has internally be using fielddata for a while now. - -To just see how much parent/child related field data is taking, the -`fielddata_fields` option can be used on the stats apis. Indices stats -example: - -[source,js] --------------------------------------------------- -GET /_stats/fielddata?fielddata_fields=_parent --------------------------------------------------- - -==== Percolator stats - -The total time spent running percolator queries is now called `percolate.time` -instead of `percolate.get_time`. 
- -==== Cluster state REST API - -The cluster state API doesn't return the `routing_nodes` section anymore when -`routing_table` is requested. The newly introduced `routing_nodes` flag can be -used separately to control whether `routing_nodes` should be returned. - -==== Index status API - -The deprecated index status API has been removed. - -==== Nodes Stats API - -Queue lengths are now reported as basic numeric so they can easily processed by code. Before we used a human -readable format. For example, a queue with 1,000 items is now reported as `1000` instead of `1k`. diff --git a/docs/reference/migration/migrate_2_0/striping.asciidoc b/docs/reference/migration/migrate_2_0/striping.asciidoc deleted file mode 100644 index 2e80f29c7741..000000000000 --- a/docs/reference/migration/migrate_2_0/striping.asciidoc +++ /dev/null @@ -1,21 +0,0 @@ -[[breaking_20_multiple_literal_data_path_literal_striping]] -=== Multiple `path.data` striping - -Previously, if the `path.data` setting listed multiple data paths, then a -shard would be ``striped'' across all paths by writing a whole file to each -path in turn (in accordance with the `index.store.distributor` setting). The -result was that files from a single segment in a shard could be spread across -multiple disks, and the failure of any one disk could corrupt multiple shards. - -This striping is no longer supported. Instead, different shards may be -allocated to different paths, but all of the files in a single shard will be -written to the same path. - -If striping is detected while starting Elasticsearch 2.0.0 or later, *all of -the files belonging to the same shard will be migrated to the same path*. If -there is not enough disk space to complete this migration, the upgrade will be -cancelled and can only be resumed once enough disk space is made available. - -The `index.store.distributor` setting has also been removed. 
- - diff --git a/docs/reference/migration/migrate_2_1.asciidoc b/docs/reference/migration/migrate_2_1.asciidoc deleted file mode 100644 index 454a57f96bc8..000000000000 --- a/docs/reference/migration/migrate_2_1.asciidoc +++ /dev/null @@ -1,87 +0,0 @@ -[[breaking-changes-2.1]] -== Breaking changes in 2.1 - -This section discusses the changes that you need to be aware of when migrating -your application to Elasticsearch 2.1. - -* <> -* <> -* <> -* <> -* <> -* <> - -[[breaking_21_search_changes]] -=== Search changes - -==== `search_type=scan` deprecated - -The `scan` search type has been deprecated. All benefits from this search -type can now be achieved by doing a scroll request that sorts documents in -`_doc` order, for instance: - -[source,sh] ---------------- -GET /my_index/_search?scroll=2m -{ - "sort": [ - "_doc" - ] -} ---------------- - -Scroll requests sorted by `_doc` have been optimized to more efficiently resume -from where the previous request stopped, so this will have the same performance -characteristics as the former `scan` search type. - -==== from + size limits - -Elasticsearch will now return an error message if a query's `from` + `size` is -more than the `index.max_result_window` parameter. This parameter defaults to -10,000 which is safe for almost all clusters. Values higher than can consume -significant chunks of heap memory per search and per shard executing the -search. It's safest to leave this value as it is an use the scroll api for any -deep scrolling but this setting is dynamic so it can raised or lowered as -needed. - -[[breaking_21_update_changes]] -=== Update changes - -==== Updates now `detect_noop` by default - -We've switched the default value of the `detect_noop` option from `false` to -`true`. This means that Elasticsearch will ignore updates that don't change -source unless you explicitly set `"detect_noop": false`. 
`detect_noop` was -always computationally cheap compared to the expense of the update which can be -thought of as a delete operation followed by an index operation. - -[[breaking_21_removed_features]] -=== Removed features - -==== `indices.fielddata.cache.expire` - -The experimental feature `indices.fielddata.cache.expire` has been removed. -For indices that have this setting configured, this config will be ignored. - -[[breaking_21_more_like_this]] -=== More Like This - -The MoreLikeThisQueryBuilder#ignoreLike methods have been deprecated in favor -of using the unlike methods. - -MoreLikeThisBuilder#addItem has been deprecated in favor of using -MoreLikeThisBuilder#addLikeItem. - -[[breaking_21_nested_sorting]] -=== Nested sorting - -If sorting on field inside a nested object then the `nested_path` should be specified. -Before there was an attempt to resolve the nested path automatically, but that was sometimes incorrect. -To avoid confusion the `nested_path` should always be specified. - -[[breaking_21_index_apis]] -=== Index APIs - -==== Optimize API - -The Optimize API has been deprecated, all new optimize actions should use the new Force Merge API. diff --git a/docs/reference/migration/migrate_2_2.asciidoc b/docs/reference/migration/migrate_2_2.asciidoc deleted file mode 100644 index d6035c83b8ae..000000000000 --- a/docs/reference/migration/migrate_2_2.asciidoc +++ /dev/null @@ -1,80 +0,0 @@ -[[breaking-changes-2.2]] -== Breaking changes in 2.2 - -This section discusses the changes that you need to be aware of when migrating -your application to Elasticsearch 2.2. - -[[float]] -=== Mapping APIs - -==== Geo Point Type - -The `geo_point` format has been changed to reduce index size and the time required to both index and query -geo point data. To make these performance improvements possible both `doc_values` and `coerce` are required -and therefore cannot be changed. 
For this reason the `doc_values` and `coerce` parameters have been removed -from the <> field mapping. - -[float] -=== Scripting and security - -The Java Security Manager is being used to lock down the privileges available -to the scripting languages and to restrict the classes they are allowed to -load to a predefined whitelist. These changes may cause scripts which worked -in earlier versions to fail. See <> for more -details. - -[float] -=== Field stats API - -The field stats' response format has been changed for number based and date -fields. The `min_value` and `max_value` elements now return values as number -and the new `min_value_as_string` and `max_value_as_string` return the values -as string. - -[float] -=== Default logging using systemd - -In previous versions of Elasticsearch using systemd, the default logging -configuration routed standard output to `/dev/null` and standard error to -the journal. However, there are often critical error messages at -startup that are logged to standard output rather than standard error -and these error messages would be lost to the ether. The default has -changed to now route standard output to the journal and standard error -to inherit this setting (these are the defaults for systemd). These -settings can be modified by editing the `elasticsearch.service` file. - -[float] -=== Java Client - -Previously it was possible to iterate over `ClusterHealthResponse` to get information about `ClusterIndexHealth`. 
-While this is still possible, it requires now iterating over the values returned from `getIndices()`: - -[source,java] ---------------- -ClusterHealthResponse clusterHealthResponse = client.admin().cluster().prepareHealth().get(); -for (Map.Entry index : clusterHealthResponse.getIndices().entrySet()) { - String indexName = index.getKey(); - ClusterIndexHealth health = index.getValue(); -} ---------------- - -[float] -=== Cloud AWS Plugin - -Proxy settings have been deprecated and renamed: - -* from `cloud.aws.proxy_host` to `cloud.aws.proxy.host` -* from `cloud.aws.ec2.proxy_host` to `cloud.aws.ec2.proxy.host` -* from `cloud.aws.s3.proxy_host` to `cloud.aws.s3.proxy.host` -* from `cloud.aws.proxy_port` to `cloud.aws.proxy.port` -* from `cloud.aws.ec2.proxy_port` to `cloud.aws.ec2.proxy.port` -* from `cloud.aws.s3.proxy_port` to `cloud.aws.s3.proxy.port` - -If you are using proxy settings, update your settings as deprecated ones will -be removed in next major version. - -[float] -=== Multicast plugin deprecated - -The `discovery-multicast` plugin has been deprecated in 2.2.0 and has -been removed in 3.0.0. diff --git a/docs/reference/migration/migrate_2_3.asciidoc b/docs/reference/migration/migrate_2_3.asciidoc deleted file mode 100644 index 0d741e2adb2a..000000000000 --- a/docs/reference/migration/migrate_2_3.asciidoc +++ /dev/null @@ -1,19 +0,0 @@ -[[breaking-changes-2.3]] -== Breaking changes in 2.3 - -This section discusses the changes that you need to be aware of when migrating -your application to Elasticsearch 2.3. - -* <> - -[[breaking_23_index_apis]] -=== Mappings - -==== Limit to the number of `nested` fields - -Indexing a document with 100 nested fields actually indexes 101 documents as each nested -document is indexed as a separate document. To safeguard against ill-defined mappings -the number of nested fields that can be defined per index has been limited to 50. 
-This default limit can be changed with the index setting `index.mapping.nested_fields.limit`. -Note that the limit is only checked when new indices are created or mappings are updated. It -will thus only affect existing pre-2.3 indices if their mapping is changed. From e472d7894bd6b26000b60afa5aeb4b7e8f1a6375 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Fri, 12 Feb 2016 10:18:22 +0100 Subject: [PATCH 204/320] Log suppressed stack traces under DEBUG To make the API's output easier to read we are suppressing stack traces (#12991) unless explicitly requested by setting `error_trace=true` on the request. To compensate we are logging the stacktrace into the logs so people can look it up even if the error_trace wasn't enabled. Currently we do so using the `INFO` level which can be verbose if an api is called repeatedly by some automation. For example, if someone tries to read from an index that doesn't exist we will respond with a 404 exception and log under info every time. We should reduce the level to `DEBUG` as we do with other API driven errors. Internal errors (rest codes >=500) are logged as WARN. 
Closes #16627 --- .../main/java/org/elasticsearch/rest/BytesRestResponse.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java b/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java index ac8eadade0b7..52f624849fc2 100644 --- a/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java +++ b/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java @@ -126,7 +126,11 @@ public class BytesRestResponse extends RestResponse { if (channel.request().paramAsBoolean("error_trace", !ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT)) { params = new ToXContent.DelegatingMapParams(Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE, "false"), channel.request()); } else { - SUPPRESSED_ERROR_LOGGER.info("{} Params: {}", t, channel.request().path(), channel.request().params()); + if (status.getStatus() < 500) { + SUPPRESSED_ERROR_LOGGER.debug("{} Params: {}", t, channel.request().path(), channel.request().params()); + } else { + SUPPRESSED_ERROR_LOGGER.warn("{} Params: {}", t, channel.request().path(), channel.request().params()); + } params = channel.request(); } builder.field("error"); From 4353b2e024fe8523678ed1072f2b92a4ed1e3618 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sun, 13 Mar 2016 15:49:49 -0400 Subject: [PATCH 205/320] Do not pass double-dash arguments on startup This commit addresses an issue in the init scripts which are passing invalid command line arguments to the startup script. 
Closes #17087 --- distribution/deb/src/main/packaging/init.d/elasticsearch | 2 +- distribution/rpm/src/main/packaging/init.d/elasticsearch | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/distribution/deb/src/main/packaging/init.d/elasticsearch b/distribution/deb/src/main/packaging/init.d/elasticsearch index d0ad04cef7a4..078e79a92d1b 100755 --- a/distribution/deb/src/main/packaging/init.d/elasticsearch +++ b/distribution/deb/src/main/packaging/init.d/elasticsearch @@ -99,7 +99,7 @@ fi # Define other required variables PID_FILE="$PID_DIR/$NAME.pid" DAEMON=$ES_HOME/bin/elasticsearch -DAEMON_OPTS="-d -p $PID_FILE --default.path.home=$ES_HOME --default.path.logs=$LOG_DIR --default.path.data=$DATA_DIR --default.path.conf=$CONF_DIR" +DAEMON_OPTS="-d -p $PID_FILE -D es.default.path.home=$ES_HOME -D es.default.path.logs=$LOG_DIR -D es.default.path.data=$DATA_DIR -D es.default.path.conf=$CONF_DIR" export ES_HEAP_SIZE export ES_HEAP_NEWSIZE diff --git a/distribution/rpm/src/main/packaging/init.d/elasticsearch b/distribution/rpm/src/main/packaging/init.d/elasticsearch index c56944b7c3ca..1132fca4f9e3 100644 --- a/distribution/rpm/src/main/packaging/init.d/elasticsearch +++ b/distribution/rpm/src/main/packaging/init.d/elasticsearch @@ -117,7 +117,7 @@ start() { cd $ES_HOME echo -n $"Starting $prog: " # if not running, start it up here, usually something like "daemon $exec" - daemon --user $ES_USER --pidfile $pidfile $exec -p $pidfile -d -Des.default.path.home=$ES_HOME -Des.default.path.logs=$LOG_DIR -Des.default.path.data=$DATA_DIR -Des.default.path.conf=$CONF_DIR + daemon --user $ES_USER --pidfile $pidfile $exec -p $pidfile -d -D es.default.path.home=$ES_HOME -D es.default.path.logs=$LOG_DIR -D es.default.path.data=$DATA_DIR -D es.default.path.conf=$CONF_DIR retval=$? 
echo [ $retval -eq 0 ] && touch $lockfile From 8ac5a98b87c377e97174f7a279a2f103f458b67d Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sun, 13 Mar 2016 19:12:06 -0400 Subject: [PATCH 206/320] Remove links to nonexistent migration docs --- docs/reference/migration/index.asciidoc | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/docs/reference/migration/index.asciidoc b/docs/reference/migration/index.asciidoc index 57401cb01d7a..f8d742b3b673 100644 --- a/docs/reference/migration/index.asciidoc +++ b/docs/reference/migration/index.asciidoc @@ -17,17 +17,3 @@ As a general rule: See <> for more info. -- include::migrate_5_0.asciidoc[] - -include::migrate_2_3.asciidoc[] - -include::migrate_2_2.asciidoc[] - -include::migrate_2_1.asciidoc[] - -include::migrate_2_0.asciidoc[] - -include::migrate_1_6.asciidoc[] - -include::migrate_1_4.asciidoc[] - -include::migrate_1_0.asciidoc[] From 8a05c2a2bef6b5980df286d885ae732657872d15 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sun, 13 Mar 2016 09:10:56 -0400 Subject: [PATCH 207/320] Bootstrap does not set system properties Today, certain bootstrap properties are set and read via system properties. This action-at-distance way of managing these properties is rather confusing, and completely unnecessary. But another problem exists with setting these as system properties. Namely, these system properties are interpreted as Elasticsearch settings, not all of which are registered. This leads to Elasticsearch failing to startup if any of these special properties are set. Instead, these properties should be kept as local as possible, and passed around as method parameters where needed. This eliminates the action-at-distance way of handling these properties, and eliminates the need to register these non-setting properties. This commit does exactly that. Additionally, today we use the "-D" command line flag to set the properties, but this is confusing because "-D" is a special flag to the JVM for setting system properties. 
This creates confusion because some "-D" properties should be passed via arguments to the JVM (so via ES_JAVA_OPTS), and some should be passed as arguments to Elasticsearch. This commit changes the "-D" flag for Elasticsearch settings to "-E". --- .../elasticsearch/gradle/BuildPlugin.groovy | 1 + .../gradle/test/ClusterConfiguration.groovy | 7 + .../elasticsearch/gradle/test/NodeInfo.groovy | 10 +- .../resources/checkstyle_suppressions.xml | 2 - .../elasticsearch/bootstrap/Bootstrap.java | 60 +++--- .../bootstrap/BootstrapCliParser.java | 95 --------- .../bootstrap/Elasticsearch.java | 85 +++++++- .../common/logging/LogConfigurator.java | 4 +- .../common/settings/Settings.java | 25 +-- .../internal/InternalSettingsPreparer.java | 12 +- .../ElasticsearchCommandLineParsingTests.java | 195 ++++++++++++++++++ .../common/logging/config/logging.yml | 2 +- .../src/main/packaging/init.d/elasticsearch | 2 +- .../src/main/packaging/init.d/elasticsearch | 2 +- .../packaging/systemd/elasticsearch.service | 10 +- .../src/main/resources/bin/elasticsearch | 8 +- .../resources/bin/elasticsearch-plugin.bat | 2 +- .../main/resources/bin/elasticsearch.in.bat | 2 +- .../src/main/resources/config/logging.yml | 2 +- docs/plugins/plugin-script.asciidoc | 2 +- docs/reference/getting-started.asciidoc | 2 +- .../allocation/filtering.asciidoc | 2 +- .../migration/migrate_5_0/settings.asciidoc | 17 +- .../cluster/allocation_awareness.asciidoc | 2 +- docs/reference/modules/node.asciidoc | 2 +- docs/reference/setup.asciidoc | 7 +- docs/reference/setup/configuration.asciidoc | 4 +- docs/reference/setup/rolling_upgrade.asciidoc | 2 +- modules/lang-groovy/build.gradle | 4 +- modules/lang-mustache/build.gradle | 4 +- plugins/lang-javascript/build.gradle | 4 +- plugins/lang-python/build.gradle | 4 +- .../bootstrap/BootstrapCliParserTests.java | 164 --------------- qa/smoke-test-ingest-disabled/build.gradle | 2 +- .../build.gradle | 2 +- .../scripts/packaging_test_utils.bash | 2 +- 
.../packaging/scripts/plugin_test_cases.bash | 4 +- .../common/cli/CliToolTestCase.java | 65 ------ 38 files changed, 392 insertions(+), 429 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/bootstrap/BootstrapCliParser.java create mode 100644 core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCommandLineParsingTests.java delete mode 100644 qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java delete mode 100644 test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index ca78157bcf2e..a63d31e90852 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -407,6 +407,7 @@ class BuildPlugin implements Plugin { systemProperty 'jna.nosys', 'true' // default test sysprop values systemProperty 'tests.ifNoTests', 'fail' + // TODO: remove setting logging level via system property systemProperty 'es.logger.level', 'WARN' for (Map.Entry property : System.properties.entrySet()) { if (property.getKey().startsWith('tests.') || diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy index 3e8b62253294..2adc59e9e9dd 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy @@ -73,6 +73,8 @@ class ClusterConfiguration { return tmpFile.exists() } + Map esSettings = new HashMap<>(); + Map systemProperties = new HashMap<>() Map settings = new HashMap<>() @@ -86,6 +88,11 @@ class ClusterConfiguration { LinkedHashMap setupCommands = new LinkedHashMap<>() + @Input + void esSetting(String setting, String 
value) { + esSettings.put(setting, value); + } + @Input void systemProperty(String property, String value) { systemProperties.put(property, value) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index b41b18220000..168a67a47286 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -129,14 +129,16 @@ class NodeInfo { 'JAVA_HOME' : project.javaHome, 'ES_GC_OPTS': config.jvmArgs // we pass these with the undocumented gc opts so the argline can set gc, etc ] - args.add("-Des.node.portsfile=true") - args.addAll(config.systemProperties.collect { key, value -> "-D${key}=${value}" }) + args.addAll("-E", "es.node.portsfile=true") + args.addAll(config.esSettings.collectMany { key, value -> ["-E", "${key}=${value}" ] }) + env.put('ES_JAVA_OPTS', config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ")) for (Map.Entry property : System.properties.entrySet()) { if (property.getKey().startsWith('es.')) { - args.add("-D${property.getKey()}=${property.getValue()}") + args.add("-E") + args.add("${property.getKey()}=${property.getValue()}") } } - args.add("-Des.path.conf=${confDir}") + args.addAll("-E", "es.path.conf=${confDir}") if (Os.isFamily(Os.FAMILY_WINDOWS)) { args.add('"') // end the entire command, quoted } diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index cbe612e5358b..0f5a59abea6a 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -259,7 +259,6 @@ - @@ -1597,7 +1596,6 @@ - diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 6cd2b4d80fe3..3ad592af6352 100644 --- 
a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -19,21 +19,14 @@ package org.elasticsearch.bootstrap; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.PrintStream; -import java.nio.file.Path; -import java.util.Locale; -import java.util.concurrent.CountDownLatch; - import org.apache.lucene.util.Constants; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.StringHelper; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; -import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.Terminal; import org.elasticsearch.common.PidFile; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.inject.CreationException; import org.elasticsearch.common.logging.ESLogger; @@ -47,7 +40,13 @@ import org.elasticsearch.monitor.process.ProcessProbe; import org.elasticsearch.node.Node; import org.elasticsearch.node.internal.InternalSettingsPreparer; -import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.PrintStream; +import java.nio.file.Path; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.CountDownLatch; /** * Internal startup code. @@ -189,9 +188,14 @@ final class Bootstrap { node = new Node(nodeSettings); } - private static Environment initialSettings(boolean foreground) { - Terminal terminal = foreground ? Terminal.DEFAULT : null; - return InternalSettingsPreparer.prepareEnvironment(EMPTY_SETTINGS, terminal); + private static Environment initialSettings(boolean daemonize, String pathHome, String pidFile) { + Terminal terminal = daemonize ? 
null : Terminal.DEFAULT; + Settings.Builder builder = Settings.builder(); + builder.put(Environment.PATH_HOME_SETTING.getKey(), pathHome); + if (Strings.hasLength(pidFile)) { + builder.put(Environment.PIDFILE_SETTING.getKey(), pidFile); + } + return InternalSettingsPreparer.prepareEnvironment(builder.build(), terminal); } private void start() { @@ -218,22 +222,19 @@ final class Bootstrap { * This method is invoked by {@link Elasticsearch#main(String[])} * to startup elasticsearch. */ - static void init(String[] args) throws Throwable { + static void init( + final boolean daemonize, + final String pathHome, + final String pidFile, + final Map esSettings) throws Throwable { // Set the system property before anything has a chance to trigger its use initLoggerPrefix(); - BootstrapCliParser parser = new BootstrapCliParser(); - int status = parser.main(args, Terminal.DEFAULT); - - if (parser.shouldRun() == false || status != ExitCodes.OK) { - exit(status); - } + elasticsearchSettings(esSettings); INSTANCE = new Bootstrap(); - boolean foreground = !"false".equals(System.getProperty("es.foreground", System.getProperty("es-foreground"))); - - Environment environment = initialSettings(foreground); + Environment environment = initialSettings(daemonize, pathHome, pidFile); Settings settings = environment.settings(); LogConfigurator.configure(settings, true); checkForCustomConfFile(); @@ -249,7 +250,7 @@ final class Bootstrap { } try { - if (!foreground) { + if (daemonize) { Loggers.disableConsoleLogging(); closeSystOut(); } @@ -264,12 +265,12 @@ final class Bootstrap { INSTANCE.start(); - if (!foreground) { + if (daemonize) { closeSysError(); } } catch (Throwable e) { // disable console logging, so user does not see the exception twice (jvm will show it already) - if (foreground) { + if (!daemonize) { Loggers.disableConsoleLogging(); } ESLogger logger = Loggers.getLogger(Bootstrap.class); @@ -289,7 +290,7 @@ final class Bootstrap { logger.error("Exception", e); } // re-enable 
it if appropriate, so they can see any logging during the shutdown process - if (foreground) { + if (!daemonize) { Loggers.enableConsoleLogging(); } @@ -297,6 +298,13 @@ final class Bootstrap { } } + @SuppressForbidden(reason = "Sets system properties passed as CLI parameters") + private static void elasticsearchSettings(Map esSettings) { + for (Map.Entry esSetting : esSettings.entrySet()) { + System.setProperty(esSetting.getKey(), esSetting.getValue()); + } + } + @SuppressForbidden(reason = "System#out") private static void closeSystOut() { System.out.close(); diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCliParser.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCliParser.java deleted file mode 100644 index 5c927305f149..000000000000 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCliParser.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.bootstrap; - -import java.util.Arrays; - -import joptsimple.OptionSet; -import joptsimple.OptionSpec; -import org.elasticsearch.Build; -import org.elasticsearch.cli.Command; -import org.elasticsearch.cli.ExitCodes; -import org.elasticsearch.cli.UserError; -import org.elasticsearch.common.Strings; -import org.elasticsearch.cli.Terminal; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.monitor.jvm.JvmInfo; - -final class BootstrapCliParser extends Command { - - private final OptionSpec versionOption; - private final OptionSpec daemonizeOption; - private final OptionSpec pidfileOption; - private final OptionSpec propertyOption; - private boolean shouldRun = false; - - BootstrapCliParser() { - super("Starts elasticsearch"); - // TODO: in jopt-simple 5.0, make this mutually exclusive with all other options - versionOption = parser.acceptsAll(Arrays.asList("V", "version"), - "Prints elasticsearch version information and exits"); - daemonizeOption = parser.acceptsAll(Arrays.asList("d", "daemonize"), - "Starts Elasticsearch in the background"); - // TODO: in jopt-simple 5.0 this option type can be a Path - pidfileOption = parser.acceptsAll(Arrays.asList("p", "pidfile"), - "Creates a pid file in the specified path on start") - .withRequiredArg(); - propertyOption = parser.accepts("D", "Configures an Elasticsearch setting") - .withRequiredArg(); - } - - // TODO: don't use system properties as a way to do this, its horrible... - @SuppressForbidden(reason = "Sets system properties passed as CLI parameters") - @Override - protected void execute(Terminal terminal, OptionSet options) throws Exception { - if (options.has(versionOption)) { - terminal.println("Version: " + org.elasticsearch.Version.CURRENT - + ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date() - + ", JVM: " + JvmInfo.jvmInfo().version()); - return; - } - - // TODO: don't use sysprops for any of these! 
pass the args through to bootstrap... - if (options.has(daemonizeOption)) { - System.setProperty("es.foreground", "false"); - } - String pidFile = pidfileOption.value(options); - if (Strings.isNullOrEmpty(pidFile) == false) { - System.setProperty("es.pidfile", pidFile); - } - - for (String property : propertyOption.values(options)) { - String[] keyValue = property.split("=", 2); - if (keyValue.length != 2) { - throw new UserError(ExitCodes.USAGE, "Malformed elasticsearch setting, must be of the form key=value"); - } - String key = keyValue[0]; - if (key.startsWith("es.") == false) { - key = "es." + key; - } - System.setProperty(key, keyValue[1]); - } - shouldRun = true; - } - - boolean shouldRun() { - return shouldRun; - } -} diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 3b95c3f4a6ff..dfe49c52e98d 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -19,23 +19,98 @@ package org.elasticsearch.bootstrap; +import joptsimple.OptionSet; +import joptsimple.OptionSpec; +import joptsimple.util.KeyValuePair; +import org.elasticsearch.Build; +import org.elasticsearch.cli.Command; +import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.cli.Terminal; +import org.elasticsearch.cli.UserError; +import org.elasticsearch.monitor.jvm.JvmInfo; + import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; /** * This class starts elasticsearch. 
*/ -public final class Elasticsearch { +class Elasticsearch extends Command { + + private final OptionSpec versionOption; + private final OptionSpec daemonizeOption; + private final OptionSpec pathHomeOption; + private final OptionSpec pidfileOption; + private final OptionSpec propertyOption; /** no instantiation */ - private Elasticsearch() {} + Elasticsearch() { + super("starts elasticsearch"); + // TODO: in jopt-simple 5.0, make this mutually exclusive with all other options + versionOption = parser.acceptsAll(Arrays.asList("V", "version"), + "Prints elasticsearch version information and exits"); + daemonizeOption = parser.acceptsAll(Arrays.asList("d", "daemonize"), + "Starts Elasticsearch in the background"); + // TODO: in jopt-simple 5.0 this option type can be a Path + pathHomeOption = parser.acceptsAll(Arrays.asList("H", "path.home"), "").withRequiredArg(); + // TODO: in jopt-simple 5.0 this option type can be a Path + pidfileOption = parser.acceptsAll(Arrays.asList("p", "pidfile"), + "Creates a pid file in the specified path on start") + .withRequiredArg(); + propertyOption = parser.accepts("E", "Configure an Elasticsearch setting").withRequiredArg().ofType(KeyValuePair.class); + } /** * Main entry point for starting elasticsearch */ - public static void main(String[] args) throws Exception { + public static void main(final String[] args) throws Exception { + final Elasticsearch elasticsearch = new Elasticsearch(); + int status = main(args, elasticsearch, Terminal.DEFAULT); + if (status != ExitCodes.OK) { + exit(status); + } + } + + static int main(final String[] args, final Elasticsearch elasticsearch, final Terminal terminal) throws Exception { + return elasticsearch.main(args, terminal); + } + + @Override + protected void execute(Terminal terminal, OptionSet options) throws Exception { + if (options.has(versionOption)) { + if (options.has(daemonizeOption) || options.has(pathHomeOption) || options.has(pidfileOption)) { + throw new 
UserError(ExitCodes.USAGE, "Elasticsearch version option is mutually exclusive with any other option"); + } + terminal.println("Version: " + org.elasticsearch.Version.CURRENT + + ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date() + + ", JVM: " + JvmInfo.jvmInfo().version()); + return; + } + + final boolean daemonize = options.has(daemonizeOption); + final String pathHome = pathHomeOption.value(options); + final String pidFile = pidfileOption.value(options); + + final Map esSettings = new HashMap<>(); + for (final KeyValuePair kvp : propertyOption.values(options)) { + if (!kvp.key.startsWith("es.")) { + throw new UserError(ExitCodes.USAGE, "Elasticsearch settings must be prefixed with [es.] but was [" + kvp.key + "]"); + } + if (kvp.value.isEmpty()) { + throw new UserError(ExitCodes.USAGE, "Elasticsearch setting [" + kvp.key + "] must not be empty"); + } + esSettings.put(kvp.key, kvp.value); + } + + init(daemonize, pathHome, pidFile, esSettings); + } + + void init(final boolean daemonize, final String pathHome, final String pidFile, final Map esSettings) { try { - Bootstrap.init(args); - } catch (Throwable t) { + Bootstrap.init(daemonize, pathHome, pidFile, esSettings); + } catch (final Throwable t) { // format exceptions to the console in a special way // to avoid 2MB stacktraces from guice, etc. 
throw new StartupError(t); diff --git a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java index 28feca13c020..da628b09d2b2 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java +++ b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java @@ -110,9 +110,7 @@ public class LogConfigurator { if (resolveConfig) { resolveConfig(environment, settingsBuilder); } - settingsBuilder - .putProperties("elasticsearch.", BootstrapInfo.getSystemProperties()) - .putProperties("es.", BootstrapInfo.getSystemProperties()); + settingsBuilder.putProperties("es.", BootstrapInfo.getSystemProperties()); // add custom settings after config was added so that they are not overwritten by config settingsBuilder.put(settings); settingsBuilder.replacePropertyPlaceholders(); diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index aafaff3e9d71..e06e4ad893b2 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -1136,10 +1136,10 @@ public final class Settings implements ToXContent { * @param properties The properties to put * @return The builder */ - public Builder putProperties(String prefix, Dictionary properties) { - for (Object key1 : Collections.list(properties.keys())) { - String key = Objects.toString(key1); - String value = Objects.toString(properties.get(key)); + public Builder putProperties(String prefix, Dictionary properties) { + for (Object property : Collections.list(properties.keys())) { + String key = Objects.toString(property); + String value = Objects.toString(properties.get(property)); if (key.startsWith(prefix)) { map.put(key.substring(prefix.length()), value); } @@ -1154,19 +1154,12 @@ public final class Settings implements ToXContent { 
* @param properties The properties to put * @return The builder */ - public Builder putProperties(String prefix, Dictionary properties, String[] ignorePrefixes) { - for (Object key1 : Collections.list(properties.keys())) { - String key = Objects.toString(key1); - String value = Objects.toString(properties.get(key)); + public Builder putProperties(String prefix, Dictionary properties, String ignorePrefix) { + for (Object property : Collections.list(properties.keys())) { + String key = Objects.toString(property); + String value = Objects.toString(properties.get(property)); if (key.startsWith(prefix)) { - boolean ignore = false; - for (String ignorePrefix : ignorePrefixes) { - if (key.startsWith(ignorePrefix)) { - ignore = true; - break; - } - } - if (!ignore) { + if (!key.startsWith(ignorePrefix)) { map.put(key.substring(prefix.length()), value); } } diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java index b1ad3a3239fe..8864a70ccdce 100644 --- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java @@ -53,8 +53,8 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; public class InternalSettingsPreparer { private static final String[] ALLOWED_SUFFIXES = {".yml", ".yaml", ".json", ".properties"}; - static final String[] PROPERTY_PREFIXES = {"es.", "elasticsearch."}; - static final String[] PROPERTY_DEFAULTS_PREFIXES = {"es.default.", "elasticsearch.default."}; + static final String PROPERTY_PREFIX = "es."; + static final String PROPERTY_DEFAULTS_PREFIX = "es.default."; public static final String SECRET_PROMPT_VALUE = "${prompt.secret}"; public static final String TEXT_PROMPT_VALUE = "${prompt.text}"; @@ -126,13 +126,9 @@ public class InternalSettingsPreparer { output.put(input); if (useSystemProperties(input)) 
{ if (loadDefaults) { - for (String prefix : PROPERTY_DEFAULTS_PREFIXES) { - output.putProperties(prefix, BootstrapInfo.getSystemProperties()); - } - } - for (String prefix : PROPERTY_PREFIXES) { - output.putProperties(prefix, BootstrapInfo.getSystemProperties(), PROPERTY_DEFAULTS_PREFIXES); + output.putProperties(PROPERTY_DEFAULTS_PREFIX, BootstrapInfo.getSystemProperties()); } + output.putProperties(PROPERTY_PREFIX, BootstrapInfo.getSystemProperties(), PROPERTY_DEFAULTS_PREFIX); } output.replacePropertyPlaceholders(); } diff --git a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCommandLineParsingTests.java b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCommandLineParsingTests.java new file mode 100644 index 000000000000..0d70cb8fba55 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCommandLineParsingTests.java @@ -0,0 +1,195 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.bootstrap; + +import org.elasticsearch.Build; +import org.elasticsearch.Version; +import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.cli.MockTerminal; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.monitor.jvm.JvmInfo; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; +import org.junit.Before; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; + +public class ElasticsearchCommandLineParsingTests extends ESTestCase { + + public void testVersion() throws Exception { + runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "-d"); + runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "--daemonize"); + runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "-H", "/tmp/home"); + runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "--path.home", "/tmp/home"); + runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "-p", "/tmp/pid"); + runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "--pidfile", "/tmp/pid"); + runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "-d"); + runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "--daemonize"); + runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "-H", "/tmp/home"); + runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "--path.home", "/tmp/home"); + runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "-p", "/tmp/pid"); + runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "--pidfile", "/tmp/pid"); + runTestThatVersionIsReturned("-V"); + runTestThatVersionIsReturned("--version"); + } + + private void 
runTestThatVersionIsMutuallyExclusiveToOtherOptions(String... args) throws Exception { + runTestVersion( + ExitCodes.USAGE, + output -> assertThat( + output, + containsString("ERROR: Elasticsearch version option is mutually exclusive with any other option")), + args); + } + + private void runTestThatVersionIsReturned(String... args) throws Exception { + runTestVersion(ExitCodes.OK, output -> { + assertThat(output, containsString("Version: " + Version.CURRENT.toString())); + assertThat(output, containsString("Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date())); + assertThat(output, containsString("JVM: " + JvmInfo.jvmInfo().version())); + }, args); + } + + private void runTestVersion(int expectedStatus, Consumer outputConsumer, String... args) throws Exception { + runTest(expectedStatus, false, outputConsumer, (daemonize, pathHome, pidFile, esSettings) -> {}, args); + } + + public void testThatPidFileCanBeConfigured() throws Exception { + runPidFileTest(ExitCodes.USAGE, false, output -> assertThat(output, containsString("Option p/pidfile requires an argument")), "-p"); + runPidFileTest(ExitCodes.OK, true, output -> {}, "-p", "/tmp/pid"); + runPidFileTest(ExitCodes.OK, true, output -> {}, "--pidfile", "/tmp/pid"); + } + + private void runPidFileTest(final int expectedStatus, final boolean expectedInit, Consumer outputConsumer, final String... args) + throws Exception { + runTest( + expectedStatus, + expectedInit, + outputConsumer, + (daemonize, pathHome, pidFile, esSettings) -> assertThat(pidFile, equalTo("/tmp/pid")), + args); + } + + public void testThatParsingDaemonizeWorks() throws Exception { + runDaemonizeTest(true, "-d"); + runDaemonizeTest(true, "--daemonize"); + runDaemonizeTest(false); + } + + private void runDaemonizeTest(final boolean expectedDaemonize, final String... 
args) throws Exception { + runTest( + ExitCodes.OK, + true, + output -> {}, + (daemonize, pathHome, pidFile, esSettings) -> assertThat(daemonize, equalTo(expectedDaemonize)), + args); + } + + public void testElasticsearchSettings() throws Exception { + runTest( + ExitCodes.OK, + true, + output -> {}, + (daemonize, pathHome, pidFile, esSettings) -> { + assertThat(esSettings.size(), equalTo(2)); + assertThat(esSettings, hasEntry("es.foo", "bar")); + assertThat(esSettings, hasEntry("es.baz", "qux")); + }, + "-Ees.foo=bar", "-E", "es.baz=qux" + ); + } + + public void testElasticsearchSettingPrefix() throws Exception { + runElasticsearchSettingPrefixTest("-E", "foo"); + runElasticsearchSettingPrefixTest("-E", "foo=bar"); + runElasticsearchSettingPrefixTest("-E", "=bar"); + } + + private void runElasticsearchSettingPrefixTest(String... args) throws Exception { + runTest( + ExitCodes.USAGE, + false, + output -> assertThat(output, containsString("Elasticsearch settings must be prefixed with [es.] but was [")), + (daemonize, pathHome, pidFile, esSettings) -> {}, + args + ); + } + + public void testElasticsearchSettingCanNotBeEmpty() throws Exception { + runTest( + ExitCodes.USAGE, + false, + output -> assertThat(output, containsString("Elasticsearch setting [es.foo] must not be empty")), + (daemonize, pathHome, pidFile, esSettings) -> {}, + "-E", "es.foo=" + ); + } + + public void testUnknownOption() throws Exception { + runTest( + ExitCodes.USAGE, + false, + output -> assertThat(output, containsString("network.host is not a recognized option")), + (daemonize, pathHome, pidFile, esSettings) -> {}, + "--network.host"); + } + + private interface InitConsumer { + void accept(final boolean daemonize, final String pathHome, final String pidFile, final Map esSettings); + } + + private void runTest( + final int expectedStatus, + final boolean expectedInit, + final Consumer outputConsumer, + final InitConsumer initConsumer, + String... 
args) throws Exception { + final MockTerminal terminal = new MockTerminal(); + try { + final AtomicBoolean init = new AtomicBoolean(); + final int status = Elasticsearch.main(args, new Elasticsearch() { + @Override + void init(final boolean daemonize, final String pathHome, final String pidFile, final Map esSettings) { + init.set(true); + initConsumer.accept(daemonize, pathHome, pidFile, esSettings); + } + }, terminal); + assertThat(status, equalTo(expectedStatus)); + assertThat(init.get(), equalTo(expectedInit)); + outputConsumer.accept(terminal.getOutput()); + } catch (Throwable t) { + // if an unexpected exception is thrown, we log + // terminal output to aid debugging + logger.info(terminal.getOutput()); + // rethrow so the test fails + throw t; + } + } + +} diff --git a/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml b/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml index bd7a15f44341..515e4320fd24 100644 --- a/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml +++ b/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml @@ -1,4 +1,4 @@ -# you can override this using by setting a system property, for example -Des.logger.level=DEBUG +# you can override this using by setting a system property, for example -Ees.logger.level=DEBUG es.logger.level: INFO rootLogger: ${es.logger.level}, console logger: diff --git a/distribution/deb/src/main/packaging/init.d/elasticsearch b/distribution/deb/src/main/packaging/init.d/elasticsearch index 078e79a92d1b..e2d857a7ffe3 100755 --- a/distribution/deb/src/main/packaging/init.d/elasticsearch +++ b/distribution/deb/src/main/packaging/init.d/elasticsearch @@ -99,7 +99,7 @@ fi # Define other required variables PID_FILE="$PID_DIR/$NAME.pid" DAEMON=$ES_HOME/bin/elasticsearch -DAEMON_OPTS="-d -p $PID_FILE -D es.default.path.home=$ES_HOME -D es.default.path.logs=$LOG_DIR -D es.default.path.data=$DATA_DIR -D 
es.default.path.conf=$CONF_DIR" +DAEMON_OPTS="-d -p $PID_FILE -Ees.default.path.home=$ES_HOME -Ees.default.path.logs=$LOG_DIR -Ees.default.path.data=$DATA_DIR -Ees.default.path.conf=$CONF_DIR" export ES_HEAP_SIZE export ES_HEAP_NEWSIZE diff --git a/distribution/rpm/src/main/packaging/init.d/elasticsearch b/distribution/rpm/src/main/packaging/init.d/elasticsearch index 1132fca4f9e3..c68a5b65f3f4 100644 --- a/distribution/rpm/src/main/packaging/init.d/elasticsearch +++ b/distribution/rpm/src/main/packaging/init.d/elasticsearch @@ -117,7 +117,7 @@ start() { cd $ES_HOME echo -n $"Starting $prog: " # if not running, start it up here, usually something like "daemon $exec" - daemon --user $ES_USER --pidfile $pidfile $exec -p $pidfile -d -D es.default.path.home=$ES_HOME -D es.default.path.logs=$LOG_DIR -D es.default.path.data=$DATA_DIR -D es.default.path.conf=$CONF_DIR + daemon --user $ES_USER --pidfile $pidfile $exec -p $pidfile -d -Ees.default.path.home=$ES_HOME -Ees.default.path.logs=$LOG_DIR -Ees.default.path.data=$DATA_DIR -Ees.default.path.conf=$CONF_DIR retval=$? 
echo [ $retval -eq 0 ] && touch $lockfile diff --git a/distribution/src/main/packaging/systemd/elasticsearch.service b/distribution/src/main/packaging/systemd/elasticsearch.service index 301586c1038d..4f643f6a4a4a 100644 --- a/distribution/src/main/packaging/systemd/elasticsearch.service +++ b/distribution/src/main/packaging/systemd/elasticsearch.service @@ -20,11 +20,11 @@ Group=elasticsearch ExecStartPre=/usr/share/elasticsearch/bin/elasticsearch-systemd-pre-exec ExecStart=/usr/share/elasticsearch/bin/elasticsearch \ - -Des.pidfile=${PID_DIR}/elasticsearch.pid \ - -Des.default.path.home=${ES_HOME} \ - -Des.default.path.logs=${LOG_DIR} \ - -Des.default.path.data=${DATA_DIR} \ - -Des.default.path.conf=${CONF_DIR} + -Ees.pidfile=${PID_DIR}/elasticsearch.pid \ + -Ees.default.path.home=${ES_HOME} \ + -Ees.default.path.logs=${LOG_DIR} \ + -Ees.default.path.data=${DATA_DIR} \ + -Ees.default.path.conf=${CONF_DIR} StandardOutput=journal StandardError=inherit diff --git a/distribution/src/main/resources/bin/elasticsearch b/distribution/src/main/resources/bin/elasticsearch index b15105f1854c..0d0e0069ae2a 100755 --- a/distribution/src/main/resources/bin/elasticsearch +++ b/distribution/src/main/resources/bin/elasticsearch @@ -126,11 +126,11 @@ export HOSTNAME # manual parsing to find out, if process should be detached daemonized=`echo $* | egrep -- '(^-d |-d$| -d |--daemonize$|--daemonize )'` if [ -z "$daemonized" ] ; then - exec "$JAVA" $JAVA_OPTS $ES_JAVA_OPTS -Des.path.home="$ES_HOME" -cp "$ES_CLASSPATH" \ - org.elasticsearch.bootstrap.Elasticsearch start "$@" + exec "$JAVA" $JAVA_OPTS $ES_JAVA_OPTS -cp "$ES_CLASSPATH" \ + org.elasticsearch.bootstrap.Elasticsearch --path.home "$ES_HOME" "$@" else - exec "$JAVA" $JAVA_OPTS $ES_JAVA_OPTS -Des.path.home="$ES_HOME" -cp "$ES_CLASSPATH" \ - org.elasticsearch.bootstrap.Elasticsearch start "$@" <&- & + exec "$JAVA" $JAVA_OPTS $ES_JAVA_OPTS -cp "$ES_CLASSPATH" \ + org.elasticsearch.bootstrap.Elasticsearch --path.home "$ES_HOME" 
"$@" <&- & retval=$? pid=$! [ $retval -eq 0 ] || exit $retval diff --git a/distribution/src/main/resources/bin/elasticsearch-plugin.bat b/distribution/src/main/resources/bin/elasticsearch-plugin.bat index 6c6be019fc67..9ed797e6308c 100644 --- a/distribution/src/main/resources/bin/elasticsearch-plugin.bat +++ b/distribution/src/main/resources/bin/elasticsearch-plugin.bat @@ -48,7 +48,7 @@ GOTO loop SET HOSTNAME=%COMPUTERNAME% -"%JAVA_HOME%\bin\java" -client -Des.path.home="%ES_HOME%" !properties! -cp "%ES_HOME%/lib/*;" "org.elasticsearch.plugins.PluginCli" !args! +"%JAVA_HOME%\bin\java" -client -Ees.path.home="%ES_HOME%" !properties! -cp "%ES_HOME%/lib/*;" "org.elasticsearch.plugins.PluginCli" !args! goto finally diff --git a/distribution/src/main/resources/bin/elasticsearch.in.bat b/distribution/src/main/resources/bin/elasticsearch.in.bat index b909a4649527..80ed7894316b 100644 --- a/distribution/src/main/resources/bin/elasticsearch.in.bat +++ b/distribution/src/main/resources/bin/elasticsearch.in.bat @@ -104,4 +104,4 @@ ECHO additional elements via the plugin mechanism, or if code must really be 1>& ECHO added to the main classpath, add jars to lib\, unsupported 1>&2 EXIT /B 1 ) -set ES_PARAMS=-Delasticsearch -Des-foreground=yes -Des.path.home="%ES_HOME%" +set ES_PARAMS=-Delasticsearch -Ees.path.home="%ES_HOME%" diff --git a/distribution/src/main/resources/config/logging.yml b/distribution/src/main/resources/config/logging.yml index 939aa1eed0ef..187e79cffa07 100644 --- a/distribution/src/main/resources/config/logging.yml +++ b/distribution/src/main/resources/config/logging.yml @@ -1,4 +1,4 @@ -# you can override this using by setting a system property, for example -Des.logger.level=DEBUG +# you can override this using by setting a system property, for example -Ees.logger.level=DEBUG es.logger.level: INFO rootLogger: ${es.logger.level}, console, file logger: diff --git a/docs/plugins/plugin-script.asciidoc b/docs/plugins/plugin-script.asciidoc index 
d2e57b2efc80..fba4704ab972 100644 --- a/docs/plugins/plugin-script.asciidoc +++ b/docs/plugins/plugin-script.asciidoc @@ -167,7 +167,7 @@ can do this as follows: [source,sh] --------------------- -sudo bin/elasticsearch-plugin -Des.path.conf=/path/to/custom/config/dir install +sudo bin/elasticsearch-plugin -Ees.path.conf=/path/to/custom/config/dir install --------------------- You can also set the `CONF_DIR` environment variable to the custom config diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index 8ff832c673f4..47bcb3031ffd 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -163,7 +163,7 @@ As mentioned previously, we can override either the cluster or node name. This c [source,sh] -------------------------------------------------- -./elasticsearch --cluster.name my_cluster_name --node.name my_node_name +./elasticsearch -Ees.cluster.name=my_cluster_name -Ees.node.name=my_node_name -------------------------------------------------- Also note the line marked http with information about the HTTP address (`192.168.8.112`) and port (`9200`) that our node is reachable from. By default, Elasticsearch uses port `9200` to provide access to its REST API. This port is configurable if necessary. diff --git a/docs/reference/index-modules/allocation/filtering.asciidoc b/docs/reference/index-modules/allocation/filtering.asciidoc index 784fa1af24ca..44c9b1a712cf 100644 --- a/docs/reference/index-modules/allocation/filtering.asciidoc +++ b/docs/reference/index-modules/allocation/filtering.asciidoc @@ -14,7 +14,7 @@ attribute as follows: [source,sh] ------------------------ -bin/elasticsearch --node.rack rack1 --node.size big <1> +bin/elasticsearch -Ees.node.rack=rack1 -Ees.node.size=big <1> ------------------------ <1> These attribute settings can also be specified in the `elasticsearch.yml` config file. 
diff --git a/docs/reference/migration/migrate_5_0/settings.asciidoc b/docs/reference/migration/migrate_5_0/settings.asciidoc index 002d6cf05dfc..5d39f87773db 100644 --- a/docs/reference/migration/migrate_5_0/settings.asciidoc +++ b/docs/reference/migration/migrate_5_0/settings.asciidoc @@ -153,7 +153,7 @@ on startup if it is set too low. ==== Removed es.netty.gathering -Disabling Netty from using NIO gathering could be done via the escape +Disabling Netty from using NIO gathring could be done via the escape hatch of setting the system property "es.netty.gathering" to "false". Time has proven enabling gathering by default is a non-issue and this non-documented setting has been removed. @@ -172,3 +172,18 @@ Two cache concurrency level settings `indices.fielddata.cache.concurrency_level` because they no longer apply to the cache implementation used for the request cache and the field data cache. +==== Using system properties to configure Elasticsearch + +Elasticsearch can be configured by setting system properties on the +command line via `-Des.name.of.property=value.of.property`. This will be +removed in a future version of Elasticsearch. Instead, use +`-E es.name.of.setting=value.of.setting`. Note that in all cases the +name of the setting must be prefixed with `es.`. + +==== Removed using double-dashes to configure Elasticsearch + +Elasticsearch could previously be configured on the command line by +setting settings via `--name.of.setting value.of.setting`. This feature +has been removed. Instead, use +`-Ees.name.of.setting=value.of.setting`. Note that in all cases the +name of the setting must be prefixed with `es.`. 
diff --git a/docs/reference/modules/cluster/allocation_awareness.asciidoc b/docs/reference/modules/cluster/allocation_awareness.asciidoc index ee3cbc17f5fe..5735b52a1a8b 100644 --- a/docs/reference/modules/cluster/allocation_awareness.asciidoc +++ b/docs/reference/modules/cluster/allocation_awareness.asciidoc @@ -21,7 +21,7 @@ attribute called `rack_id` -- we could use any attribute name. For example: [source,sh] ---------------------- -./bin/elasticsearch --node.rack_id rack_one <1> +./bin/elasticsearch -Ees.node.rack_id=rack_one <1> ---------------------- <1> This setting could also be specified in the `elasticsearch.yml` config file. diff --git a/docs/reference/modules/node.asciidoc b/docs/reference/modules/node.asciidoc index 0117d193043e..f3da77876679 100644 --- a/docs/reference/modules/node.asciidoc +++ b/docs/reference/modules/node.asciidoc @@ -233,7 +233,7 @@ Like all node settings, it can also be specified on the command line as: [source,sh] ----------------------- -./bin/elasticsearch --path.data /var/elasticsearch/data +./bin/elasticsearch -Ees.path.data=/var/elasticsearch/data ----------------------- TIP: When using the `.zip` or `.tar.gz` distributions, the `path.data` setting diff --git a/docs/reference/setup.asciidoc b/docs/reference/setup.asciidoc index 15f23e6fe1e8..f0bf03985b08 100644 --- a/docs/reference/setup.asciidoc +++ b/docs/reference/setup.asciidoc @@ -67,13 +67,12 @@ There are added features when using the `elasticsearch` shell script. The first, which was explained earlier, is the ability to easily run the process either in the foreground or the background. -Another feature is the ability to pass `-D` or getopt long style -configuration parameters directly to the script. When set, all override -anything set using either `JAVA_OPTS` or `ES_JAVA_OPTS`. For example: +Another feature is the ability to pass `-E` configuration parameters +directly to the script. 
For example: [source,sh] -------------------------------------------------- -$ bin/elasticsearch -Des.index.refresh_interval=5s --node.name=my-node +$ bin/elasticsearch -Ees.index.refresh_interval=5s -Ees.node.name=my-node -------------------------------------------------- ************************************************************************* diff --git a/docs/reference/setup/configuration.asciidoc b/docs/reference/setup/configuration.asciidoc index bef563cd9655..1a687f15fb9a 100644 --- a/docs/reference/setup/configuration.asciidoc +++ b/docs/reference/setup/configuration.asciidoc @@ -259,7 +259,7 @@ command, for example: [source,sh] -------------------------------------------------- -$ elasticsearch -Des.network.host=10.0.0.4 +$ elasticsearch -Ees.network.host=10.0.0.4 -------------------------------------------------- Another option is to set `es.default.` prefix instead of `es.` prefix, @@ -336,7 +336,7 @@ course, the above can also be set as a "collapsed" setting, for example: [source,sh] -------------------------------------------------- -$ elasticsearch -Des.index.refresh_interval=5s +$ elasticsearch -Ees.index.refresh_interval=5s -------------------------------------------------- All of the index level configuration can be found within each diff --git a/docs/reference/setup/rolling_upgrade.asciidoc b/docs/reference/setup/rolling_upgrade.asciidoc index b3c00d337f8c..cb9073b558e6 100644 --- a/docs/reference/setup/rolling_upgrade.asciidoc +++ b/docs/reference/setup/rolling_upgrade.asciidoc @@ -80,7 +80,7 @@ To upgrade using a zip or compressed tarball: overwrite the `config` or `data` directories. * Either copy the files in the `config` directory from your old installation - to your new installation, or use the `--path.conf` option on the command + to your new installation, or use the `-E path.conf=` option on the command line to point to an external config directory. 
* Either copy the files in the `data` directory from your old installation diff --git a/modules/lang-groovy/build.gradle b/modules/lang-groovy/build.gradle index 2160210ba732..340dd620ca64 100644 --- a/modules/lang-groovy/build.gradle +++ b/modules/lang-groovy/build.gradle @@ -28,8 +28,8 @@ dependencies { integTest { cluster { - systemProperty 'es.script.inline', 'true' - systemProperty 'es.script.indexed', 'true' + esSetting 'es.script.inline', 'true' + esSetting 'es.script.indexed', 'true' } } diff --git a/modules/lang-mustache/build.gradle b/modules/lang-mustache/build.gradle index f41ffb901285..36b58792d86c 100644 --- a/modules/lang-mustache/build.gradle +++ b/modules/lang-mustache/build.gradle @@ -28,7 +28,7 @@ dependencies { integTest { cluster { - systemProperty 'es.script.inline', 'true' - systemProperty 'es.script.indexed', 'true' + esSetting 'es.script.inline', 'true' + esSetting 'es.script.indexed', 'true' } } diff --git a/plugins/lang-javascript/build.gradle b/plugins/lang-javascript/build.gradle index dae5204db207..41d858243189 100644 --- a/plugins/lang-javascript/build.gradle +++ b/plugins/lang-javascript/build.gradle @@ -28,7 +28,7 @@ dependencies { integTest { cluster { - systemProperty 'es.script.inline', 'true' - systemProperty 'es.script.indexed', 'true' + esSetting 'es.script.inline', 'true' + esSetting 'es.script.indexed', 'true' } } diff --git a/plugins/lang-python/build.gradle b/plugins/lang-python/build.gradle index 0980d7f62c9e..bc9db2a20c25 100644 --- a/plugins/lang-python/build.gradle +++ b/plugins/lang-python/build.gradle @@ -28,8 +28,8 @@ dependencies { integTest { cluster { - systemProperty 'es.script.inline', 'true' - systemProperty 'es.script.indexed', 'true' + esSetting 'es.script.inline', 'true' + esSetting 'es.script.indexed', 'true' } } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java deleted 
file mode 100644 index fc7504fc97f5..000000000000 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/BootstrapCliParserTests.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.bootstrap; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; - -import joptsimple.OptionException; -import org.elasticsearch.Build; -import org.elasticsearch.Version; -import org.elasticsearch.cli.Command; -import org.elasticsearch.cli.CommandTestCase; -import org.elasticsearch.cli.UserError; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.monitor.jvm.JvmInfo; -import org.junit.After; -import org.junit.Before; - -import static org.hamcrest.Matchers.is; - -@SuppressForbidden(reason = "modifies system properties intentionally") -public class BootstrapCliParserTests extends CommandTestCase { - - @Override - protected Command newCommand() { - return new BootstrapCliParser(); - } - - private List propertiesToClear = new ArrayList<>(); - private Map properties; - - @Before - public void before() { - this.properties = new 
HashMap<>(System.getProperties()); - } - - @After - public void clearProperties() { - for (String property : propertiesToClear) { - System.clearProperty(property); - } - propertiesToClear.clear(); - assertEquals("properties leaked", properties, new HashMap<>(System.getProperties())); - } - - void assertShouldRun(boolean shouldRun) { - BootstrapCliParser parser = (BootstrapCliParser)command; - assertEquals(shouldRun, parser.shouldRun()); - } - - public void testVersion() throws Exception { - String output = execute("-V"); - assertTrue(output, output.contains(Version.CURRENT.toString())); - assertTrue(output, output.contains(Build.CURRENT.shortHash())); - assertTrue(output, output.contains(Build.CURRENT.date())); - assertTrue(output, output.contains(JvmInfo.jvmInfo().version())); - assertShouldRun(false); - - terminal.reset(); - output = execute("--version"); - assertTrue(output, output.contains(Version.CURRENT.toString())); - assertTrue(output, output.contains(Build.CURRENT.shortHash())); - assertTrue(output, output.contains(Build.CURRENT.date())); - assertTrue(output, output.contains(JvmInfo.jvmInfo().version())); - assertShouldRun(false); - } - - public void testPidfile() throws Exception { - registerProperties("es.pidfile"); - - // missing argument - OptionException e = expectThrows(OptionException.class, () -> { - execute("-p"); - }); - assertEquals("Option p/pidfile requires an argument", e.getMessage()); - assertShouldRun(false); - - // good cases - terminal.reset(); - execute("--pidfile", "/tmp/pid"); - assertSystemProperty("es.pidfile", "/tmp/pid"); - assertShouldRun(true); - - System.clearProperty("es.pidfile"); - terminal.reset(); - execute("-p", "/tmp/pid"); - assertSystemProperty("es.pidfile", "/tmp/pid"); - assertShouldRun(true); - } - - public void testNoDaemonize() throws Exception { - registerProperties("es.foreground"); - - execute(); - assertSystemProperty("es.foreground", null); - assertShouldRun(true); - } - - public void testDaemonize() throws 
Exception { - registerProperties("es.foreground"); - - execute("-d"); - assertSystemProperty("es.foreground", "false"); - assertShouldRun(true); - - System.clearProperty("es.foreground"); - execute("--daemonize"); - assertSystemProperty("es.foreground", "false"); - assertShouldRun(true); - } - - public void testConfig() throws Exception { - registerProperties("es.foo", "es.spam"); - - execute("-Dfoo=bar", "-Dspam=eggs"); - assertSystemProperty("es.foo", "bar"); - assertSystemProperty("es.spam", "eggs"); - assertShouldRun(true); - } - - public void testConfigMalformed() throws Exception { - UserError e = expectThrows(UserError.class, () -> { - execute("-Dfoo"); - }); - assertTrue(e.getMessage(), e.getMessage().contains("Malformed elasticsearch setting")); - } - - public void testUnknownOption() throws Exception { - OptionException e = expectThrows(OptionException.class, () -> { - execute("--network.host"); - }); - assertTrue(e.getMessage(), e.getMessage().contains("network.host is not a recognized option")); - } - - private void registerProperties(String ... 
systemProperties) { - propertiesToClear.addAll(Arrays.asList(systemProperties)); - } - - private void assertSystemProperty(String name, String expectedValue) throws Exception { - String msg = String.format(Locale.ROOT, "Expected property %s to be %s, terminal output was %s", name, expectedValue, terminal.getOutput()); - assertThat(msg, System.getProperty(name), is(expectedValue)); - } -} diff --git a/qa/smoke-test-ingest-disabled/build.gradle b/qa/smoke-test-ingest-disabled/build.gradle index ca71697a7b42..f8ebd6317869 100644 --- a/qa/smoke-test-ingest-disabled/build.gradle +++ b/qa/smoke-test-ingest-disabled/build.gradle @@ -21,6 +21,6 @@ apply plugin: 'elasticsearch.rest-test' integTest { cluster { - systemProperty 'es.node.ingest', 'false' + esSetting 'es.node.ingest', 'false' } } diff --git a/qa/smoke-test-reindex-with-groovy/build.gradle b/qa/smoke-test-reindex-with-groovy/build.gradle index a42f5e708a27..749f5c1237c6 100644 --- a/qa/smoke-test-reindex-with-groovy/build.gradle +++ b/qa/smoke-test-reindex-with-groovy/build.gradle @@ -21,6 +21,6 @@ apply plugin: 'elasticsearch.rest-test' integTest { cluster { - systemProperty 'es.script.inline', 'true' + esSetting 'es.script.inline', 'true' } } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash index 11961e069211..852d03ea6f6b 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash @@ -303,7 +303,7 @@ run_elasticsearch_service() { # This line is attempting to emulate the on login behavior of /usr/share/upstart/sessions/jayatana.conf [ -f /usr/share/java/jayatanaag.jar ] && export JAVA_TOOL_OPTIONS="-javaagent:/usr/share/java/jayatanaag.jar" # And now we can start Elasticsearch normally, in the background (-d) and with a pidfile (-p). 
-$timeoutCommand/tmp/elasticsearch/bin/elasticsearch $background -p /tmp/elasticsearch/elasticsearch.pid -Des.path.conf=$CONF_DIR $commandLineArgs +$timeoutCommand/tmp/elasticsearch/bin/elasticsearch $background -p /tmp/elasticsearch/elasticsearch.pid -Ees.path.conf=$CONF_DIR $commandLineArgs BASH [ "$status" -eq "$expectedStatus" ] elif is_systemd; then diff --git a/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash index c81d850d94d5..e829141def01 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash @@ -102,7 +102,7 @@ fi echo "CONF_FILE=$CONF_FILE" >> /etc/sysconfig/elasticsearch; fi - run_elasticsearch_service 1 -Des.default.config="$CONF_FILE" + run_elasticsearch_service 1 -Ees.default.config="$CONF_FILE" # remove settings again otherwise cleaning up before next testrun will fail if is_dpkg ; then @@ -408,7 +408,7 @@ fi remove_jvm_example local relativePath=${1:-$(readlink -m jvm-example-*.zip)} - sudo -E -u $ESPLUGIN_COMMAND_USER "$ESHOME/bin/elasticsearch-plugin" install "file://$relativePath" -Des.logger.level=DEBUG > /tmp/plugin-cli-output + sudo -E -u $ESPLUGIN_COMMAND_USER "$ESHOME/bin/elasticsearch-plugin" install "file://$relativePath" -Ees.logger.level=DEBUG > /tmp/plugin-cli-output local loglines=$(cat /tmp/plugin-cli-output | wc -l) if [ "$GROUP" == "TAR PLUGINS" ]; then [ "$loglines" -gt "7" ] || { diff --git a/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java deleted file mode 100644 index 576ecf2d1ee2..000000000000 --- a/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.cli; - -import java.io.IOException; - -import org.elasticsearch.cli.MockTerminal; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.StreamsUtils; -import org.junit.After; -import org.junit.Before; - -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.isEmptyString; -import static org.hamcrest.Matchers.not; - -public abstract class CliToolTestCase extends ESTestCase { - - @Before - @SuppressForbidden(reason = "sets es.default.path.home during tests") - public void setPathHome() { - System.setProperty("es.default.path.home", createTempDir().toString()); - } - - @After - @SuppressForbidden(reason = "clears es.default.path.home during tests") - public void clearPathHome() { - System.clearProperty("es.default.path.home"); - } - - public static String[] args(String command) { - if (!Strings.hasLength(command)) { - return Strings.EMPTY_ARRAY; - } - return command.split("\\s+"); - } - - public static void assertTerminalOutputContainsHelpFile(MockTerminal terminal, String classPath) throws IOException { - String output = terminal.getOutput(); - assertThat(output, 
not(isEmptyString())); - String expectedDocs = StreamsUtils.copyToStringFromClasspath(classPath); - // convert to *nix newlines as MockTerminal used for tests also uses *nix newlines - expectedDocs = expectedDocs.replace("\r\n", "\n"); - assertThat(output, containsString(expectedDocs)); - } -} From 5596e310684c2e540d932c91690cf73a3b083465 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 11 Mar 2016 17:21:36 +0100 Subject: [PATCH 208/320] Upgrade to lucene-6.0.0-f0aa4fc. #17075 --- buildSrc/version.properties | 2 +- .../lucene/queryparser/classic/MapperQueryParser.java | 5 +++-- .../search/vectorhighlight/CustomFieldQuery.java | 11 +++++------ .../common/lucene/search/MultiPhrasePrefixQuery.java | 4 ++-- .../org/elasticsearch/index/search/MatchQuery.java | 7 +++---- .../org/elasticsearch/bootstrap/security.policy | 2 +- .../org/elasticsearch/bootstrap/test-framework.policy | 2 +- .../fieldcomparator/ReplaceMissingTests.java | 2 +- .../search/child/ChildQuerySearchIT.java | 6 +++--- ...e-analyzers-common-6.0.0-snapshot-bea235f.jar.sha1 | 1 - ...e-analyzers-common-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + ...ne-backward-codecs-6.0.0-snapshot-bea235f.jar.sha1 | 1 - ...ne-backward-codecs-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + .../lucene-core-6.0.0-snapshot-bea235f.jar.sha1 | 1 - .../lucene-core-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + .../lucene-grouping-6.0.0-snapshot-bea235f.jar.sha1 | 1 - .../lucene-grouping-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + ...lucene-highlighter-6.0.0-snapshot-bea235f.jar.sha1 | 1 - ...lucene-highlighter-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + .../lucene-join-6.0.0-snapshot-bea235f.jar.sha1 | 1 - .../lucene-join-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + .../lucene-memory-6.0.0-snapshot-bea235f.jar.sha1 | 1 - .../lucene-memory-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + .../lucene-misc-6.0.0-snapshot-bea235f.jar.sha1 | 1 - .../lucene-misc-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + .../lucene-queries-6.0.0-snapshot-bea235f.jar.sha1 | 1 - 
.../lucene-queries-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + ...lucene-queryparser-6.0.0-snapshot-bea235f.jar.sha1 | 1 - ...lucene-queryparser-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + .../lucene-sandbox-6.0.0-snapshot-bea235f.jar.sha1 | 1 - .../lucene-sandbox-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + .../lucene-spatial-6.0.0-snapshot-bea235f.jar.sha1 | 1 - .../lucene-spatial-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + ...ene-spatial-extras-6.0.0-snapshot-bea235f.jar.sha1 | 1 - ...ene-spatial-extras-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + .../lucene-spatial3d-6.0.0-snapshot-bea235f.jar.sha1 | 1 - .../lucene-spatial3d-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + .../lucene-suggest-6.0.0-snapshot-bea235f.jar.sha1 | 1 - .../lucene-suggest-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + ...lucene-expressions-6.0.0-snapshot-bea235f.jar.sha1 | 1 - ...lucene-expressions-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + ...cene-analyzers-icu-6.0.0-snapshot-bea235f.jar.sha1 | 1 - ...cene-analyzers-icu-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + ...analyzers-kuromoji-6.0.0-snapshot-bea235f.jar.sha1 | 1 - ...analyzers-kuromoji-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + ...analyzers-phonetic-6.0.0-snapshot-bea235f.jar.sha1 | 1 - ...analyzers-phonetic-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + ...-analyzers-smartcn-6.0.0-snapshot-bea235f.jar.sha1 | 1 - ...-analyzers-smartcn-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + ...-analyzers-stempel-6.0.0-snapshot-bea235f.jar.sha1 | 1 - ...-analyzers-stempel-6.0.0-snapshot-f0aa4fc.jar.sha1 | 1 + 51 files changed, 41 insertions(+), 42 deletions(-) delete mode 100644 distribution/licenses/lucene-analyzers-common-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 distribution/licenses/lucene-analyzers-common-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 distribution/licenses/lucene-backward-codecs-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 distribution/licenses/lucene-backward-codecs-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 distribution/licenses/lucene-core-6.0.0-snapshot-bea235f.jar.sha1 
create mode 100644 distribution/licenses/lucene-core-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 distribution/licenses/lucene-grouping-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 distribution/licenses/lucene-grouping-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 distribution/licenses/lucene-highlighter-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 distribution/licenses/lucene-highlighter-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 distribution/licenses/lucene-join-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 distribution/licenses/lucene-join-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 distribution/licenses/lucene-memory-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 distribution/licenses/lucene-memory-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 distribution/licenses/lucene-misc-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 distribution/licenses/lucene-misc-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 distribution/licenses/lucene-queries-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 distribution/licenses/lucene-queries-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 distribution/licenses/lucene-queryparser-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 distribution/licenses/lucene-queryparser-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 distribution/licenses/lucene-sandbox-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 distribution/licenses/lucene-sandbox-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 distribution/licenses/lucene-spatial-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 distribution/licenses/lucene-spatial-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 distribution/licenses/lucene-spatial-extras-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 distribution/licenses/lucene-spatial-extras-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 distribution/licenses/lucene-spatial3d-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 
distribution/licenses/lucene-spatial3d-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 distribution/licenses/lucene-suggest-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 distribution/licenses/lucene-suggest-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 modules/lang-expression/licenses/lucene-expressions-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 modules/lang-expression/licenses/lucene-expressions-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.0-snapshot-f0aa4fc.jar.sha1 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.0-snapshot-bea235f.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.0-snapshot-f0aa4fc.jar.sha1 diff --git a/buildSrc/version.properties b/buildSrc/version.properties index f75d5a936bb7..39c32192052b 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 5.0.0 -lucene = 6.0.0-snapshot-bea235f +lucene = 6.0.0-snapshot-f0aa4fc # optional dependencies spatial4j = 0.6 diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java 
b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java index a7c53a56bc41..6ddd7591caab 100644 --- a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java +++ b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java @@ -787,8 +787,9 @@ public class MapperQueryParser extends QueryParser { assert q instanceof BoostQuery == false; return pq; } else if (q instanceof MultiPhraseQuery) { - ((MultiPhraseQuery) q).setSlop(slop); - return q; + MultiPhraseQuery.Builder builder = new MultiPhraseQuery.Builder((MultiPhraseQuery) q); + builder.setSlop(slop); + return builder.build(); } else { return q; } diff --git a/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java b/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java index 089b649cefef..3c0bda973471 100644 --- a/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java +++ b/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java @@ -34,7 +34,6 @@ import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import java.io.IOException; import java.util.Collection; -import java.util.List; /** * @@ -68,7 +67,7 @@ public class CustomFieldQuery extends FieldQuery { flatten(((FiltersFunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries, boost); } else if (sourceQuery instanceof MultiPhraseQuery) { MultiPhraseQuery q = ((MultiPhraseQuery) sourceQuery); - convertMultiPhraseQuery(0, new int[q.getTermArrays().size()], q, q.getTermArrays(), q.getPositions(), reader, flatQueries); + convertMultiPhraseQuery(0, new int[q.getTermArrays().length], q, q.getTermArrays(), q.getPositions(), reader, flatQueries); } else if (sourceQuery instanceof BlendedTermQuery) { final BlendedTermQuery blendedTermQuery = (BlendedTermQuery) sourceQuery; flatten(blendedTermQuery.rewrite(reader), reader, flatQueries, boost); @@ -77,7 +76,7 @@ public 
class CustomFieldQuery extends FieldQuery { } } - private void convertMultiPhraseQuery(int currentPos, int[] termsIdx, MultiPhraseQuery orig, List terms, int[] pos, IndexReader reader, Collection flatQueries) throws IOException { + private void convertMultiPhraseQuery(int currentPos, int[] termsIdx, MultiPhraseQuery orig, Term[][] terms, int[] pos, IndexReader reader, Collection flatQueries) throws IOException { if (currentPos == 0) { // if we have more than 16 terms int numTerms = 0; @@ -97,16 +96,16 @@ public class CustomFieldQuery extends FieldQuery { * we walk all possible ways and for each path down the MPQ we create a PhraseQuery this is what FieldQuery supports. * It seems expensive but most queries will pretty small. */ - if (currentPos == terms.size()) { + if (currentPos == terms.length) { PhraseQuery.Builder queryBuilder = new PhraseQuery.Builder(); queryBuilder.setSlop(orig.getSlop()); for (int i = 0; i < termsIdx.length; i++) { - queryBuilder.add(terms.get(i)[termsIdx[i]], pos[i]); + queryBuilder.add(terms[i][termsIdx[i]], pos[i]); } Query query = queryBuilder.build(); this.flatten(query, reader, flatQueries, 1F); } else { - Term[] t = terms.get(currentPos); + Term[] t = terms[currentPos]; for (int i = 0; i < t.length; i++) { termsIdx[currentPos] = i; convertMultiPhraseQuery(currentPos+1, termsIdx, orig, terms, pos, reader, flatQueries); diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java index 754d76fed271..52de9a7e5db7 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java @@ -134,7 +134,7 @@ public class MultiPhrasePrefixQuery extends Query { if (termArrays.isEmpty()) { return new MatchNoDocsQuery(); } - MultiPhraseQuery query = new MultiPhraseQuery(); + MultiPhraseQuery.Builder query 
= new MultiPhraseQuery.Builder(); query.setSlop(slop); int sizeMinus1 = termArrays.size() - 1; for (int i = 0; i < sizeMinus1; i++) { @@ -153,7 +153,7 @@ public class MultiPhrasePrefixQuery extends Query { return Queries.newMatchNoDocsQuery(); } query.add(terms.toArray(Term.class), position); - return query.rewrite(reader); + return query.build(); } private void getPrefixTerms(ObjectHashSet terms, final Term prefix, final IndexReader reader) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java b/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java index 979bfba605f6..9cd587704cbb 100644 --- a/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java @@ -45,7 +45,6 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; -import java.util.List; public class MatchQuery { @@ -336,10 +335,10 @@ public class MatchQuery { return prefixQuery; } else if (query instanceof MultiPhraseQuery) { MultiPhraseQuery pq = (MultiPhraseQuery)query; - List terms = pq.getTermArrays(); + Term[][] terms = pq.getTermArrays(); int[] positions = pq.getPositions(); - for (int i = 0; i < terms.size(); i++) { - prefixQuery.add(terms.get(i), positions[i]); + for (int i = 0; i < terms.length; i++) { + prefixQuery.add(terms[i], positions[i]); } return prefixQuery; } else if (query instanceof TermQuery) { diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index 4909959015b0..3e8bdbb0ad42 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.0.jar}" { //// Very special jar permissions: //// These are dangerous permissions 
that we don't want to grant to everything. -grant codeBase "${codebase.lucene-core-6.0.0-snapshot-bea235f.jar}" { +grant codeBase "${codebase.lucene-core-6.0.0-snapshot-f0aa4fc.jar}" { // needed to allow MMapDirectory's "unmap hack" (die unmap hack, die) // java 8 package permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index fafa57118c25..8d56bc44b9ad 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -31,7 +31,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" { permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; -grant codeBase "${codebase.lucene-test-framework-6.0.0-snapshot-bea235f.jar}" { +grant codeBase "${codebase.lucene-test-framework-6.0.0-snapshot-f0aa4fc.jar}" { // needed by RamUsageTester permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/fieldcomparator/ReplaceMissingTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/fieldcomparator/ReplaceMissingTests.java index 63b66f47d1a2..a291311c3bc7 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/fieldcomparator/ReplaceMissingTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/fieldcomparator/ReplaceMissingTests.java @@ -52,7 +52,7 @@ public class ReplaceMissingTests extends ESTestCase { iw.close(); DirectoryReader reader = DirectoryReader.open(dir); - LeafReader ar = getOnlySegmentReader(reader); + LeafReader ar = getOnlyLeafReader(reader); SortedDocValues raw = ar.getSortedDocValues("field"); assertEquals(2, raw.getValueCount()); diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java 
b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java index 9440a3e91c1e..8aa4e017da95 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java @@ -576,20 +576,20 @@ public class ChildQuerySearchIT extends ESIntegTestCase { .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode(ScoreMode.Max)) .get(); assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("Score based on join value p1")); + assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), containsString("join value p1")); searchResponse = client().prepareSearch("test") .setExplain(true) .setQuery(hasParentQuery("parent", termQuery("p_field", "1")).score(true)) .get(); assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("Score based on join value p1")); + assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), containsString("join value p1")); ExplainResponse explainResponse = client().prepareExplain("test", "parent", parentId) .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode(ScoreMode.Max)) .get(); assertThat(explainResponse.isExists(), equalTo(true)); - assertThat(explainResponse.getExplanation().getDetails()[0].getDescription(), equalTo("Score based on join value p1")); + assertThat(explainResponse.getExplanation().getDetails()[0].getDescription(), containsString("join value p1")); } List createDocBuilders() { diff --git a/distribution/licenses/lucene-analyzers-common-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-analyzers-common-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index 74d21bae9460..000000000000 --- a/distribution/licenses/lucene-analyzers-common-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-3510af19947deadd929123aaf14d69b4bdec759a \ No newline at end of file diff --git a/distribution/licenses/lucene-analyzers-common-6.0.0-snapshot-f0aa4fc.jar.sha1 b/distribution/licenses/lucene-analyzers-common-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..2ed6eb6ef561 --- /dev/null +++ b/distribution/licenses/lucene-analyzers-common-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +cd2388adc4b33c7530bbb8cd386e5c8c5c8e6aca \ No newline at end of file diff --git a/distribution/licenses/lucene-backward-codecs-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-backward-codecs-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index ee6143bec147..000000000000 --- a/distribution/licenses/lucene-backward-codecs-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -247ad7c17cb7c742d7a9abd5d9980e4fab815178 \ No newline at end of file diff --git a/distribution/licenses/lucene-backward-codecs-6.0.0-snapshot-f0aa4fc.jar.sha1 b/distribution/licenses/lucene-backward-codecs-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..28cdb1db9b1c --- /dev/null +++ b/distribution/licenses/lucene-backward-codecs-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +f5bbdd01b98fab7c18b46e762de3e39221b0c8fc \ No newline at end of file diff --git a/distribution/licenses/lucene-core-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-core-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index 2d39f84d21ec..000000000000 --- a/distribution/licenses/lucene-core-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c0712dbec58abad545646edab67d58f7373f5329 \ No newline at end of file diff --git a/distribution/licenses/lucene-core-6.0.0-snapshot-f0aa4fc.jar.sha1 b/distribution/licenses/lucene-core-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..c304106975bc --- /dev/null +++ b/distribution/licenses/lucene-core-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ 
+18ad74518b34af7cfbd6c1e3a408920ff7665501 \ No newline at end of file diff --git a/distribution/licenses/lucene-grouping-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-grouping-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index a3ce82c8a04f..000000000000 --- a/distribution/licenses/lucene-grouping-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7573e3efb12dd16fdc991edaf408877dab20c030 \ No newline at end of file diff --git a/distribution/licenses/lucene-grouping-6.0.0-snapshot-f0aa4fc.jar.sha1 b/distribution/licenses/lucene-grouping-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..a95cc29cc7de --- /dev/null +++ b/distribution/licenses/lucene-grouping-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +dc0b211e31b8f1e0ee3a9e8f9c71b13fa088dabf \ No newline at end of file diff --git a/distribution/licenses/lucene-highlighter-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-highlighter-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index 9259a2c66c18..000000000000 --- a/distribution/licenses/lucene-highlighter-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -96ef0a9a43a5fc99d27bb7e7d61517ee4c7e54a4 \ No newline at end of file diff --git a/distribution/licenses/lucene-highlighter-6.0.0-snapshot-f0aa4fc.jar.sha1 b/distribution/licenses/lucene-highlighter-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..8f57bb026394 --- /dev/null +++ b/distribution/licenses/lucene-highlighter-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +bbd503396c08546f1b9e023e77dbf25bbb052d1c \ No newline at end of file diff --git a/distribution/licenses/lucene-join-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-join-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index 4959f5f163c7..000000000000 --- a/distribution/licenses/lucene-join-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d93de34947d37e31a337cdfed400333588c378d8 \ No newline at end of 
file diff --git a/distribution/licenses/lucene-join-6.0.0-snapshot-f0aa4fc.jar.sha1 b/distribution/licenses/lucene-join-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..835bac492338 --- /dev/null +++ b/distribution/licenses/lucene-join-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +96fd93d4a4192c42b0d56198b73a25440d4db2f7 \ No newline at end of file diff --git a/distribution/licenses/lucene-memory-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-memory-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index 5218d0a019eb..000000000000 --- a/distribution/licenses/lucene-memory-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9c292930b1828e68f06509944a5346c141d56fd4 \ No newline at end of file diff --git a/distribution/licenses/lucene-memory-6.0.0-snapshot-f0aa4fc.jar.sha1 b/distribution/licenses/lucene-memory-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..1e392d3e246d --- /dev/null +++ b/distribution/licenses/lucene-memory-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +ddd44a319d201ff73cd25be139bd3175226ab5a5 \ No newline at end of file diff --git a/distribution/licenses/lucene-misc-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-misc-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index 947722edfd33..000000000000 --- a/distribution/licenses/lucene-misc-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -866ed93f48683e877ffa4d9baa1323dcffbc65d7 \ No newline at end of file diff --git a/distribution/licenses/lucene-misc-6.0.0-snapshot-f0aa4fc.jar.sha1 b/distribution/licenses/lucene-misc-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..a21aaef33f5f --- /dev/null +++ b/distribution/licenses/lucene-misc-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +07d943ecdc552632bdca8f2772fd081a02cbf589 \ No newline at end of file diff --git a/distribution/licenses/lucene-queries-6.0.0-snapshot-bea235f.jar.sha1 
b/distribution/licenses/lucene-queries-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index 6caf86a6b968..000000000000 --- a/distribution/licenses/lucene-queries-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -967d9c2647bdd4d88961747f7436a5a92aa0385b \ No newline at end of file diff --git a/distribution/licenses/lucene-queries-6.0.0-snapshot-f0aa4fc.jar.sha1 b/distribution/licenses/lucene-queries-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..57fb022de53c --- /dev/null +++ b/distribution/licenses/lucene-queries-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +66c72fd979f54480af75d01719ef25da62c0a8b6 \ No newline at end of file diff --git a/distribution/licenses/lucene-queryparser-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-queryparser-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index b3e92d3f168b..000000000000 --- a/distribution/licenses/lucene-queryparser-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -981030d83a7504267f3141d7365fad9b46d51465 \ No newline at end of file diff --git a/distribution/licenses/lucene-queryparser-6.0.0-snapshot-f0aa4fc.jar.sha1 b/distribution/licenses/lucene-queryparser-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..5c311c4bd9b3 --- /dev/null +++ b/distribution/licenses/lucene-queryparser-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +8992204f922fe52af557e691cbfb4c54f92b76bd \ No newline at end of file diff --git a/distribution/licenses/lucene-sandbox-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-sandbox-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index 7b5176c4963b..000000000000 --- a/distribution/licenses/lucene-sandbox-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -707691b1baf22c29020569f5b875d200a4955411 \ No newline at end of file diff --git a/distribution/licenses/lucene-sandbox-6.0.0-snapshot-f0aa4fc.jar.sha1 
b/distribution/licenses/lucene-sandbox-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..20f0037ea31f --- /dev/null +++ b/distribution/licenses/lucene-sandbox-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +8565264e00bc43e1226ff0d2e986dbb26d353ce2 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-spatial-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index 9df2a16b886f..000000000000 --- a/distribution/licenses/lucene-spatial-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -be9e78130a069983f611f484d5b7b87bda0d6370 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-6.0.0-snapshot-f0aa4fc.jar.sha1 b/distribution/licenses/lucene-spatial-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..6a9098579450 --- /dev/null +++ b/distribution/licenses/lucene-spatial-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +98fc1bb7e005f33c388be66486341ad8168b72eb \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-extras-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-spatial-extras-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index 6badc36d3619..000000000000 --- a/distribution/licenses/lucene-spatial-extras-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -edeef6ce8a58d5e6a074bebf545918d04e8579e1 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-extras-6.0.0-snapshot-f0aa4fc.jar.sha1 b/distribution/licenses/lucene-spatial-extras-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..b741ccd62a78 --- /dev/null +++ b/distribution/licenses/lucene-spatial-extras-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +b5b651b0adbc2f404e091817282dabd7b432c677 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial3d-6.0.0-snapshot-bea235f.jar.sha1 
b/distribution/licenses/lucene-spatial3d-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index 480ae590aedc..000000000000 --- a/distribution/licenses/lucene-spatial3d-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d86a7ba859576bdcee1dacd8f407ccf71f982c60 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial3d-6.0.0-snapshot-f0aa4fc.jar.sha1 b/distribution/licenses/lucene-spatial3d-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..512e4b7b5926 --- /dev/null +++ b/distribution/licenses/lucene-spatial3d-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +334e194bf83c75f0ae165e3e72b6fa35c5d636c5 \ No newline at end of file diff --git a/distribution/licenses/lucene-suggest-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-suggest-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index 7835298c4a28..000000000000 --- a/distribution/licenses/lucene-suggest-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a3860de6502576f142dc948eb2005fa4dc0c27c5 \ No newline at end of file diff --git a/distribution/licenses/lucene-suggest-6.0.0-snapshot-f0aa4fc.jar.sha1 b/distribution/licenses/lucene-suggest-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..3d2cf156d40e --- /dev/null +++ b/distribution/licenses/lucene-suggest-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +89c46e9601cf8fb9acf77398838f8710c9e44053 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-6.0.0-snapshot-bea235f.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index d9a29f17c503..000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8d11bf581b0afc25f87a57c06834cd85930d2ffa \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-6.0.0-snapshot-f0aa4fc.jar.sha1 
b/modules/lang-expression/licenses/lucene-expressions-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..5237907f2243 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +f36f8010c9fec7342d34bece819c13de5f241135 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index 538d2ad8216a..000000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -38fda9b86e4f68eb6c9d31fb636a2540da219927 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.0-snapshot-f0aa4fc.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..f2e307d5d98b --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +1378905632ff45a9887b267c4b30f7adef415ca4 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index b90115da4abd..000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -352fea7a169ada6a7ae18e4ec34559496e09b465 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.0-snapshot-f0aa4fc.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..7bf3eb5333d2 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.0-snapshot-f0aa4fc.jar.sha1 
@@ -0,0 +1 @@ +49acd38e206d9c2fe28269fcba9b752d3b605e0e \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index 7cbe648e0bd8..000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -445f5ea7822d0dd6b91364ec119cd6cb4635d285 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.0-snapshot-f0aa4fc.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..8f08fe269809 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +7c11723d7d4dc3b1c9bf80089cfc2de7bc8a2b6e \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index 03c96786de2a..000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0b216b7b9ff583bc1382edc8adfee4d4acd02859 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.0-snapshot-f0aa4fc.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..bf5e5da8dcff --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +654d961bd4975a3cb13388d86d72fefb6994f659 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.0-snapshot-bea235f.jar.sha1 
b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.0-snapshot-bea235f.jar.sha1 deleted file mode 100644 index f27a98f63bac..000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.0-snapshot-bea235f.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8d161a8c7e5b5b82f64dc5df2ca46197a3716672 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.0-snapshot-f0aa4fc.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.0-snapshot-f0aa4fc.jar.sha1 new file mode 100644 index 000000000000..ed0dc51b97c6 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.0-snapshot-f0aa4fc.jar.sha1 @@ -0,0 +1 @@ +0f408ac498782617a0f80d6a295d82f6d3609499 \ No newline at end of file From c50c5a52d519dc7626de8572da0d00b4644a2ee7 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 26 Feb 2016 16:25:05 +0100 Subject: [PATCH 209/320] Rework norms parameters for 5.0. #16987 Changes: - no more option to configure eager/lazy loading of the norms (useless now that orms are disk-based) - only the `string`, `text` and `keyword` fields support the `norms` setting - the `norms` setting takes a boolean that decides whether norms should be stored in the index but old options are still supported to give users time to upgrade - setting a `boost` no longer implicitely enables norms (for new indices only, this is still needed for old indices) --- .../org/elasticsearch/index/IndexWarmer.java | 64 ------------------ .../index/mapper/FieldMapper.java | 19 ++---- .../index/mapper/MappedFieldType.java | 19 +----- .../index/mapper/core/KeywordFieldMapper.java | 11 +-- .../index/mapper/core/NumberFieldMapper.java | 2 +- .../index/mapper/core/StringFieldMapper.java | 21 +++++- .../index/mapper/core/TypeParsers.java | 64 ++++++++++++------ .../index/mapper/internal/AllFieldMapper.java | 2 +- .../index/mapper/FieldTypeTestCase.java | 7 -- .../mapper/all/SimpleAllMapperTests.java | 6 +- 
.../mapper/boost/CustomBoostMappingTests.java | 47 ++++++++++--- .../mapper/boost/FieldLevelBoostTests.java | 14 ++-- .../mapper/core/KeywordFieldMapperTests.java | 58 ++++++++++++++++ .../core/StringMappingUpgradeTests.java | 12 +++- .../mapper/core/TextFieldMapperTests.java | 5 +- .../mapper/numeric/SimpleNumericTests.java | 16 +++++ .../string/SimpleStringMappingTests.java | 2 +- .../update/UpdateMappingOnClusterIT.java | 2 +- .../mapping/UpdateMappingIntegrationIT.java | 6 +- .../search/child/ChildQuerySearchIT.java | 1 - .../search/query/MultiMatchQueryIT.java | 4 +- .../search/query/SearchQueryIT.java | 2 +- .../resources/indices/bwc/index-2.2.0.zip | Bin 96001 -> 72250 bytes .../test/resources/indices/bwc/repo-2.2.0.zip | Bin 94087 -> 70304 bytes .../index/mapper/all/mapping.json | 2 +- .../update/all_mapping_create_index.json | 2 +- .../all_mapping_update_with_conflicts.json | 2 +- dev-tools/create_bwc_index.py | 19 ++++++ 28 files changed, 236 insertions(+), 173 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/IndexWarmer.java b/core/src/main/java/org/elasticsearch/index/IndexWarmer.java index ed1814681ac6..332fcdd380eb 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexWarmer.java +++ b/core/src/main/java/org/elasticsearch/index/IndexWarmer.java @@ -19,12 +19,7 @@ package org.elasticsearch.index; -import com.carrotsearch.hppc.ObjectHashSet; -import com.carrotsearch.hppc.ObjectSet; -import com.carrotsearch.hppc.cursors.ObjectCursor; -import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.NumericDocValues; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -64,7 +59,6 @@ public final class IndexWarmer extends AbstractComponent { super(settings); ArrayList list = new ArrayList<>(); final Executor executor = 
threadPool.executor(ThreadPool.Names.WARMER); - list.add(new NormsWarmer(executor)); list.add(new FieldDataWarmer(executor)); for (Listener listener : listeners) { list.add(listener); @@ -138,64 +132,6 @@ public final class IndexWarmer extends AbstractComponent { TerminationHandle warmTopReader(IndexShard indexShard, Engine.Searcher searcher); } - private static class NormsWarmer implements IndexWarmer.Listener { - private final Executor executor; - public NormsWarmer(Executor executor) { - this.executor = executor; - } - @Override - public TerminationHandle warmNewReaders(final IndexShard indexShard, final Engine.Searcher searcher) { - final MappedFieldType.Loading defaultLoading = indexShard.indexSettings().getValue(INDEX_NORMS_LOADING_SETTING); - final MapperService mapperService = indexShard.mapperService(); - final ObjectSet warmUp = new ObjectHashSet<>(); - for (DocumentMapper docMapper : mapperService.docMappers(false)) { - for (FieldMapper fieldMapper : docMapper.mappers()) { - final String indexName = fieldMapper.fieldType().name(); - MappedFieldType.Loading normsLoading = fieldMapper.fieldType().normsLoading(); - if (normsLoading == null) { - normsLoading = defaultLoading; - } - if (fieldMapper.fieldType().indexOptions() != IndexOptions.NONE && !fieldMapper.fieldType().omitNorms() - && normsLoading == MappedFieldType.Loading.EAGER) { - warmUp.add(indexName); - } - } - } - - final CountDownLatch latch = new CountDownLatch(1); - // Norms loading may be I/O intensive but is not CPU intensive, so we execute it in a single task - executor.execute(() -> { - try { - for (ObjectCursor stringObjectCursor : warmUp) { - final String indexName = stringObjectCursor.value; - final long start = System.nanoTime(); - for (final LeafReaderContext ctx : searcher.reader().leaves()) { - final NumericDocValues values = ctx.reader().getNormValues(indexName); - if (values != null) { - values.get(0); - } - } - if (indexShard.warmerService().logger().isTraceEnabled()) { - 
indexShard.warmerService().logger().trace("warmed norms for [{}], took [{}]", indexName, - TimeValue.timeValueNanos(System.nanoTime() - start)); - } - } - } catch (Throwable t) { - indexShard.warmerService().logger().warn("failed to warm-up norms", t); - } finally { - latch.countDown(); - } - }); - - return () -> latch.await(); - } - - @Override - public TerminationHandle warmTopReader(IndexShard indexShard, final Engine.Searcher searcher) { - return TerminationHandle.NO_WAIT; - } - } - private static class FieldDataWarmer implements IndexWarmer.Listener { private final Executor executor; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index b8b7f4bb1130..20522abfbacb 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -203,11 +203,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable { return builder; } - public T normsLoading(MappedFieldType.Loading normsLoading) { - this.fieldType.setNormsLoading(normsLoading); - return builder; - } - public T fieldDataSettings(Settings settings) { this.fieldDataSettings = settings; return builder; @@ -243,6 +238,9 @@ public abstract class FieldMapper extends Mapper implements Cloneable { protected void setupFieldType(BuilderContext context) { fieldType.setName(buildFullName(context)); + if (context.indexCreatedVersion().before(Version.V_5_0_0)) { + fieldType.setOmitNorms(fieldType.omitNorms() && fieldType.boost() == 1.0f); + } if (fieldType.indexAnalyzer() == null && fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE) { fieldType.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); fieldType.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); @@ -419,15 +417,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable { if (includeDefaults || fieldType().storeTermVectors() != 
defaultFieldType.storeTermVectors()) { builder.field("term_vector", termVectorOptionsToString(fieldType())); } - if (includeDefaults || fieldType().omitNorms() != defaultFieldType.omitNorms() || fieldType().normsLoading() != null) { - builder.startObject("norms"); - if (includeDefaults || fieldType().omitNorms() != defaultFieldType.omitNorms()) { - builder.field("enabled", !fieldType().omitNorms()); - } - if (fieldType().normsLoading() != null) { - builder.field(MappedFieldType.Loading.KEY, fieldType().normsLoading()); - } - builder.endObject(); + if (includeDefaults || fieldType().omitNorms() != defaultFieldType.omitNorms()) { + builder.field("norms", fieldType().omitNorms() == false); } if (indexed && (includeDefaults || fieldType().indexOptions() != defaultFieldType.indexOptions())) { builder.field("index_options", indexOptionToString(fieldType().indexOptions())); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 10b165ff4c54..98ad76f7fe1f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -103,7 +103,6 @@ public abstract class MappedFieldType extends FieldType { private NamedAnalyzer searchAnalyzer; private NamedAnalyzer searchQuoteAnalyzer; private SimilarityProvider similarity; - private Loading normsLoading; private FieldDataType fieldDataType; private Object nullValue; private String nullValueAsString; // for sending null value to _all field @@ -117,7 +116,6 @@ public abstract class MappedFieldType extends FieldType { this.searchAnalyzer = ref.searchAnalyzer(); this.searchQuoteAnalyzer = ref.searchQuoteAnalyzer(); this.similarity = ref.similarity(); - this.normsLoading = ref.normsLoading(); this.fieldDataType = ref.fieldDataType(); this.nullValue = ref.nullValue(); this.nullValueAsString = ref.nullValueAsString(); @@ -158,7 +156,6 @@ 
public abstract class MappedFieldType extends FieldType { Objects.equals(indexAnalyzer, fieldType.indexAnalyzer) && Objects.equals(searchAnalyzer, fieldType.searchAnalyzer) && Objects.equals(searchQuoteAnalyzer(), fieldType.searchQuoteAnalyzer()) && - Objects.equals(normsLoading, fieldType.normsLoading) && Objects.equals(fieldDataType, fieldType.fieldDataType) && Objects.equals(nullValue, fieldType.nullValue) && Objects.equals(nullValueAsString, fieldType.nullValueAsString); @@ -167,7 +164,7 @@ public abstract class MappedFieldType extends FieldType { @Override public int hashCode() { return Objects.hash(super.hashCode(), name, boost, docValues, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer, - similarity == null ? null : similarity.name(), normsLoading, fieldDataType, nullValue, nullValueAsString); + similarity == null ? null : similarity.name(), fieldDataType, nullValue, nullValueAsString); } // norelease: we need to override freeze() and add safety checks that all settings are actually set @@ -205,7 +202,7 @@ public abstract class MappedFieldType extends FieldType { conflicts.add("mapper [" + name() + "] has different [doc_values] values"); } if (omitNorms() && !other.omitNorms()) { - conflicts.add("mapper [" + name() + "] has different [omit_norms] values, cannot change from disable to enabled"); + conflicts.add("mapper [" + name() + "] has different [norms] values, cannot change from disable to enabled"); } if (storeTermVectors() != other.storeTermVectors()) { conflicts.add("mapper [" + name() + "] has different [store_term_vector] values"); @@ -242,9 +239,6 @@ public abstract class MappedFieldType extends FieldType { if (boost() != other.boost()) { conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [boost] across all types."); } - if (normsLoading() != other.normsLoading()) { - conflicts.add("mapper [" + name() + "] is used by multiple types. 
Set update_all_types to true to update [norms.loading] across all types."); - } if (Objects.equals(searchAnalyzer(), other.searchAnalyzer()) == false) { conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [search_analyzer] across all types."); } @@ -304,15 +298,6 @@ public abstract class MappedFieldType extends FieldType { this.docValues = hasDocValues; } - public Loading normsLoading() { - return normsLoading; - } - - public void setNormsLoading(Loading normsLoading) { - checkIfFrozen(); - this.normsLoading = normsLoading; - } - public NamedAnalyzer indexAnalyzer() { return indexAnalyzer; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/KeywordFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/KeywordFieldMapper.java index 3f01493590ce..744882e1ccd7 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/KeywordFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/KeywordFieldMapper.java @@ -92,14 +92,6 @@ public final class KeywordFieldMapper extends FieldMapper implements AllFieldMap return super.indexOptions(indexOptions); } - @Override - protected void setupFieldType(BuilderContext context) { - if (!omitNormsSet && fieldType.boost() != 1.0f) { - fieldType.setOmitNorms(false); - } - super.setupFieldType(context); - } - @Override public KeywordFieldMapper build(BuilderContext context) { setupFieldType(context); @@ -128,6 +120,9 @@ public final class KeywordFieldMapper extends FieldMapper implements AllFieldMap } else if (propName.equals("ignore_above")) { builder.ignoreAbove(XContentMapValues.nodeIntegerValue(propNode, -1)); iterator.remove(); + } else if (propName.equals("norms")) { + builder.omitNorms(XContentMapValues.nodeBooleanValue(propNode) == false); + iterator.remove(); } else if (parseMultiField(builder, name, parserContext, propName, propNode)) { iterator.remove(); } diff --git 
a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java index 72014482ca85..4b4c08825085 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java @@ -31,6 +31,7 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableFieldType; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -116,7 +117,6 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM protected void setupFieldType(BuilderContext context) { super.setupFieldType(context); - fieldType.setOmitNorms(fieldType.omitNorms() && fieldType.boost() == 1.0f); int precisionStep = fieldType.numericPrecisionStep(); if (precisionStep <= 0 || precisionStep >= maxPrecisionStep()) { fieldType.setNumericPrecisionStep(Integer.MAX_VALUE); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index 656d6effcfa6..4301a2252d82 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -157,13 +157,30 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc fieldName); final Object index = node.remove("index"); final boolean keyword = index != null && "analyzed".equals(index) == false; - // upgrade the index setting - node.put("index", "no".equals(index) == false); + { + // upgrade the index setting + node.put("index", "no".equals(index) == false); + } + { + // upgrade norms 
settings + Object norms = node.remove("norms"); + if (norms instanceof Map) { + norms = ((Map) norms).get("enabled"); + } + if (norms != null) { + node.put("norms", TypeParsers.nodeBooleanValue("norms", norms, parserContext)); + } + Object omitNorms = node.remove("omit_norms"); + if (omitNorms != null) { + node.put("norms", TypeParsers.nodeBooleanValue("omit_norms", omitNorms, parserContext) == false); + } + } if (keyword) { return new KeywordFieldMapper.TypeParser().parse(fieldName, node, parserContext); } else { return new TextFieldMapper.TypeParser().parse(fieldName, node, parserContext); } + } throw new IllegalArgumentException("The [string] type is removed in 5.0. You should now use either a [text] " + "or [keyword] field instead for field [" + fieldName + "]"); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java index f8c1c0a812a9..c6b91292ace5 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java @@ -71,7 +71,7 @@ public class TypeParsers { private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(TypeParsers.class)); private static final Set BOOLEAN_STRINGS = new HashSet<>(Arrays.asList("true", "false")); - private static boolean nodeBooleanValue(String name, Object node, Mapper.TypeParser.ParserContext parserContext) { + public static boolean nodeBooleanValue(String name, Object node, Mapper.TypeParser.ParserContext parserContext) { // Hook onto ParseFieldMatcher so that parsing becomes strict when setting index.query.parse.strict if (parserContext.parseFieldMatcher().isStrict()) { return XContentMapValues.nodeBooleanValue(node); @@ -99,9 +99,6 @@ public class TypeParsers { } else if (propName.equals("coerce")) { builder.coerce(nodeBooleanValue("coerce", propNode, parserContext)); iterator.remove(); - } 
else if (propName.equals("omit_norms")) { - builder.omitNorms(nodeBooleanValue("omit_norms", propNode, parserContext)); - iterator.remove(); } else if (propName.equals("similarity")) { SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString()); builder.similarity(similarityProvider); @@ -187,6 +184,37 @@ public class TypeParsers { } } + public static boolean parseNorms(FieldMapper.Builder builder, String propName, Object propNode, Mapper.TypeParser.ParserContext parserContext) { + if (propName.equals("norms")) { + if (propNode instanceof Map) { + final Map properties = nodeMapValue(propNode, "norms"); + for (Iterator> propsIterator = properties.entrySet().iterator(); propsIterator.hasNext();) { + Entry entry2 = propsIterator.next(); + final String propName2 = Strings.toUnderscoreCase(entry2.getKey()); + final Object propNode2 = entry2.getValue(); + if (propName2.equals("enabled")) { + builder.omitNorms(!lenientNodeBooleanValue(propNode2)); + propsIterator.remove(); + } else if (propName2.equals(Loading.KEY)) { + // ignore for bw compat + propsIterator.remove(); + } + } + DocumentMapperParser.checkNoRemainingFields(propName, properties, parserContext.indexVersionCreated()); + DEPRECATION_LOGGER.deprecated("The [norms{enabled:true/false}] way of specifying norms is deprecated, please use [norms:true/false] instead"); + } else { + builder.omitNorms(nodeBooleanValue("norms", propNode, parserContext) == false); + } + return true; + } else if (propName.equals("omit_norms")) { + builder.omitNorms(nodeBooleanValue("norms", propNode, parserContext)); + DEPRECATION_LOGGER.deprecated("[omit_norms] is deprecated, please use [norms] instead with the opposite boolean value"); + return true; + } else { + return false; + } + } + /** * Parse text field attributes. In addition to {@link #parseField common attributes} * this will parse analysis and term-vectors related settings. 
@@ -194,6 +222,14 @@ public class TypeParsers { public static void parseTextField(FieldMapper.Builder builder, String name, Map fieldNode, Mapper.TypeParser.ParserContext parserContext) { parseField(builder, name, fieldNode, parserContext); parseAnalyzersAndTermVectors(builder, name, fieldNode, parserContext); + for (Iterator> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) { + Map.Entry entry = iterator.next(); + final String propName = Strings.toUnderscoreCase(entry.getKey()); + final Object propNode = entry.getValue(); + if (parseNorms(builder, propName, propNode, parserContext)) { + iterator.remove(); + } + } } /** @@ -217,24 +253,8 @@ public class TypeParsers { } else if (propName.equals("boost")) { builder.boost(nodeFloatValue(propNode)); iterator.remove(); - } else if (propName.equals("omit_norms")) { - builder.omitNorms(nodeBooleanValue("omit_norms", propNode, parserContext)); - iterator.remove(); - } else if (propName.equals("norms")) { - final Map properties = nodeMapValue(propNode, "norms"); - for (Iterator> propsIterator = properties.entrySet().iterator(); propsIterator.hasNext();) { - Entry entry2 = propsIterator.next(); - final String propName2 = Strings.toUnderscoreCase(entry2.getKey()); - final Object propNode2 = entry2.getValue(); - if (propName2.equals("enabled")) { - builder.omitNorms(!lenientNodeBooleanValue(propNode2)); - propsIterator.remove(); - } else if (propName2.equals(Loading.KEY)) { - builder.normsLoading(Loading.parse(nodeStringValue(propNode2, null), null)); - propsIterator.remove(); - } - } - DocumentMapperParser.checkNoRemainingFields(propName, properties, parserContext.indexVersionCreated()); + } else if (parserContext.indexVersionCreated().before(Version.V_5_0_0) + && parseNorms(builder, propName, propNode, parserContext)) { iterator.remove(); } else if (propName.equals("index_options")) { builder.indexOptions(nodeIndexOptionValue(propNode)); diff --git 
a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java index 97c2fa3933be..7565243251c4 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java @@ -305,7 +305,7 @@ public class AllFieldMapper extends MetadataFieldMapper { builder.field("store_term_vector_payloads", fieldType().storeTermVectorPayloads()); } if (includeDefaults || fieldType().omitNorms() != Defaults.FIELD_TYPE.omitNorms()) { - builder.field("omit_norms", fieldType().omitNorms()); + builder.field("norms", !fieldType().omitNorms()); } doXContentAnalyzers(builder, includeDefaults); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java index 966edf82621f..b7194a3829bc 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java @@ -130,12 +130,6 @@ public abstract class FieldTypeTestCase extends ESTestCase { other.setSimilarity(new BM25SimilarityProvider("bar", Settings.EMPTY)); } }, - new Modifier("norms.loading", true) { - @Override - public void modify(MappedFieldType ft) { - ft.setNormsLoading(MappedFieldType.Loading.LAZY); - } - }, new Modifier("fielddata", true) { @Override public void modify(MappedFieldType ft) { @@ -217,7 +211,6 @@ public abstract class FieldTypeTestCase extends ESTestCase { ", searchAnalyzer=" + ft.searchAnalyzer() + ", searchQuoteAnalyzer=" + ft.searchQuoteAnalyzer() + ", similarity=" + ft.similarity() + - ", normsLoading=" + ft.normsLoading() + ", fieldDataType=" + ft.fieldDataType() + ", nullValue=" + ft.nullValue() + ", nullValueAsString='" + ft.nullValueAsString() + "'" + diff --git 
a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index 19d0317f492f..762a62f37561 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -223,7 +223,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { } public void testRandom() throws Exception { - boolean omitNorms = false; + boolean norms = false; boolean stored = false; boolean enabled = true; boolean tv_stored = false; @@ -239,7 +239,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { allDefault = false; mappingBuilder.startObject("_all"); if (randomBoolean()) { - booleanOptionList.add(new Tuple<>("omit_norms", omitNorms = randomBoolean())); + booleanOptionList.add(new Tuple<>("norms", norms = randomBoolean())); } if (randomBoolean()) { booleanOptionList.add(new Tuple<>("store", stored = randomBoolean())); @@ -285,7 +285,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { Document doc = builtDocMapper.parse("test", "test", "1", new BytesArray(json)).rootDoc(); AllField field = (AllField) doc.getField("_all"); if (enabled) { - assertThat(field.fieldType().omitNorms(), equalTo(omitNorms)); + assertThat(field.fieldType().omitNorms(), equalTo(!norms)); assertThat(field.fieldType().stored(), equalTo(stored)); assertThat(field.fieldType().storeTermVectorOffsets(), equalTo(tv_offsets)); assertThat(field.fieldType().storeTermVectorPayloads(), equalTo(tv_payloads)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java index 91a0ca15cd4f..490477d67e7c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java @@ -85,18 +85,17 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase { } public void testBackCompatFieldMappingBoostValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("s_field").field("type", "keyword").field("boost", 2.0f).endObject() - .startObject("l_field").field("type", "long").field("boost", 3.0f).startObject("norms").field("enabled", true).endObject().endObject() - .startObject("i_field").field("type", "integer").field("boost", 4.0f).startObject("norms").field("enabled", true).endObject().endObject() - .startObject("sh_field").field("type", "short").field("boost", 5.0f).startObject("norms").field("enabled", true).endObject().endObject() - .startObject("b_field").field("type", "byte").field("boost", 6.0f).startObject("norms").field("enabled", true).endObject().endObject() - .startObject("d_field").field("type", "double").field("boost", 7.0f).startObject("norms").field("enabled", true).endObject().endObject() - .startObject("f_field").field("type", "float").field("boost", 8.0f).startObject("norms").field("enabled", true).endObject().endObject() - .startObject("date_field").field("type", "date").field("boost", 9.0f).startObject("norms").field("enabled", true).endObject().endObject() - .endObject().endObject().endObject().string(); - { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + .startObject("s_field").field("type", "keyword").field("boost", 2.0f).endObject() + .startObject("l_field").field("type", "long").field("boost", 3.0f).endObject() + .startObject("i_field").field("type", "integer").field("boost", 4.0f).endObject() + .startObject("sh_field").field("type", "short").field("boost", 5.0f).endObject() + .startObject("b_field").field("type", "byte").field("boost", 6.0f).endObject() + 
.startObject("d_field").field("type", "double").field("boost", 7.0f).endObject() + .startObject("f_field").field("type", "float").field("boost", 8.0f).endObject() + .startObject("date_field").field("type", "date").field("boost", 9.0f).endObject() + .endObject().endObject().endObject().string(); IndexService indexService = createIndex("test", BW_SETTINGS); QueryShardContext context = indexService.newQueryShardContext(); DocumentMapper mapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -122,16 +121,34 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase { .endObject().bytes()); assertThat(doc.rootDoc().getField("s_field").boost(), equalTo(2.0f)); + assertThat(doc.rootDoc().getField("s_field").fieldType().omitNorms(), equalTo(false)); assertThat(doc.rootDoc().getField("l_field").boost(), equalTo(3.0f)); + assertThat(doc.rootDoc().getField("l_field").fieldType().omitNorms(), equalTo(false)); assertThat(doc.rootDoc().getField("i_field").boost(), equalTo(4.0f)); + assertThat(doc.rootDoc().getField("i_field").fieldType().omitNorms(), equalTo(false)); assertThat(doc.rootDoc().getField("sh_field").boost(), equalTo(5.0f)); + assertThat(doc.rootDoc().getField("sh_field").fieldType().omitNorms(), equalTo(false)); assertThat(doc.rootDoc().getField("b_field").boost(), equalTo(6.0f)); + assertThat(doc.rootDoc().getField("b_field").fieldType().omitNorms(), equalTo(false)); assertThat(doc.rootDoc().getField("d_field").boost(), equalTo(7.0f)); + assertThat(doc.rootDoc().getField("d_field").fieldType().omitNorms(), equalTo(false)); assertThat(doc.rootDoc().getField("f_field").boost(), equalTo(8.0f)); + assertThat(doc.rootDoc().getField("f_field").fieldType().omitNorms(), equalTo(false)); assertThat(doc.rootDoc().getField("date_field").boost(), equalTo(9.0f)); + assertThat(doc.rootDoc().getField("date_field").fieldType().omitNorms(), equalTo(false)); } { + String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + .startObject("s_field").field("type", "keyword").field("boost", 2.0f).endObject() + .startObject("l_field").field("type", "long").field("boost", 3.0f).endObject() + .startObject("i_field").field("type", "integer").field("boost", 4.0f).endObject() + .startObject("sh_field").field("type", "short").field("boost", 5.0f).endObject() + .startObject("b_field").field("type", "byte").field("boost", 6.0f).endObject() + .startObject("d_field").field("type", "double").field("boost", 7.0f).endObject() + .startObject("f_field").field("type", "float").field("boost", 8.0f).endObject() + .startObject("date_field").field("type", "date").field("boost", 9.0f).endObject() + .endObject().endObject().endObject().string(); IndexService indexService = createIndex("text"); QueryShardContext context = indexService.newQueryShardContext(); DocumentMapper mapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -157,13 +174,21 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase { .endObject().bytes()); assertThat(doc.rootDoc().getField("s_field").boost(), equalTo(1f)); + assertThat(doc.rootDoc().getField("s_field").fieldType().omitNorms(), equalTo(true)); assertThat(doc.rootDoc().getField("l_field").boost(), equalTo(1f)); + assertThat(doc.rootDoc().getField("l_field").fieldType().omitNorms(), equalTo(true)); assertThat(doc.rootDoc().getField("i_field").boost(), equalTo(1f)); + assertThat(doc.rootDoc().getField("i_field").fieldType().omitNorms(), equalTo(true)); assertThat(doc.rootDoc().getField("sh_field").boost(), equalTo(1f)); + assertThat(doc.rootDoc().getField("sh_field").fieldType().omitNorms(), equalTo(true)); assertThat(doc.rootDoc().getField("b_field").boost(), equalTo(1f)); + assertThat(doc.rootDoc().getField("b_field").fieldType().omitNorms(), equalTo(true)); assertThat(doc.rootDoc().getField("d_field").boost(), 
equalTo(1f)); + assertThat(doc.rootDoc().getField("d_field").fieldType().omitNorms(), equalTo(true)); assertThat(doc.rootDoc().getField("f_field").boost(), equalTo(1f)); + assertThat(doc.rootDoc().getField("f_field").fieldType().omitNorms(), equalTo(true)); assertThat(doc.rootDoc().getField("date_field").boost(), equalTo(1f)); + assertThat(doc.rootDoc().getField("date_field").fieldType().omitNorms(), equalTo(true)); } } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java b/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java index b1fde6bdd676..90121e66ea8c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java @@ -102,13 +102,13 @@ public class FieldLevelBoostTests extends ESSingleNodeTestCase { public void testBackCompatFieldLevelMappingBoost() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") .startObject("str_field").field("type", "keyword").field("boost", "2.0").endObject() - .startObject("int_field").field("type", "integer").field("boost", "3.0").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("byte_field").field("type", "byte").field("boost", "4.0").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("date_field").field("type", "date").field("boost", "5.0").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("double_field").field("type", "double").field("boost", "6.0").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("float_field").field("type", "float").field("boost", "7.0").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("long_field").field("type", "long").field("boost", "8.0").startObject("norms").field("enabled", 
true).endObject().endObject() - .startObject("short_field").field("type", "short").field("boost", "9.0").startObject("norms").field("enabled", true).endObject().endObject() + .startObject("int_field").field("type", "integer").field("boost", "3.0").endObject() + .startObject("byte_field").field("type", "byte").field("boost", "4.0").endObject() + .startObject("date_field").field("type", "date").field("boost", "5.0").endObject() + .startObject("double_field").field("type", "double").field("boost", "6.0").endObject() + .startObject("float_field").field("type", "float").field("boost", "7.0").endObject() + .startObject("long_field").field("type", "long").field("boost", "8.0").endObject() + .startObject("short_field").field("type", "short").field("boost", "9.0").endObject() .string(); { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java index 8af92f266a57..28867ed1f737 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java @@ -24,22 +24,33 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableFieldType; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import 
org.elasticsearch.test.InternalSettingsPlugin; import org.junit.Before; import java.io.IOException; import java.util.Arrays; +import java.util.Collection; import static org.hamcrest.Matchers.equalTo; public class KeywordFieldMapperTests extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + IndexService indexService; DocumentMapperParser parser; @@ -232,4 +243,51 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase { assertEquals("The [keyword] field does not support positions, got [index_options]=" + indexOptions, e.getMessage()); } } + + public void testBoost() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "keyword").field("boost", 2f).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + } + + public void testBoostImplicitlyEnablesNormsOnOldIndex() throws IOException { + indexService = createIndex("test2", + Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build()); + parser = indexService.mapperService().documentMapperParser(); + + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "keyword").field("boost", 2f).endObject().endObject() + .endObject().endObject().string(); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + String expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "keyword") + .field("boost", 2f).field("norms", true).endObject().endObject() + .endObject().endObject().string(); + assertEquals(expectedMapping, 
mapper.mappingSource().toString()); + } + + public void testEnableNorms() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "keyword").field("norms", true).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + assertFalse(fields[0].fieldType().omitNorms()); + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/StringMappingUpgradeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/StringMappingUpgradeTests.java index 4b2fe9a71020..d49f50da0ab8 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/StringMappingUpgradeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/StringMappingUpgradeTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; @@ -130,6 +131,7 @@ public class StringMappingUpgradeTests extends ESSingleNodeTestCase { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "string"); boolean keyword = randomBoolean(); + boolean hasNorms = keyword == false; boolean shouldUpgrade = true; if (keyword) { mapping.field("index", randomBoolean() ? 
"not_analyzed" : "no"); @@ -143,7 +145,12 @@ public class StringMappingUpgradeTests extends ESSingleNodeTestCase { mapping.field("doc_values", randomBoolean()); } if (randomBoolean()) { - mapping.field("omit_norms", randomBoolean()); + hasNorms = randomBoolean(); + if (randomBoolean()) { + mapping.field("omit_norms", hasNorms == false); + } else { + mapping.field("norms", Collections.singletonMap("enabled", hasNorms)); + } } if (randomBoolean()) { mapping.startObject("fields").startObject("raw").field("type", "keyword").endObject().endObject(); @@ -172,6 +179,9 @@ public class StringMappingUpgradeTests extends ESSingleNodeTestCase { } else { assertThat(field, instanceOf(TextFieldMapper.class)); } + if (field.fieldType().indexOptions() != IndexOptions.NONE) { + assertEquals(hasNorms, field.fieldType().omitNorms() == false); + } } } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/TextFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/TextFieldMapperTests.java index 3a9d5b46ab9e..8dba6dd3fe35 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/TextFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/TextFieldMapperTests.java @@ -132,9 +132,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .startObject("norms") - .field("enabled", false) - .endObject() + .field("norms", false) .endObject().endObject() .endObject().endObject().string(); @@ -386,4 +384,5 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPositions(), equalTo(true)); assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPayloads(), equalTo(true)); } + } diff --git 
a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java index 09804f829193..c10ccd14262a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java @@ -684,4 +684,20 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser(); parser.parse("type", new CompressedXContent(mappingWithTV)); // no exception } + + public void testRejectNorms() throws IOException { + // not supported as of 5.0 + for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) { + DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("foo") + .field("type", type) + .field("norms", random().nextBoolean()) + .endObject() + .endObject().endObject().endObject().string(); + MapperParsingException e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping))); + assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [norms")); + } + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java index 86c67db219ff..8007e6248361 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java @@ -564,7 +564,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { mapperService.merge("type", new 
CompressedXContent(updatedMapping), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("different [omit_norms]")); + assertThat(e.getMessage(), containsString("different [norms]")); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterIT.java b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterIT.java index a3d6a87c43f9..600f84b5f5fd 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterIT.java @@ -49,7 +49,7 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/all_mapping_create_index.json"); String mappingUpdate = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json"); String[] errorMessage = { - "[_all] has different [omit_norms] values", + "[_all] has different [norms] values", "[_all] has different [store] values", "[_all] has different [store_term_vector] values", "[_all] has different [store_term_vector_offsets] values", diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java index 0951f3c46df3..8e064f46e126 100644 --- a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -156,15 +156,15 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { public void testUpdateMappingWithNormsConflicts() throws Exception { client().admin().indices().prepareCreate("test") - .addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": { \"enabled\": 
false }}}}}") + .addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": false }}}}") .execute().actionGet(); try { client().admin().indices().preparePutMapping("test").setType("type") - .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": { \"enabled\": true }}}}}").execute() + .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": true }}}}").execute() .actionGet(); fail("Expected MergeMappingException"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("mapper [body] has different [omit_norms]")); + assertThat(e.getMessage(), containsString("mapper [body] has different [norms]")); } } diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java index 8aa4e017da95..2d178488dd96 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java @@ -51,7 +51,6 @@ import org.hamcrest.Matchers; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; diff --git a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 23e2592447bf..be190b547ea3 100644 --- a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -156,12 +156,12 @@ public class MultiMatchQueryIT extends ESIntegTestCase { .endObject() .startObject("first_name") .field("type", "text") - .field("omit_norms", "true") + .field("norms", false) .field("copy_to", "first_name_phrase") .endObject() .startObject("last_name") .field("type", "text") - .field("omit_norms", "true") + .field("norms", false) 
.field("copy_to", "last_name_phrase") .endObject() .endObject() diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 44b8636d51a5..68b496cd5665 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -117,7 +117,7 @@ public class SearchQueryIT extends ESIntegTestCase { public void testOmitNormsOnAll() throws ExecutionException, InterruptedException, IOException { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1") - .startObject("_all").field("omit_norms", true).endObject() + .startObject("_all").field("norms", false).endObject() .endObject().endObject()) .setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)); // only one shard otherwise IDF might be different for comparing scores diff --git a/core/src/test/resources/indices/bwc/index-2.2.0.zip b/core/src/test/resources/indices/bwc/index-2.2.0.zip index b645084eeef9952f82df979f50b658b4abac3d10..797ca24f4edbc368b347d7a21b39f4b2b5addbe8 100644 GIT binary patch literal 72250 zcmb@t1z22Nk}!-DJi#plg1fuByAuf7xVuA;;O_43?yilyyEhsvIQ+>wyR*CRKeFG< z@|<&@>T~;+SDiX_8wF`dD0r~ne}FG05`O^yae)TI2Q$)l)TdWffdhl|R$o^O{WYCj z5x~Hqjv&Fn{+e6CD#B!@r0Q@9&|rwlM-Y{4bH3{z=4- z{}}0i@aq51ME^5d(tn2bmpaT0|C8(A=zq_@tJy!JXZ+uy|3mAv|4r-v&ysNbE2MNC z9Q7Rme=*5Fposp<&)$Lk7H;jC+2sZt42%u}3{2?Xp!f$}T1Gkt$3I;5??#CNn0pr>e1bE_Y|_xt*9+4{727y2R~lpoEk( z#SJvjMfNS>m=vpkED;Q2pa?AC9y>KT6)!D01v?EFJ5@HQc$j)zR*E)sQa+4cigp+% zBS+)RkQ_>-}hR|G;>VzH0KK@WcU{ zJv0bHom3f8jT12$%Vafl8#X_PZq`&LlOC81K{3d&Sy2b@vrWB&;xoG4=eFx6u`*`6ivU zg0Dm|FoLGO*iQie9BL?3ZlfYaWq?9ERY>d#_~!& z?S4C*n+V)Qi1)^MNorfN*CBk`4N*==czFuH+c37KaW}1%|NinV|I3-Bckv7V>Ihnx z{kNP;gb{}4c_k&yXszd66AL?UGe!I3P0e|!z=9Id1Sz$7sg@8@AkJk7C#Qg%mDSnB zrd0@MO2|OTK$rL1tCxV`6@sa|9>WuX84TEerKvv$6Z5~~Z~tmy;QH5y{XYj(SD2Eu 
z;ji%W{53`XEt-D`DTe=jNcqAim$smz$B!t5g)(1X$D@~{hdT;DK#Qidgoq{(ev8eW zQK{DW9c-eWW@g6l5*onz+|Sg@v#X@dxlD^=>xA`$jI%Qo3L#}beFyij7r-J`kj{i;7B|?yz`j<+7PuYKy>VJ}_W#nX{bNt1} zA1UiM8^7nD_5%JzORRVQ`NS{czw#s)CWD}%g{i5DmZG$krjiV^zMYDaKAYliPyb71 z{mrpO1x)wv-Tn89#Qzb;bcPnT|A<*bJupZs7$^>~e_%7te~p(vHne|gu>TT2jpNoR z?U(2&}B9JtLT5D8v0{vQA|(GOiNW)8td-v#=PI#B$(i4 z6CD(58mKprmp5XPH3sM#fk7q~AR>mx`67~KzjyGAg(FTq=0nb!*_&%uQI3040Gs*w z3CVXH!kk@nk?))9nM^OKXo%YY=L6^xB=Au*gf!*BZR~rZSi9}z%*+@%BjnKDX?XZe zvB1UJFyW`&{@&gGDs zQPkTJrE8_Duz=?<)HnwvjP&a;_^U8fX{PII*X$Y50RiNuoXo&X!S<9CYxB?iBA>Cy zhRlpgVdEv37F*ZNUA)}G^4=*Snh zqVKoB)YX&zVxezbCK7OevXK8AJ;uN<*A4q+A%TC_LWahGe`=!h|1i;3de)a{-PEL= zUoISk8%#Qkac$jxn{xd|+mkP`>J4iQ5BlbBv zvhPD{{6*vj!98i7dGzZ0UQ6SL&fRpnWC$QlUCxm2DE#{=+ z&8Xr{pyGW19s#7!w)`$Ot>E-0^|8%C(3C)ffr-EWKT)5UL$b{B>R^*(4j~#Zy1u(MdAdHF%;I15 z=%Thh_sUPKlTuSzwaL<`wkUWuYD|SDDe&pM4SDR#wC}(7p7oN~@kTo(t$-CdAy#d( z+%G~urnd#piyY-mMIg96hqu3rLM~KCiK?F;+FM zFlXBoiC~-N%F{<=6nOhVl1~LMj#{LX?(EPSryMbBCr8czD~ThFgoCdo}>@dsz{rh61t zQiTSKBb;H*&WGEhD)Mw1MuU}r9|9v(%$k&KO0@Bj>V+Y_%g0MR#NLc+*`^1Ns(sGJ z%ytPY&z|}p*pi5Gw`3}!#Z3W{Qn=0-jTnq}IQOQbs^QyAWD*XAWnTJ_RHA>*T} z;n^3JtTrASl^?0KWE3~o%PIJbS4AJJ7ZfU0M)R|T40bbqs-^=m!@NBYptzE{BtJ3S(}is#+;CCDQNrS)p6>vLSVGrs4H*Aj%IpoWjQD~AL5f(Iwj0;fV8Vn9 zGX{g`Uf&d$j4ppzM5R@}SVA|7N3=DJS$Z%h9LpuxAf#Oe3B`{F@7Qk*^-*MJ98}677#xtd8 zI@0}g4-r+%OBE-O^JpI!+jINc1jhYHFiw&Mtaqj)!!;vjv$Dq8%Wa|${p)KYru|jm)Je=>B6kVzWchn zit2NTICQK^NB~Z8_BF!~KRi3ADQE_$;CFiP?=c7qZ5Q8Z8zgy`@)tfnsc;}2zE8}T zet%L_@RLENl_+WJbq}84YHK@>`knw~qi6M=bsYK%0`@f?EDXO`5L_c6{d5b85BTob zX*gThnWLWs>pFhV$SzfO`V?&3*pv+qeO1q@^MqF-35gjkTdn%yi^KuR7?GQ860D2I z4>^9u;clnsHE*?+f2Uo%y99(JpP1NPae~(B~|hC;4!BbA1uvtRe;D!>T?6QI1;`ox6T}7B6g^9dsjS@({oOVSnbtC3IIBq&j=o&~#!k>GN~M z-6V9K>OiouoG)D)f(MKAC^6?y+*Z|){18~S1lwf=?=dRghV}Cq;eXb~O0e!{HotaP z?qAjPzgYvZ{P!B@Z|o3&7+LvV2J9hNyZ0L;e$#mb5x`U=cnJf8Tp0Y0d@I+1sirL{1fYWVBdTh#za9%geDTs z^3B+G-*}ShJi`N3($b-kea#k6U<3y!vzZf1=6oS|>uUMJwa0zGLcZ4kNcH=s=UoQM 
z8#XbSX_(Q0qfTT*QdIVhODNxuIe^+bo2OXfzf&ccHvde6Tqsd}h?yA)udGq$$+Ybo$|4ghp}|5-^T zrliLGE_HQ(9|itg>)tWi)(l}CCSj5mK>%=Gg~1^$JS72wRK#e+2r|KHM2&5i2C3YQs+*e9 z+u#4rg#|5!j>L#r+*8wB(azYLfSI9v>pR*Bz2Eu47;sV=_Pd)R90D5gNjB2g^Z4hP z{B)ISVNKXNVa?d9YlkJ6I)vp==}_OtC7k4vOf}6NGzWVQjba-Hc~4vkHa>br_J-s* zn3mJuCv-%GE&e=9&Oa4k?;MCv#D8gBsJ;!Yv+Hi_2d80`e~AG{Fl=-t=QyW z9lqiJU-jzv36i_d=bh=cp06ML`=qx&dJHkyS3K5#62Qca?1KFl^W6V80vP>Mw>kPB z0{k83`+k?a*x7n@ESoxiV))cYg52>}K2!PS=hFY)iGf8|#>o(14Pa%_WuPB!VLtzLOJYQ9OEq!1EdM+d!$Fk~5%r zfxuukQMq9?nc&81#qv6un814c_@a#hjfVBHV*TtyU9@A7&*E-n=BT*uj(bRXk7>&% z{W5cHlVkk0L|!AX`W=y%ZMhl|4mr;wgH1~)qK>Czxkx`1aF_bB?vVgk8~ygWT9v_~ z-(6!K7bsgBj`FcgILVqW&jtoVd%I|aB>lUC?FYI(jAod{99Z{ln6&`DX6P3NFncReJAEjEZei@eP@siBBhU9w+v2YHQM+(F-^edXGJBO>apiY$c>GN+X)^m9 zTnQ$3q1%12EX56N z#k+y&5VVnMu1L>kze>{&Iu)vUJ1)+cZmu@U%&O==|!@tvpe6itB2v zJAPIBqx}Z%e+gVyZ+f%<)&=O>SU!ZO}uj(5BPmh|L02XrhL#Y9ka03fN5zbVTs^?old_jFY z1lCa)6-H;$E5?``9doJ)-z4It`UU*G+J#TM`yF*VpWCEx^yclW`bEa@MVu;q&&w=| z_u?~q$EQAXpEGfKpJ%vH6lR-*Ll1(|x5F5!v3_JiFq~L7mOkolj&cv$B=(FXOpgoJ z-1WKKO`BO%tZYa={8r1ELB$UR(6|r^IYi+Pxj#I_@+k)aha{Jk8n=DSo zaj7dn)84A;-{IF~>@xw!+V%bFP08#26nCL5GZStnFi+;3l)})5h8!{xs5f5z=+E4W zKC~XFw6&NjyaYHw=Z*k^eONbbjBO`A6zE6V@5*h~s#xEi?A4^)%!7>!vDpj*$}W^^ ze#-DNG{oOBrS~Xi8^#DFsbb5Ge*-jhNv@dGHTtx!-XA2X(K~;8)OkP3yYTJj^lZJq zVH|2!%XgMEI|r0WAqed&mJR2<^_(5H6aIZ6THI>|PVwjMqHrx<0_rl}nnPi2mv^(@ zynb92M)#*RsW@;84uS^gY^vA}fCMvUpQI+ArO9EIuM|yJs_Rqrk+_D%S+peKCp4-C zW@%AyF*^hDK7}vtZP`YCuc!dlRPZ#0%J2Xnelp}UQbp86JBTZpSom$$rnfD#-{mVD z9dnre5N)o`1ko_2I<3>%S^K1Gn^$yv#a^@nr^z!D6>!K2;P#!5nXf@1JzSxwxmqDd zlxH)L_0Llv8b%&{2!K!s6rhbnat*Ll?Jrpm&lgGs*i`89w}Z(FsQ!R)cALM|`f(oY z7@un&@}L}oOw|fND9u{PD4SG@TxYiT#@A`S1(yzRC>W*g!KX+1JSOFYcSgRrMq6oH zQ5qo9A2^vQL9MN9LEU2Pi&ySkJ5f4tHg2nj(F>^~{&|TI6;s;5qh{=O-#Cva9DYW7 zrt*tB5>abh#rrWBdp2Y~6wFm*oq1$w)YO@;j<>DCA4^;t9FISQh|U(D?kMx|rd zxpJQZsz{Ck4&4JnT*QB(nT;>gZPGeyV-`f!0&WqzhFOi(`heyvU1A@LYs*FLb~_3pi2#i?9Y!# zKPuQnrISh2hv=MO1c*DT_49mdJVCWuF=^nJdE<6MX 
zye8rkxHY9=DWR#&?qlY33~L-r#PY1I+t_dlu5R#j2)x0;x$fVU_ zgy+U{V%OnT;%-|x)vO`e=9~zQ#eKv+MficfOmG&{qL2BeNlHvd;W`?Iiwkp0SMNk5 zV@{Q4%-_1N3@(Fad6XaT%?;r%+~e}JJ-I%`X8b&Jl_`>(78DCy9k?A1%Mx;Ujt5(u(&pUyjX(r&hQv(%WnOdIRTf-@9d`gLprL^sp%2 zrF@1Cx1=mvgd23v0~Y7*Pg1Z{+ zrpy+feC@J2#);zawsT>Mdo6=DX<}jDfw~nD=XTz?(qyQ1D}jbEB)xc?W8c|kTXbf@ z8!wAp${dY{O1pU?z>hXuO+7r;doaWl3GjrRn`Mi>`;h;%RcKhTP^wUR;EJb&mccZT zRGjIIfvBmx;uX0(fF;3n8f-m-5_M!tP;rD0=K`blt%eXM=rSNPNzQH;n)PWNr(#2L z%|O(k4{Rv+{1T&K=AHb`Z1$(@&y7lRBRnk;NKKV0=4Qq0#v#>F1Uf++gNHu@I?PGe zEh$Wa02GSpPJDs0JsA@2W3Lb%q~+~gg~;vmeYYCXZ6Wnqxm)1Nsh>Yl<%fAKwp{+A z`$Ws+36uBTlo(Dm5qSRV(+ii-*ERlIw;9+UWyEvNd0kjazU@1FA?l;~ECpv%SK_h) zR=qu~Ep(O|?;PwhAXw@yOmg1FfFk%2L}my{hoSKqZm&d5zPHaF`k2J(={5(a8LPJb zJY;FQsLEJ!zQz&ddKD6pT~OsavZ`8!^6i7dFyJtqa&CCMpErMx$9y>0hW8nW7h7Sl zB)vt~4Hn#bvN54vNfUo=^D7Ff;KI*DN^2vV17u6J6`crHZ9lI{U`2?E-_=Z|Vg;GD zU4Cv&Y~1pE%xuuNOaknI4N#RPzh?+UtUM^t;2iM%asjv2OxkY0L~y*l)>WDm-$7)t ziTiWnCk@262&|rqs;@FlqbWoB`{>l`dW@kZxYbP`%0;+goDkRmQ`@y|tj;7>#8xU< zD0bDIUDXy&vnk1zyw`<{o4VYC4Q>Gi+|oUl*dlk*)Rw^%rP@YdLss`^O7*zt$+V|x z6C-?ThX{5u`U_gs85ab#1s=L{QgUza)i~~qr_L-9u?a-eB=q;omMp~^Exv!DW6zRz z0()|-SY7&_Q%Vv>J3>Nsfjst=!Pwwd4YiQ@$tOE z`#QWcTx*gEISObvh7 z;*M2ClD+Cf^Fe@^cV^=})K$xQD7M79wUZsYgiFRTlhFq!Ce-$x7KIH{A)_Q)q4n4$ z)+6!=r#7US877Zv>db-SQGH5xQ6X+mVQg~Ke%D>}osp3&9;OHqdrSuWG+;p;ah*4* z-$S)4)UaK#-7ODc)XRWWoj8y5fZItsI^Jq>H9ZGvW87?yVemG-qmDC;N9oHOLqhx% zuV#{P_R48$i!Ug8q$GrYp6E(2INfSK|tQ^d<=HKH4 zOBKrw;XsPRZG()e639<0D&26BW{h!^kXoxV8mdwz#e6n-VWV+J1bB?Sak6%~WO{7k=IDv`YyAPQ8-v|cSmK@;S&IgXGbr(Jzq^)jrs17kLeY1e>)k%$aACmhOS9KO1mF_JLqdUky6 zF&MvXT*f6x_=%h1WSm*L+Sj@n6}u*j4DmQFNebKKMlbXNU%ZjfaTp2o`J+>f>w7%1 zbhdC7XX8N&|J6YR2&wy?ue1vU;LQdIH%Cz5loi9wsSe5d^|LtsFGJl|ESId#PTWIsU#BdG%zSE2*<+ktdSRAInVkT(X3MjR>hN;Pm*xdUQtriG zHgS>~RgOICc(|>5Kf_6SUqUB(zi=t7`F?!h0LAPFyYX`Zr3ZG_G1$kt!Rk!nH2@XVQEa#0N10ykQ(>pYR;EF9;@M9E4oT%4g=Qeg_Bq*uVsVdYR$15;X zKNv1og!tugm@CRl1tP*H6&2N36~XWKYc+SEk4Z2l(OfOndN?)?2Gr+ZoGZu(m9&_2 
z7WexjYmN%h-_@8>bm_ffbv@qFfAzUo_j$eUdWLOOcL%Zazq;M8epgR!EvAreCI}zkABRf$l48X@>Ng7do-o5tH)d8 z)#}ZKxTx#pcDC!aIg?HeW%FsisocgZ>8;K6)}gaYrB(Okpv&jo%azH-{S-O>fEUSi zWL4mX_u)N4*6U1H$FBdUw-uj8tF9ZgPpqB;C}bOWFPm8&hZC=z{huH_cUD8smFaK0 z0-sNtyziHr9|D`+Q2ysheOXXauj^NfCi+*)`Twd#!| zz3E`XLcl}82ZU=wZx|Rc7!}g7pKZz2!CW@IpGSJ1T`-x2iIvgNAfLpT584K0WUh-G zG^P4DteRSANOJH8U9MZkw$sa^TE@20J4Rb%w$n+gT4c7;GjFvAW5Z9S$CIn@m=k0U zSYm0u+2_7F;7Yv)*{AC|;7VNCQ!-d}^;;66U>JF?OaR~x(;vnFL%a8;illB(v8(F%TRvxfaD# z#=8d$nr5*)6XH*8J}i@Ylvg4!uqws>YajZr9_!zDg)x@~8~t&}sj@2t1ccL|asH+Q zMTh+!?sUV09hhGWkW94FkWVZz0mh5;qGp*2MF}R7YNBw`*-vmDraSy`iFBOo!0#wk z7oAm|tkgO+c)Ynu=i`0Jy3S%&=Dr{KcIQyn`RYCD;(hCn@}>S>Fbw7GA7A*CxBoGU^SmrH>I zjHi$it<^xs5U&+MPY|h9K~E5?l|fGst<^?PV5gOtFMx?Ckz_8Qsv7lH7W$fS*f6}! z>b4;=qW)vJ2tFd-7e;0G;`MTz{p1br)QJN^F5>7B~8*$+(CGNos3dm&U_^bM|jDmdB|>0AYpTwbzJke`2*A@*&|5E#~hc!3^g)h)g+0HnKu$5Aua>v zOB7ZWi7?fs(qICI&1*$|uPMcZmy?bDv4|yUc3YuNq9kf0UGEv9!bTq_${Qj_ylnFs zz#F0|m8&ICi9Z^1bTFk*LiwRw=%wB}gJlL&+jP8RVTXXi&Pe=Sd^2%!v`RIe;|R*J+r z-H8InWc;{XhZQr?^?e9c%H!zFm&YNNV(D4=Tx!`(V*0S$%PV)*jpQ5=6Vg0p0x7c( zVkJT`q17@NAu0g?Dk7y1i6!|Gq8f!tpA#0SF-zW!s)YRhC?!hO%cM&7`?h^dD340H zsMbJWE=R4FHc2RP*#Mj(z9y|)=+3aYofNvbT3DE-o(43nRE?;$M(x(_WKj&oA0eY=DHn|qX)hg3N|I=Di;t4mwRjvF z#*qY7y0^4HN`D>M9@m6tt#dWZS4ncGo;CSUMq(zF?{T7%*X*(44a~l&@6}aS@MkBN zj5sDS3P&FXD~=9bW_h(*5hm3Nkd(}!2&tx_L?!)@ND8Njs;?P~{u`wH4Jy${eWsC? 
z5|MQOTJp!Y(0Vo1~@%xK$@Dt|_n z+{z!q1?F`s>A2@hJ@Q;hDVvp5j;VZjd!)?xYW1?tj`tk;*g*LyCUR>}AaE@@KY+Q4 ziuBBgs#NkSYOX{OG$-@byg~$64E$8A6S4TkBzRGbXf^NawM34iFREL$COPSqj@@1i zKD>{xATlM$C1oPC)tKkyg@S1WATfN4*FZQ*J}`OztLcF-zXqbqbQ3DUr z2yxlIq#}VPt$fJUu(?|eMO1lWQt)rEpHIb+?VQBBk$j+E?@fYO%7vGGS>#iJJC{ngEY~E0 zpM7&=QWCF#UBqVB8Pl|(^e15(@9jtz8 zc;4%C(K@p6d9LcRCfa!2>u{-P@|oS~i`uYw_;&9$nt0K6(fGV~c+nJX{>j_2YxBxq zH{WAW{F4s_g4e?GkGB`E{cUv*%CdyF`zgz_^~8%}6XBYNDi6xd_=hi)DhT^spBor* zFv;e7g!Bxrh9VpM9IS@FlT8Bzl>Gie`KbeNKI$v zw6X8m%LS6gjgqO$RHi;>T97TuS|_iuv;gc^_acEhaUNuMy#1M@G zy%cZ2L4u_E!Iu>C1f>*xfPI3SYN1MsEZ}njr%IuG3MXJjWyEMFnp3zCA%)T?Hu^!Z z5GDn~NGQ5RfHH|vpjH=E*Z%8_4wFu-_SF}qFSV^eEbiyG*LpRo)TXU(K;bxXvTK@y zFErH}rqheLS)x8+J_f!60%3xf{C4LFB=#N?)})X;8d zHnv;e@v-aMDIA~;TV-0Ib=Ev>Tr#X#G{_pLTC%b0-pL*y3S(x9rJ2x7Z)(=FtC@-3 z@}kq!ZqzWUSuzM2PznQJj?$`Wl-J)Ut74YXE@+fDXqeP&82AsMg%x2=((rA!3>0Bj zSZb}-*O*z%P84>?!lp(343qHNoB|pLk1M39mx8+>ZTfGHal9_G;W&4$Yy9Y zxE$V;qNh%PivGhV*_3(qpm>}yZI_MQ(s*^OCdGnz@*pe4rmovKXZ&;80GqUB;tEZj zrgdHK0i@bWdF`@s-MVqmxLleJTbq^IYIzMc0-RRuf^o*Udm0iOo)z1&W#h6z-Mn$s zc-68+-K25XICz>rTaneoQhGfnz+w2n$6@Ip_@+S^E2t6~19{QPdTFh`<+e8_yMm-A zXsxlu&|&c)bKENpHg*yD>?0S4qvL(B9P&DTEBB4<5ol_3TRBY)x#Hsr$8gh3BQlB= z!7@*yo5A(mLCUy8S{>VA{vF1|s&;jYsRQt!b{r$EjBPO=51)-AApqu0WxAcf0)vgq z(thc*dO*Ldk1gm7nH!&sL&i~~hykcy+lL-hf;@{~$USD83}P77Z%i*^7}Bro;|Y2| ze)@>af$zw%XSxu9Nr86k?Y3c=z^hJKf8}Gs0UdX|NBU1FGJ8aGBLgw zSGYa)8N;A{c^^m6rIPY?c%NpF1M=ZVIgWHkj(yt|{h~g)Ls4R356SOD@NFM|a-2FX z9aeYi=k$FJ8bFrDPn6O_PvoR=(b%q@(NE|z4`M~O0GM*%+eKFqihqpc#81Mv2W`zP z6l6$?CvhXYkmkbQ;h%H!M3>vS?)_NW4(U@4GKp2T-p}b99lkqZqr<|d@%$Z!bdfh)v*tmRsp4>EJ>GiXxe10GWIE_ zOam1(j@T7bz=AqytfWdh47zpg*0vj45NJl)T0Eyo`cqOT-HY~P8=`Frh-nm%JxbW3 z8Q4Q7to^+e(E;b2sTYtx>X?K>ce0KZgS?B*)@pTVC*r>9U_SWUS;*7M+MPEX%n>Z z*ts2FjqSIL7SUDk6jIdxG~JzE$jcD-h}t0V;(2txxjwp3#pIGv#ol6^GK?P5iy|Um z<}r33yTU$Z95YPqwT=2fK+G%V7XA}^mvO-`eMrS^-mt9KF$$UBBQKYmG{0g zyfkr_3(@>O%s>JHUS4;%bbHP-`@#M4QO=}mCfU|{wdNyx;C}5WMp7BwqIPB5nr#cn zezYK>*Glu9_Ic}?L(9cg_kR2kT+}jwgON7H*lqG5^FByjvpb6V@*p 
zVv5=)pyp9?FTc__s~I$`7;2Rg4vK_7;bn8hzviIJ*qfaw>X3)c@*uba1W@xt_uskh zT`ujG4zWg^5U}%DyRXGw(to_L8``ZLLW?RQnB>XyNdL*PYxgrSUl=RUf#8sr&Q<%S zdCzWnH`J(Io;+)UPX~kV)^vY%r6^+3G%JU%&lBbe@e&85oWn3u6gC&{m1WJh?&W@e zbAz+XtQ|=a{sXi>yIS?x~`2 zJSt$AHi?}y%A({`_9}dkxl7q*o;69lN$hpb`p9SDx$szit$9(`W0E)dH7jl=3BJfv z?kW9}^Tc7|uzeCbs}Vfwu^#@b2-i(Q1MgPhByH9zpNr@D$jz3+=Ha(VVVqX@C{KbX zp39aa2jF4tBt}*l-=b&bXt!GP_VwIh%A|v2imw;nlNa(m{tYKd-`DNx$q!k1d_$g* zPwCOyXw^*FIs#AGS5t!e*+N0Sk>D8+T(FLCN4N?frf9Q01fqOF@Vt=j&^O3OXjAOj zy4^Fr0Ps;rG3YS`?2im&dY0YIzL(%=5M(ei@X7d7{KYhm$&{ZGAz)w-;j!@sU?8CIh_AyJ8@=yM`v zVFDN)cvt)%BwJ?P%s}+O!`k;y_3&mNXUOucB-*0Ei6Dhx0_i`0#H2<<#zAFbG13_R z5#2%bB=j=~{~afGt%$zEaAI@?(W+2mkPJc!0TZ#2@o;%`0eqeCLBJSN3KkQ;k>GG| zv?!5<@IoL>oO}rp9!WP*nh>URntdWZiY7tr=jV~+H+U`zNzwvzYEr2ZrNa2d7r%

      @ita!Q}8a-<}ty1`Egc4BAcTHA~y-zA;Fyy)M%pn=0YM_ znHV8Q{&o4me9Y9C(?fuQ&yV@#+OnZX#I>J4cyWAD(+JaqlUn*E*0F-UH(&<h6-2NjS47tMzug_UpY{XszP<8 zSfgImtjJ!l6Pa3Dskz)%bi1^dHSh$UjUC;3#FJb%6#blHO|`kmR<~1xl1HVJsOe!- zX7g>1?PF_+qdF*uNdizk@;-r)>P6+T3{k-jr#s&OrC0TF_R>q zb_6}4glbl$u*~w;tMFRk0xB1k^U^hCx8f^_{p1n51T-pgRk;!)JFWfj5zPb#Ds`30 zQf=jy%35_m)?@B`!I90@{8E*@z<%}!Q9>V;xJqQ{nz~!I{a4a71S%|5x)N=*mK=NK z{lpQggdr+xqh0IiPF{7km%%DL8sTP1pkN}be#vE}5yPQ$Puy~LmVjGv3Ma(>&Ny;#D z&@%!W7lAbq2wzzvWLPjr6~UxtQ9i9vRwK7tJ@_Fa4;PjN*&Ki7s|ad*osMD9pj?Cw zZX2tdY5JT-RgIuw-eA&FevPJK*q~a36|M)%PxI5+r7AnA-ARr@JMG=nL8k~jTy|Ez zc~eqTT2o?I|1Y>rtmmd{^UcL}lDoBo7!hT-cT^wXLS5sYa8XzY%z5Tr3oo^IV+IW) z$Z?BUJI8P$>TnNPCM>k0!RfKdvFVculPO#y%=|!pIDRR9ntu9z+kPMXV*C_yYooLI z@}Lc&;-KQ7m7$cO>!2N=tf0@JkWA^12#+X^NRDWZJ|9sPzjs7)#A3-0$}Y-I%5KVz z%C5@J%I?zh)ysDMAUPE*=-1;%TE)soek)uPqv)$-N4Dg>x| zaa+Dy*;~^Zy9^xLu9yGzwB0rNWD}f&+ z;U6+KxV#)6k8fr{RqB|i=*$8|b#ho4m0TEH>yE7_H&dXJS*fCc1uiScmE-zZv#MFa zqC7eBjB7sxzbZRLTS;3@TTxq8TUlEjTVY#e+mk?&9+YOCP2?9&c~-S z%N2FvCRG!B5AJ0S0*BcXL>WF??Gw-$aa=`?awqB2oaGK8hs6^$Qo?>!4%&wS6XqGL zToz6Xpz_7#>^eo0xC!>9@;XtIvI(|~GcIn&%46-h77|{oAF`Do?ZuWH2j#=W39Afl zE*GbB(AuJ|2wrAKk7=J_Zn14nbJ| zNSF-7SHB4uVZ`sah|I3V*Aj;{6KEMlT$7HS;|>|}fKWzLtEtiKCc#hsoiHzm-|>En z`+!W0cz`p;uAx^wr5Dp<;7nZycXfwX{A&c38qJ*^ktW9rWcV~F$`YX`H7 zM20QHl5E7VtQXp&;%^2s8!;l}_ldbruds*4e;LLB@c`G7dC9sOs8`qX>qJ~Vub0we z?~egPjiiR7&Dd;iJHA!bY;D`ORoFx8zY61mc#gZq+-&os`I3L4wplci)8JcWlS~^3D8PljyBM?6>i(%0y6X+QS zO@<(gnL@+HX=&fTcZBe;8SoloNVY1|I7-O2Y*ssJ)C44mn61)s7jp0O8{j$jW5Y(#jb%Eovu>I)IZ)dbQI=aX{lZTCy*)ts0IE z#nnf~tZSuQ7mc2W`@N1|_KH?}OFkKKiK2O@fJnsNcxEM$AM6U_O7Haxm7h;-m@;Bd zHi3>(@}9)Al&cTMh02|!a}iu2apN;goP~;7h2Ohk69@c{^8u2ZxMrFw#MHE-t*&O8 z8ygB3qE)6F0gFyw9ulu0FT2}~wJ^rp!Wefd6IY5fJ%1X8G^3$t%B@}Q$h~(d?awH+ zBd0!B3FBV2D{@~0hH$so6%A%_>AsXJGZU2HMC}a_yIo3n^Td9NnhO;-A0TenjakWx z*bC&;w5~`uUsml)k{)9xo5O%J*Ya=S?s9- zN5{u26n{$89rrgL_b;3b5uOu7=WsgSqlpog(A;@={IC`Rk8O?$&a*rK%3K_V)G` zx-XJ)-rno!?S5RGtPT$5`}0V3R~Gw+hjZm|aY=6|%7`Zou7Qg?7+=HGU1>YR9&6-Y 
zG>W{-mH8LU-k-1(qq-RE!vxxqXU=E4tv$@%==v;}VXyw}fxc??HwXTz+20(ft7d;4 zS1B2bb!DofrMN03F==QD+^waXwbaTyyvN_Y% zqmRH)T+Vs>lf6UL(TW&V?keZ6x9zFWs#jN6*C+GE3F;kRy}x=}N2ETm+MlStX<5k1 zSLGI6hh)l;xpRFx0)3gCH4~Ia)0Ll9liw`KMAqOvGv)30$m_N1PPLv`u*yDT<^JiB zefF$r_UvxP3YXvo7WA{i@oCpFO-$N{ef^bby)(;tb_?{WXmzu2m0#pcS~ozt=XTQ@ z9Ni{-HXL#3d+di}lh0lk-xE1!nM_i~EP|7mcItX=bntK(A`r#Ij8DXj#l?)_`if&e z;9%r25HqEUU5DRW3b_w`Pf0N`K1fM%Gd9lrE9m|;V^Tg_F}B#_@?}ys7n(`-S=D75 zAQhHG$@sM})HUVv7J#WZDW}y0!&pR9%#)S4GvmWJ^G-$hy_wiE6R~&32QwdcXJcOO zy37pC$%%3klZ86wljW4#)3Lw3eB++Vtl?^HEURuLVr;fJ8hrDy{n71#h0HkB^|s0D zo-?Fj!6abKN+ev`QYqJV#wftdew^Y*ZfPa(xB{rks#(X{Qci|YPu5Zd{9G;x#iFUk z3gnl?WS14n0ruueBAYV-EKt#83pr%36fCGv;3f_+znwm_evF@NoBfob)DXZzC|RB` zEZke>B}ouFypps}WVg^s7F9L6i+2^BzwoL}uo*j~J1op4@D$0@NgdS{Kh$++&st-J zuIS2Ng~r`j_r~4E{dBSvd6sV`qiQ;$D}*a#mb&>$D!Y24PK64KOn3`cFI1eA3lB%9 zvIEtT9Y8n-sRCgK(F0DYgkqbGssNtw{+-=cK!2K(FT80Pe2KSrmWi>6%kx3n-2y=c z0cR_ZB$p_ctkl?WUA{wpn*{J-pEut_@-7Aya0LPKo)J}oe|IG~Bb+at-(TPva6`3717>cclTd#hTaBUcF$JV4>3c;9+`v=8)P0Y_zxyWo5`ph% zhr=z==@o(5QJ!Kl&~N#I*#YpeY?Zp>AZEU0V%kb|*BRW>j5b|f>>cB*&GfKmWb=Q& zY0&Qx^@ZXs&c#By(T8Nq4fJ?@2uD7@v*tTUP`BY)`{CdUhO$G%6Rsb=^LrJ>9t1aI2&If6OkHvx-RU5~=>XrDA(0$Xsdb6UqXkgFrh`$TY9Im_0xB5* z6{K)BwO`=|h}wA-Pp_J*u(Lg-`i~pSZ|?P%A2D$sc0eZ93%9u{y4}yYb0+ESG0kd8 zwv{%(WXU6ZtXNnan33Qn2uGKX3g*@XDRc4*XG+^XmaK)-yaczOG!)!yc-|MZyvmVd z4GV~2emoM1ENn%9MpqPKM=xf%m!F376}d+W>VB7VzLl1$#`dlTJR!r3^vV05qxW@ z<1}n&xmqtf(Bzq9mT9gfy6Ce@3zdl@k~0Yz<+(VzU;FS@Wl}&4K#Rt)<9>tOx`VL#Qy7P@KSuy5{@@4v z0Y?18)N#ik*w+10ZC2j4Ra@<{)v^`$YI`{3JDe!gp$HAjr~i+&a|+G`2-kFGCQc@{ zo&2$liSx%cCbsRFn15{Bwr$(CZSB;atvywzPHpYQsjmJm`mV3O>hAY>nyG1nYFAZx zPsi|a%hFKe(-Ke~mxZ#qB0D$h{qTW%TN4qEXLYg}D$({R@F-`~WXWVBnP~+4yD#ZM z-Ixy~R}||#2@y0$i8iZWpolv3nS?Pe2i;m>Jf5>1hL8Y7-A&RtM z$3D)7WMPg_&`B;)Ok>oLdkcYK0uC4>HvWxwimG|C%@XW%&)O@!@F6mr(1o-ImEjC7)gosTM@ zcr|BGI1C74^*L}X&-B-gsOXlq5-vc68bKrvc}vS>aZTxl!sY3;^zOD;3;A}*yaSt$)sI92c0N7!3@ zcOO!||0RNwcP1$${11zh`8WS3V@!7X?*EsP`)?vB|2xKX@_#5fq-Z_u*pwt)Jl%MW 
zSWUEO^^yMu^~02QZtwXIV(!n^G|2S7>?3NsjU(!xB!cIk_wIjmAGZHbU&MtbkIlS1 znlohKABaE8-*Ur#IZ0H5!4?-IL4?VZq2&XQHNO4igW%)I{&#B4{}bz{LZrwTvDJ3; zl#UP?4jeRm+nC}y+O&N8Y)V?P<@Ne_Ki)sS+{sfbG9991kj=6JO@Y{twhHeqZl<=f zD&L!%S{&cr8nooP+B;hLURQQ{SSuI=+*oA3K3BZQeYB>YY8jlHUS4MFd)EThenH7r z?9_c+Lpfwz2+lvELmQf&|7A3$e$2fy{&UQ{VB%By$%FG|>yC zzH$+1+Wo`q_Zed`t{z@IS1%_YFAp4d3jR+n_3yppEg&XP=yf*L_Bc4>&`87JPCROy zXfz&#u}4g@3#J*Z1~;-z|RS%r|n=Rs}65OcSbY0N5yZs|Ldi^IM8&l!$DTD0u|XzSx7 z`b=dyWH9BBfQZ5cumF0dj)z24&W~&v5-B4jrY+k_m(+4f#43H#-+M!lqL4pW%t(OB zR)DDX9ho&Ml;5@F{c!~0!*rpdVE7AG%BpfkFD<6D0mLmCZgI_!2J;yrZ7q0@?Cp=n7{Tx6(aT#6$LHGnpnB?_K#9TUWO7bSI^}!|tjJ z2fILm;iO~PMd#+IwE5c+4pqpIKi)G9Dx?x$9Xc6Xe>ejmF_xmvNxA&bP@?%DmoHZ# zE4(XD-XIMm%5Ag$;vI=uD#&;} zU4P+vR{LWg5?-|cRLjtSVBNC!)?cM-aHW!DU(3a0O6;`*{Dex`1Vnh|T!Wl|1?2%1 z97R0vN|LL->6vM`6}c-E&Np#II@AmCGqC6gieUs<=#m1oRQXK&yqlC;g!=x1`SkWh z0|bcvMSEcC#J8yBg97hZW?^Wo7*DU@KYxhn((E5%n$lh+EifWALUi(+Q!~i6K0`T? z_rvdd6sfd9*c=~Z6mTig3JILAv4RYJ?en2XB^05i8Xb8k@#`A$_rX})+bSs<*KVSN zHC|9lB$(grB?dEyj5Uy;24OyfVD!SSnMms8?G`@WyUp0tVI>q_RCeIuZ@?L=lhX!L zb@Pg*je6LXO8l{lilBQ-$__OwDJCu;G&x{bacynvvU;w`VVNlm$czL{zc*KswFx-w zT$>07J6e`$XK%H@ht09nd zg&d1zz&7UaT2oO96;g|uJngR)%NRe`s>4mk`p=7S(0{Ne$C`bg&zUz7H_$vI zy)!Gg?E^`RU30daA!Gz5kaxRGq*vd+Z{*R`Y5$d_Ps9$5wUz@w%D!25T{L|(ZauE~ zyfHLQacq68etv!SEk*s;-j;{}>AWof2F7jqKid9lwyZmE z0wHK(UKpl+EJJ9fL1-+JNMyq>2>^*=J{q7PUQzNtN<)0kzoD2P&kVoEIIoYLWM90; z3BSi&dDaRN7;?r-dN^tZ?X`l=o*i|TE;uXCt^!(~Pr&-7v=YfCzVHyp`B~K}S zd>Y)t4zA^ms)Kzy?x6Lxwe%d@iKvT6K9d956UrlIN6GS^A4j9u!tXDt9bphh(XxUG z`o?gISVkWw{ZuzkP%78>8~`sapa|-SNm5m~n>B)2Y1k7_bC1c%5Nvf-U)=&Tx}Q0+{$o=)5331lOA1TXz!3}s zB1AJI*ky)2egw%6e8Vyo)!oEy)={C8HRTzB$|gYmP(H9*rnG}RWCE)B5Ekv+8o2_G z6m;`v*|>Q`YnFqjsWSRNAH!UfVH7|P5wM4D+Ju~3S z4!`se*X1qtEeAj&4qB5!zYb#EdZH*A|NL*Bm9qMKFtKUrEAgNm>>O4un%p3o=^&GO z>N%8s+WKmJguQ)cm}2|G6KQ4M!0I>u1Eipm98>~p%$0!c3fFxNu0$1!B8oYtNf^8D zr$O;rb~<@hD+Mm+ez|Rs8GKF%2dBbD0DkyAi1ogckXS2E9ZN(P;M_g|G?e0Gq@YaN{2qO#7H(6_ynJkFewz5 
zp@(eM83CjIr0_tlOuZ&yH0(`P3{4Q+LiiLQrM2&Tw}%=X2>}7@8CP6L2&re^s0>M4 zraTw&w=6_$b*xmEr*!S(-uPv!uf%3}=6|lT%FX+qmcl^)2Y`rtJT|!oj7l zzOzh&!W2r`ChXiZLXx*}ckY^OzLzY}L<|gHC@hLP1PqGi5oMyDBD?*~C8L7k-Tp<#1j9Fxd z6K>fss~#unTThKkECHtYkEz5dEtXcGBYeS+hWSBB9CZD-0BfDSd? z2|t8M4QfggfEQvitF!GrJuakBa!=5Uv+ab@X|u}m<7=8w&sq}Hu6V@fcGfxZ9uI-F z#l+NF;BvqNRksB$o}Bi3QF2N6Kq-BPl#|j)TM-y!u)-7Z z!PIUm-sKHz>u1HRqbxymZXGEOPv>=Yn^UG}@vjYEV1cgZoIRW8DK8Yx@BF){frpB) zhtSrdy9P7E{A0t}2`IzvJmPKMq!&} zZW2*r|DLCW#^&LC5$}Y1YOgx16R(q~L+P;t&0s9x1XK})+DRQiW@0cu-PkZA5RUVT zTR4Na+_P?3jjaur#=eud8bDjeQ7hJ{oVduNHO8A<yoq2DGRd){+iF?pg#a+ zqiV!@B0ZtL$}5BdT$mLJ;}N8_1_v&&Nj!#WG>Z>kHW45PdNE+vHT#=<`MAwOW-CBx z{#1FhizUs-uinSh{q@S*=+Gs3or>UmG=}+|SjCcje7syAFt_zQp^2b`EHo)lua6ro{P$XeTvJH)-^jf!&G7F~G%; za8`TWTsC$_+Z*0hgyLz&^V-$EAOoGPSAKyKL?X{A_q4MNcgI>X$GT~6^v1_;?NMWj z<#GY=F|q8{;q$A90spC6#!gd!s4=Rm-`f3TVo~H-e)Gs5b0YBfT-OFCohEZUd|o!G z`TOZOHbn1caP;6#Bcz1~qd$cuLjfkXWXE5f=sQr~EOR)Jiae0Nts=a373V0QdCDGV zU)58=MpEf2F~U=Uo#xy|#!$gqz}$%5Uk^Fu($CYyI9edN^H=L+G&(gGw2VoLFIH=q zQ^Ezi+qyogmT2mrVd^ybpfq` zn4RTCV7T8ho3Ga!Z+1PODQX;;W6z=ZC|ThGDrw+7VlSPXR#2~JVLW7QB&D1)$Rsk{OwVtC z)alEm-B93&i0GSFO)yyhIdE}%o+DHsOuc|}hHhxG3M1ReQ_M5c!aH1|=KZ~Cc&0D- zyB$wUR#T!;u~KWxBxS%JMHrNX-CREYhW6-F4jgO-pDQK~0+Y{v^O{rNtV%1@!u<$( zx%w?cvWm_yYgAYvJve+r(62Jtcr6<5Z%TtI@f_MhVxoC+arL+|Cu5sq*p+ma zXJXMmuxDh2_d7^33Vn)?iY51`mC`S#S@+G2R@+U4@7#S_c{a{RmUkGkgapIHFN@^k z3VyVo=%$Tcj*eDcb2w~0q)9G(++y!!1ey{?@HvT0j-urqhaVStv}9!2;v>J{aLLEJ zgHZJ@su&$?=}Uq3nSv&hKI&E!;aogfYmVM6g9Mx+y2z=*mtaEVdFl2G?*6RQ_-Z3B zZ`)5zdm&Dk6ZC0wM$%e>Pt9U&T!yzlHVYYw71emY=iIKfa$QjWRC$>oPiXxpKhR;7 zd$Cr_ulGjINS}0#R`$$<6&j*yBNJ6pwnsj+myDx>bzIZn&r&-m8NDP^f!lhu(Jshk zcAVapRX`iCObHguhrq90Vk1&h55f`->KD|fc>u+9>16(z178;vz4Y|L*_oItSaE98 zxDBkFP+*yqMlZJ&*Uy}E)v-gJ#=x138;p-R;DXh#el$@}iK?%whf=dcWm6-OOc+C9 z7%X1Q$e!y>PBC5%-_;qGvjosylb4M7o!%bOkIw(hjyv^;wjCW87&%SLrK@zk@mU?m zXlPIea!=x)Qd@>mcLY0E3Uk)+@k{a8|CQ|-&bUNz>C@r^IBm46mF}4#V}HvUnKK({ z4&qhFqkn>Tgr&h50vn}kHBWXG^hV*@L|!m&Dan`9jx)i+kc%-%6wwJAa6^4DX?uco 
z7?L-kUBG^C9Q7`rP4KRUCf~4~WrQ`+5qk9z6Y=e6>CXcU6N*Fv`9lb}<$~ICLbPVl zapJm=xKP_;;@a(1&xkq2?d|o!8FAKv)L$iyEUk?>XjK*!f=YqS3j}3Ni@d$imWqP` z+Pso=SYdYX5~uh>!>wwt&P>xO^}Jil*1CIOrXvsrH4K}=*b!aydsGf@zina66;szm z8lV=HvYNPWu-q` zZG+b8tKkOJt6G7?GB=o1G?zkSWZDZ*&HGOZhc;;JD{aboesCTjY2o4e^TSF$xji)A zZp>2GJs{jiy+x!lSqN?6A>cjdAKJ1t;ST{78JZ;ul*E=nZJR4;b1 zSO``tbBE0*ar|WgG)~6KX%|J+eLiUGwJm3|Ohp+ePsPKWGLt;i^m*gwx!G1s`D>T< z&Iq?}@Yo9|Otr_gu(2D=bq4Ik82C@&r1 zg+n)-Q~;M69FaG!nNsw|#7`KKU@X(#g>%lGs9dz5UKqSL*BwgUz8RjDA>pO@$sCX( zrch$d$`w_8+-iM9q_*}p%!c?xhKR>AsB!C=q5?Us?UjYNzb+`q;O-`U=uT3?Ms}OZ zi%sj0TUcIOUEEtSizghTy|&yQjNb2r1SPt|sqGTXH_;MIODXaIVRzu$)5H|Hhm0X? z9XL|zIM3Otk1%Y?iGN#$7|R|iJR7|-EAokkg_0rYAQ7g5w1-(nd=e37d415|C*#*AkWSOeR5t{~Ef{6x? zlqlc}I|FaXLG~OhI{*S3<0-Pi5m&Gx(NWRUU3+ucVmR%`GI>z$DH-h~%X~OaxH#^# zE|D{^+8M2u_KaFeM#exByf&gC4&K+t9nryMhn<#dLxN|5#}ytJS!Wh67lQnPG4<>j z!|^L|w42K!4KcWa1Tdn*3$}+%+=gwnBg;|RHXxdxY)N@c5p^tUs)X3E(Z|%1t=3AT zj7m$XTlvUJ(J~!vw`rW-32Ax=`cu-Z)SPC?yy4&L2^ugC(VLRQR>ovqlLUp>&$m%f zov|CeS)*;3WZEjRImhieVLgMcqupQ~hvyHqk{9T^<0{!M%0Ri4KqFdE_`cOKp|aRW2r z*#_!Q_9~eA?G6`@%HAh%3^RLwUA=w(lx*VcH`b9iLTRPPK}*`^knS#DG>h-s@1jV! 
zJOb=h(tfvaW!j2M$EQ9oTt%S1jDhDqe2I4wf914V3S40PXT7bod+Or=Pj~>gxayAG z)ZAT(DV)ayf8C*eEX;o@_Uegl&4Ri|IRdK{@uqZDm2qj)*4;%&NWL9&tGwpycV2dK znZkPyL?6J9-?`!R7iT1a7^P)vlgCL$c8Ao6GaQdmn%#e`4^tYp}JmR)mY3El*23{ zfrsmNLQA1qUW4s-yV0zH^!7g_*O>C}9jAE5LsLD)loPn;EFrV$W3iLJm&Pi9QxYwy zFMO;MMiT7NP2!kr*yr%~qn6mf#+9N>$cR;$Gfg#lC)3aTY^f|R*U$7DP}48HOaz4s z!{7V#InT!53q7}Y%yR=PTyvbavZrkb{Art7%s}B%`^!5p*Yx;tJo37yi|mg*!=okt zVkcaY+6qSqmlPQ%%*;^4TJ1>+TQdC44T^d(2}Q*b{=;n(W1Uo>ZJdHFil#YlP`#vB zcDSe!<^eu&L5*T(ILXc^E_sZ7R6E_#Ofgr0lc=GU1K^fXoKslE9ih=D-^`o*=z!Q} z4O6AUe-|1teNTG)Pjh@N_=ii>>gUHjGQH1%c36F;OA%o2brhvhQgdbX6%=l7t3eVBZz zz$2vVCicXMoMQ?h!8cmG9_-or>vJ`d@bFkb5rCy@c+`t0)YC~qy9cZwEAIQ=E`0Bn ztWYaY(erm4p`_w#gy!Nhmk8pqgJBpziz$^`T=)S-s)N@?_37wB!3ULtd51>%*UvBy z$zm5#M&<~n*Wki^)N7NB0;{F;bq414yJiu0eEIUp^*6JH5@|XBZ$%9fh8jVT>|@(x zU*f@)xc;A!VU@K>z9RW08DrGg4k737oi?rGYfe%J8)K(ze!|0zJd&xfPraD6h`n|g zqX5MX6a0e+Anp!3_o#uBUpf>M6#!qEouV|NQ7VFXu#RdJ*Px$=7N6X(dc7sQ&p<#j zWdVRMQ8H0kLvSR?$b%ciP0t32U8XwO_WuVRi+*YvBbqD%npjGQ1?Yr`6I zdVR8fr?M4d24N3LgFB$;h752*?Mt|F+uU{60;z7_;jjtD%mVLxkVnhUX0BuaFREEq zlS2873SX3nqZ&drR&3WGo;z)0uJB#Sj=)c!JEAxLr%c$if?2wI>Zugt;_+yk!Zz&# zJ`(T*;&hZ(EvcDoJ|_r9_Y13OPIWVeKAqxB0}lyZkI4MH>Jc7xRW`6w{Kq=nZBUPp z^|+!jIyyP_YK#+Vq(wr)$p`bxb|mGG3rBSb`Qru#ushV)GPa$!U<#5 z*2XC}{e}Mp70si4R43?9jHs7Qcw8rBMD{*rZW)J$kwd zDyuL9n4c+9Ep`4yQ@FfePa&#GF;nh=Jfn;<%+<+-_Nw4~K2I9M6Hp~qSo}&SnDLIV z9N7)$g%ncgQSWvwc%_1a6%5-a+4u2G%0=mghtq;P3SqXAd$hg~rtT<{PUxk4g}lxy z#@u0d#lTzl_-WC?BAa82N{4L6pYU$A)zod#W8C7Gj=7CV%8-RE-|W@_9AlhA=Y1g? zZs38biQlMn(2#K2S~rbPa_R-^OB~>+gMe$tDSy>C$k$CCzLmqbXeKv4k23zHV{Gyw9T+q<#p*^(XC)2p0Kx8={`gn8&c zUE1){h!PoPTQtlLRyD&JCGF4U9pZvV61AS|947w@-zmyYW8Zj`Zs&?XxoNJr*ye06 z+bN8P>W}8728EnTe}DGw??jx3N8i{ zUFoM~x&8~W<G|Uq}50dY;kI%{Nbys3!a`>qEOaO<&H*6+? 
zFY2>?fgf+IwTa)qLbs7>$ep}6QXFGy1NHO)e2@TaiQm-sz0u5MvB}=*r$IYldGWT% zvoz~m8|wmH(G)3P8^FBvK_?zOjh{_iE_rrX4KI*c0^3_r-)8bvA~v>{s9#cKv`?+g z9UAfmwRfk_#W!flS8PO_>|gSg33ChEsG4+)E{eBgC|lUi9hE2JBsk#IBq@_jW@(}9 zLndP+9T849jFvKgNlg#_;{6fVW3V;Q(fL9)%NFPKeW3~K59rs3#c^EVZ21P5bW zh5lv~iUPw^&+jSw4Wv|*B@n@mIgO%NOb%ykn5TSd-(Fjb>;gzCB0sFB+nwiI!Uz3h zXxuDO6vKgR+?taD9@NnmJcs|BR}&rJ1(H19d`_5ylyb4W8JZ%nOzk-BJmD)37VjOvvMpkA8^%%cQ>RL06wpxHC;m`tC1>O~!Op9icVO zy(^;iPM;8M+v?z2gW~y~`?bhbKC^Nvwt|F-Zhm;R$Cv9VsDnQXhu8-M8-S8A_vRfT z+f~Y&MCWW9Bu~+r5k|t#mK~eep_1G|gHn9c(8C+m&WSZ{G6#>HGa@<0FER>FN*b?I z&u!X~Wr#MdWm+zBVF#%Xh}yz@@Koit0AaRVW>;vjp!*TKA2JMR1@v-3G7Y$=S$}7L z4lMQ7{nosOF5zn=Xg*yJB-T%ia=@&VI13szNzcj0UQC8-uj0|x`yD$8RKv~-V|_Eq znGgui&z^=Zn>d&#OUls9klbJ$y%^e=R7Qt5$}X*z?%QG2@$(m-zE4NXYGS5QQqwh> z{lr5HT20H(SD9dzGlg>Lv2aARlfln#M2^WZKRX~P<`dAJ?o|2z0Lu{8kdl^lgkzkOnC0*$ra8uGk-oEI9A?UG>cgQ!IT%qH zqPYHbdm+&Idi5dD^Y!rdg=;(OoiH(9*#6_y_xY#?mY{t5KZ|b^I`ombmau|c)#s7c zr;Fq6-se%^eP>_Y&8r@Dz9gaj$XO@Q>6=j&m1?Wqc;^E zn)KJpiNeeDfOP7Qjs6Tdn^(8HHeZeF_rY{=fialD2iw0r9()2aH3FS8Wl86c z_29z(Q$tYf)M0$GGi!V8mho3qB@p(Z5NqcyJPeXMORMO4D40GO{JK?D+etK%Bv~?& z@@_h>*J`>0YuU&68YTP#F(Xj%egUK49lOrgK$KoRT=b(Q6j&5I?6BI%sy+^D<%r&n zljgbYPcsUNo4Oq~0lZ1jTC2ek`_bf+nwQUe;FQ$oPTVE>k3?F3?&>?Y)5)#Mt(HI~ zIHd0=M8aSqa61R2egdTYe$Za-(y#*b#1=bTJK`^KUtS`b&kj2YaK6_IE&WMQFFV&I zonGv|pr?^HdSmV2C(_a8@Cby_=RfTzwv@IV-VL$caJy3vAkIxD|JwDOrmjLYx3WDq z$99+!U4|pUb#~DeGq`HJLEo^5vC`1iQx;IchjawlR*btY`Zt$E^_^?4^#= zw6reVEv39G*>|At?qs_w)P(5Pi2J1{1d1L}&zNVC+_)laJIFdE9EB6X#deq7lgG9; zLwN7O9xt*4;q!K7{`)|i-2!+==8v6+u=6N5)%OFaEbsKV=g6>wEHo?c{5AZfxMoMk zRN%jQM3d!8d3o*|Cg9WQKKILfmaq{ax(!qNwOip0y z9ynPz)kx-}diU-#*4DNDIy2dOt}=||kKVk0?+`!lSTDyQ3KYxrfio-%*6?UUrW|*&)%nL#LHFi((!)f%jZzs}e1b!%s;hWS+K_)EuZOTxpeq@F~l>9mRJ8W%G9nqr`{<#ys* zK!2-ij`Xe}xT{D%{vT`Hz|M9Z97rV|Nw!1=IN@K4`D9Rf zQ%vvpBm}rUOTRzC78mS8eM46@Ua{8pFs!{hOu~k0hqFjH_?#E|-i`^8)1(G3@|yoF z>&eeRbES5DTT|-l$y*8a;5)M{aq^SPETyUmWDxTeDUEN$@t>IyWZ+pm7q>(T%E(>R zYHIl1VJ^ImRa$%O=_cDo|9Fx`L~n3YtMtsTn!5Ypxcl(Hgl%!b0+?;ec~BNv=;8yY 
zav2c9nmo)Dg`5R9((W)IE$_2nN=y0QV|UUmMj8I2H~-3+le6Iu0s4IreZ?3?-uic= zOXb?_6FHWcqlS{a;1kXULiDzU^UX6oUfOc64MiYj_vnuDv;qz)LTvg!zD1=K>?07| z>81oXa(aWU6$Hnw`a~|=q)Zjc3Sy&HMd+hFT>Adwx~(&&?{`;_ML3!>(HKl{F^vWC zU)4XqMfKTw!{+3;aPrRLdHZL2cQ`cwEX?Onb{NWEezJdIRaJPc%0FtI6R!|90X}M+ z@5L6vjeW`V-dEpMRJw$f^)WwCThxw@A!8ol$P#$p5i;GSDV=i%uK5krk_H|~7E$iB zn%knQ%g-yEq~-zfB}*n4YzMvpav~Y}j!=BF!2%=czm)5*T6}}ORUX$8$LYF;aEg_p zavgxbG0AaV4ettl3!TFYGSF1gy$EXT9ex*Vxg&mxuaSMUsT+O}Cjyu{Yhw{}_5(Ku z=B^A{*iI~;!8`7;RpfS3jyZfp5Z4SOIR+nKzY4o)Ib~ zte>^Y*$O-Mbt(|7Sbi!nE;f@>kl#$qU*#y`uE+wB6+VZl-gd7@_9}uYQ(O;MQcL_&b&`do zXF?l#+HVW!0afVio?R=ounaYx3iDoQee(0?kNIhnV~q8MCsCu>a*CBTw0PR#`6B8l zzKd7CN5C$j53-h!h$+lJ52kvP0B#*;t;1rKw+2*1R6c&_dP=I-F(V3IF{UJT(2|6b%R&a-X+|N7u!n&i$xjHzMB|U{;uLgJJv`M_sn>}Cm^!KQ^oVt45Y)T%SLz=5I^RB3PagM) zsg@&MXoOUT>m-xQh_bU`mk(a(E$Vuq%t7-Da?KU4g4?=*Lad-%ft{{ue3lP|Vb>?{ zwX1(Gi{zSXu=+hzoQ(+qceX2%TIHrV+HsbBs0*Dh)V4bsi;=J=D15lSE4aB%2-E8@ z($?^0&~%Mp#VU04LE3PyZTYbmifC`|gE2`r@C53(I}wgba@8G#h-{^gKwr+1=oWfwvL* zYr^XpE<`S6S}7ekKB99w^zs~sU+Bh97NZz@98#|Pv1s1_CnNd8o&n^F>6E7chvoGv zk_!AxS!RtW=sU%xPf_Nfsx_l0-oLId>^{Ii-+sC2`W;WI_3DDkq-tgzty)xuEfUAi+poX2bEv^H zc-S73_hwevxtlL?5;Cs|>f~QoqQ1ea*8)A6gP8Y+XI~MfPko~#Wl?EDBKx)EDYlRL zFRtp(?R@F5U$lM#L$1^E*LL@$vOf%fXHkGMPbde(O!=$+!hx$j$lo8|7P*?xJG{X- z1W$K|zVNyP4LQn{>i5xf*9p2taFUKo^xm3~yn9pb)I%?9PhOq_1pNE#{=%CJ2p6XD z$lZbsRANk}Z4HzSxQck$&%dFqfcCotE#fP8OhY`(4{p~xunaFE-G;vu$5}1K8R?a? 
zKJ8?+E#L@1r+*)jBL2dNXi8;nL0qH;$GVQJ>UCd(oRY7kYAfix?rX}R!?K7RQJ5Gq z`c4-`59O+F3OLgQ&{$+KZ`8G?tJ5T%CKj7;>J;xU*0DOf2T7qVR8t=!RS36lI9S(w z5e@%kABMaNf%$8n1cMS&Q6<}qs_)pFjgD}wp!z4Gd8VmDuVJAp>5$bx>k-@^Q>_ON zTrv2~K@mrkF*jS)2HrA4rC$TK(RfuE+0JWovzLQv#y5nQY?er>esYC7OgB%%^Yt$4#kzq#|bK%T;&$b~Up%BX`Le4)gEX zS0_vtwdloEIfc;uG$r-$!RCXQH?BP#?z&*OxL9D?vGqZqw~r+`;Qf#f%ce)3`Flr& z`ya1i_5${RPBF#NJ<6CGo8;OBY((qKVDY?4Z`n4~d>L)$^u@O)l0hTQTG{jf64{Vr z;b)4*jcyiP_zFV9FkMf_0p2Z*$p%AQjTF!QOAOU z_f(kVhAr=7NTfZ{#r$d?OL= z5Fc>qO*c~cV{mi2()Zt&%`u_ro4JJ&+Pq|X?BR(4ieKcrD_y-@1$H_%WK1)3T21O* zDLM3;sD-VZ$qnW$hZQtxV=feJq6K=n8*^2$wa>yL#Yt7oA~f-?5_HyX`rhN7`Ltl# z2Du9TK}>W4()Mx#bicJ*dr}QtX+15zt&q@#m&eS~0H!<>6Cqh~Ub=?qHgEF>6Z*0n zFTAlfEtGiaq@IP4E5UODjWsA$E0RRU#B?-oXH_ynR4E6|4E1hb&-M~o+O74#>NzQz zE65*@SDQswgUpVrF0@B@=!)#7o?t<`j2W){zJ=ihlF9`SVE-Gqy>JnJ1L<_+zkI z24W#++zc5Py`x*({s{O9JDgY~R-6~ho}l3qW_I<;Xer7Rc;FkVWpigx3p|8&4J4%# zSlE+x+l7czq1DBg(C8fBU2b+r(O_RA3sV(O-7_Xkv1&JH&=naejkKTcWvJ2u6&sha zX{HSl#F6p%+1r=cGon{Y+lM!YVTsXdlt`}3BXvk4n3QYPvg%H+4k_-D|-;6FaQ~RMRIasfgoll7ps7z)H?I*-qAEcFUom8>7sSW5^cKQ} zE7y~zVO@N7BOfKmiQ$_y;L@EnMi+_pFc!&G&`iqf3B&q8`Ir^qQYGJYo}uV6kXaOZ z3+O>nY4;le_-Ne1(X>mJYgotU-O}@R7ifMmvqAOTi}|qc09q6}$g2d?bEI6we0L)3MVkw9v-Dp3EwXhMIIx z_cpqZ9~2DUqXM8hssZX1$Vh7j@E3HWo^KE~Z2ll$tkg{o?Tea^MGFJ@tFNE3x9| zOprSLX{~wWs{t6lKoh`7I7njlUo}U%FG4k4xuFho zlKXQpeWJEw&)ZuIP7jc;ejm6VcjX;lL|xk*Hf|YyEEX6#p<#i+dKSh-UP9BNH_ z!%69>B;Y2#55dEN8)M9SIY+Y{cHZG;A^@_$viT`(Lo`%vfxa((iF z!}&^{@y9CicZ(a*&d*!>>O5KO5MHn6<-sQm67HER4Qh?x_*&1)h|~+G75<{uy!=GY zCH=9QwM~Vd!aSSko%RVEUf!|0b%SDE9@YUm7p0b}mqpJao7;*pnINpr&&5TY~xIOf%*2XK$#d-Sbw6j<9PiDQUT@OCP{JJ_XZv8xY;tcz0hY_ze&GVy?nzHzMZPQpjn*8TLs_IACndCUKnzE)AwE`U8BW>I=-4|7jntic`@yN=>+OXwVHR64`wH6Y!W@O&svtB^EIn8(kE6AQO2IW+d&l4jt}9~ zsWqK`-&F8Y`P(-iFvEX>wTH!61j)_UM^J(4nPHk7DZB2-5!RCKpj7DJqu4z08ZS)I zcRB##gCbP}6Z0j}x^L|}V`9Qm#vtNM~!m+;_HA4MsudeKDjzZ)5zrv+kOZDdExD6bG(B3~NS#12lliPtKn zb2;QM?AxSwyqOO>ab~+B?oLRiuw+7chkL~`06Vkpo94Oz3C&nV>cAr#1h&5&gWe;A 
z(g(j8JC0@U(kiM4r-#6R;jHtrZ>VaL2}6pF5N1WfcV24HH<@o2wR(a2_rRc)_f;Dh z#FYBDkWcK*&1~HlWaA-`%1An&ET4#maJBQGAn`TwDc(D+)145)-8Vn_Is226#Nt-G(fc)O}&k=hWFaq?;jCe~hRZ zh<$W!|0JfmXvoExkUNnPF0WPW^3&xNY^0|LTxgm@N=XTbLt+=zxIbJWf1Exho7NGa z-ymx_YzFE)OfOwm1#BQdrmZxr(l;U5vX<)e=?o^NE0*H;h)??*ksjY zhoFz+zl1JjcO^AFz`BnKCaz#?4<8m(Uh51NXZG_XcHUviMW``SS>0pk;Qhs-thMi9 zG$Z>?kTCUu`w`LiEaW~h71NS~x_H!y*srMv(oD!PTKYhzW$%;CFaN-)xF{@6=3k{Y zc)Mu_j8=PacoUYPW&tDaZJz1dqZUuLHl?=wnq(_H#;#E0juVI4fAd4}&V?srZ67`3 z=NghMldH+JKZs6@ms@x5YSlSvYLrz=rf__J)o+x{q$dCM@xeab4 z4<9m5EJGaoOkZ2nBc*0=I5UjuQn7gpqEdzM30=nVHP)3;PV3R+t-Vfr9;GR$u*g_K zJGl%ZSJ(N(-%kKYb-}z~#TEhY>N~d6c6a7~W)azsJzd8z+FE!tT%U!!jXiddX$krlU1kOFAH3%WKFPh-&M31#vH38K zy}bKQ6^a!xi$0LX2)91%I5GU!o~Xd~;3L$wL`<^EQ2?m#H~?f{NKn<7H?- zG0rMAIqejU=d)D}C$gTzi4JK9&Q_(D_Hw|YuGmHs9V zdljjGHcBu09T(OFe(OZ2eb9yH+s;_fAi7MHseYoQer1Oc2j#+S{{7u$ri)%n3zB~$ z*&G1xAQ5eryj8V{iV7>$d0}{FkEr*#pa&=57MG&=l)&~K?Q@@DzH;~auHog!Az+H( z#(r;VN9bqM(wgD(M?7)Rb+^U_Ln*Uk?-TD^?P957rXDKk5k|)Y-?q2Ma-uW#0UNM~ z{7X>o9g<5SXJCI!vkbm=J<<`Bh<~)^^C4%CA|RCh;98t1bd7lrpm`jNkrZ1L#lYVP z=GDWMrgt{lm7$IG@z*1HK>=-3sSaCW&ZQRl`W;NXB4)P*1nUK_+yKb>Kt7TnZ1G=R zE(aXd?b}+*ynex*Q9qG=2RFQ(W8;Ev=^%&3I0M*j1d6KPE=ae!<4BDXJH;H^b)v`w zN>K%R^}a1gb^QR?d+yaHONyVL4x}yqT0DWreb3)&eJEkQF>8@))2U&n;$fCPOb2f} z9Vm?e&c;)A;US_YU{|aTQ}XuYb)Fhetr2; zp7p?fcIoxdo5Lp+zu-ND^C@qi2`zVilFeeSpUDMSpHwMXcNg&oS4p3^?!eV%<%cuM zC@zP7;Y)?> z#fHn9=p%eJ@4z|;;T&85W?+dWs&@Fjye_+Toj(XP)u@1b~5t|PliIR3?{=ZxNjPMiu4AO4ju#f7RuBDbc7g> zHYZYZw}P~8aZTc3@a~|;Fwb5Na`bafU`rn>k*_g>4@Sy@&C?&RFbcpq;L-@aMNEI_ zIxcq~Nzp_e^AWadv~LVK{(cueaH89GN1&rS$}x$i$@HGZxlNm`O+w2xBg`hoRqQvV-hm`9MFyYLBRum)hyy1&{>co`ddqZH^kK{c6Yx)oLJ zf*YEReyzm41~g0du|~bKKx-*1x#oW2Njf=u1~?)@y=Crq%H=8iBf<)(dH)w_=ddM; z5@p-8ZQFL{N!zw<+qP}nwr$(CZPtCw*Qri@LNsE;h&A_Kllb3zU@=_IXsiQ@m+5@j zrd(GA-3g1GvE4eb(ZE3mVZGRNlu&ucfQQ9D^5my%EF0qD%QJ}QCHG-JO_saF-bYj3 z!IyeD_cIRpj|?5|AC=e%mz{b!sio%qx^1~$rEO|igYY!Qh@e3h2)qi)HAg^ZMCjb{ zA`Ftfn{X+^%*?t0Pf+l3bka@CSWWoZMFo$6xpyLy?9^!7V~4KS0v8xcFCyDRtPq)q 
zE}~G2Or|%#6E<%U&IL^kE-`4)p|S>jmh3D?Mnx?)I;z{rTftc?KX}w+m_=AhYcw3T z*G}lZipSXOI!HJJ@ke&tl4K1iE`YPFe+PrVeJO1eEa(f@lPs7^+d+o8mbDR~vWn=m z9G`4w>}k&tbjfPj$;Z&ydInXy9u+}DEW#^c#E~>ibgE*kVuI^tWX6G=C(fl-8ME0hL2!?PckNz`RQ$3FMZQ=-YNd z>;?-v5LsmSW~di|Z%|Q+LKY5OgR8AP$!Y8ng;L^n=)ZJu>&JuX_-u7Nk~hwAz0Nf` z$&$x(%C1E-o(kQ@)%uYtDs2xvA*U22WzbVUuM&m>8LUHw;`jxOxklP)<7ne= zT!XgN24a(oiGat>4Zo_p2%&(x;XBR=WF-zz?<};Njg&CrH8CB9)nwLP+H16*$GQ&D zR=2#(JN9@7=yV*#p0DA>n8$kiv^;^6kjm{UWg5V7<35XcOmmUnmqx>ot_U(SL50hz zNjnT6wrJ?{$Op=ILPstUULo}xbp+RZo=9UO05l`9xaiHJQpODZU(a1`B#dmZ1&n9VdIVa7yn3c5&z3o z*!j7D&deok#U21^Ba8o4elO-brF&8=7hD!i;|6p(RFuAkpCbtw(9i6O+w`8WfRpQN zNpvOLat=ng$3su)>N3SmWjpR_BsY_(B=4R`|BJha4dr1h)9^1H*E(;-nzI*Np?}xG zOW{0l)K1gk;UEZurSlqw-4tRM;lko1^*ixpgf8epKVZiRAv+?-GnL%Medwzur&n08 z*u9`f=vDUN+fd|gQ&3m%&S}{X>_%sGyDtel7Ac-kk+f*(kosK2X%xaLI64e%)`#$j zuSE04k8@n#VIOX9msu0hlA$Lra)9}APpQ_yx#%g3YCHKeKTzi)gss0!o)~%ZhKKHr zo;^Y=C?dKZbB}Ih61^yg$St@GD5w8HqyD$FZR108I&_YLQ?~y*?zua4nv^}98x=hv zQrJLVctS#(P5?*n1LwxMiJf~OgL#u}T2OywS@G9RK_Sj1$bKHp9T|8uy-|aFjER2s zKCcV4n+QE7&PwjKs3saTVzY0%XR+}KQaNa5dgy&FJF7$VzT7yqA04tqfL zc46Uspq*|Nm9@oxSA0#gDeuWe{WO=0vUEiR-9i#WwxK+vvpCU~1;2>iyv;c-Yl334 z;w4sjlY9@h>)*~{>aAn2*dAKT7)#95G|$d>UP@M-<9MtX*y82g5FQKJSA zotIa_>5cIE-3`cX6fW`h$ZWy`#fKMR9^6*3 zHC!ir5fyzBVS?QuAME-l#f9j*-w*@Toxm|(*k3bCpwKe4hpB@~W=dUCqAHx!Be%wF zzgw&laXgje>v**VPHLR35X_Ko~9xp2r;xpPp(O`r9Qb65O|y`tkf zwRg0UK_639!j|QF&2=@3Y4%S#0yLcQTKc!GSxZ>9J88`TAw+GSmksLg{5GFG+tf_5ow#3X~eabI;=DQGrLiH@Ik zz#K^)LdT&Cha}H`8*`7QXc?#I{z4aHbO~Nj(=U>nCS>ni$v-7oD?ARtTi0`{FyFUw zy`EmWPg8%te1QMI2h7i;Q_#>B0Kh8+0D%AhsA{J(HFo~rT602rz}sOoL<{?4E?|}$GkPt#xh-R!x#1)cti%AH@?Ft|i z1qFd1TKC>^f_{7Edj7n8@bH9OoIsT%q*d75=q`ga*a({bGDO z>`Ij3L>48T&B+Wmm_d5uAa^=s?V5+|qt7*n`XV~*qFX*d8E8+J@{PSiX=inZSxv373feiw z?2mzBhdU5AspfbzQnSdH_AmziNWSjgUF)MBt%A7&!#;|$+6m_gSpGN8|GQ(tj z*o%1g$Ct95uxzhq?^HW35*~0*?O21F44IC@o$!&{_d}|_y%t^=D#(jL03SRTUQnz< z#1XSZd$4C5?~5@Ldk_sp?=Z!#&hJoVq=A0zj@|bHFN?#iZaCi~i$m$Nj6a@q;{@Ny;%6;k!@- 
z_TW&sk{Z2)*?X+cLTlW;yoH(T5`At)3B7nlT1D$8h9Jk}snu5Q>)vB*$ik!=)c+jX zhvoxA)rZ!I{5N|Z!g3!XZ5|vNd|P!;<9Srkh@a|-&|9a$8Xe?RdMFkj#Ed!?-$^X= zILMq*Nqv}US+TeDH3Z|=Xa+HcXjQW-TRN`$8a@cG$XMif3K1}FS(#>I z?Ay^axOU~L)tC^`zF-`6J9#(CAp)Hd&Lxw&Q>GDrL#BRh0}I1q$r2*mGsRcu)b{TyJ?$S zI)5hD*iFK)t;711&W6sUY@M{uT+Jtd{qC2RM8s#kZu53sR zN{?%MFNQMkj^`w~in~kUVG$AW?8X{Ct4z~kyyEZ+ZVG0T?zSV3^rP{KA5DMkK4rzy zg`Ko9g|GJBTiPFY-6$4PbC#awNBD7o|1A2-v-2Z=E-Q=J^IewCf~+pjJxB9W#f1B` zHJ|aoy`cI>&L*V z(tT)_n5R2D!-Z3xetU1bLhT%be*|$VIefMSpe6ozT|CwIdOBj5=2FjYM~! zRDk+?`%AQwZ?A;>ZW~x5XrHelJM3lLelDmQJGz~zOn^9Ve}v%^q#XitT1iv7giCgdH2wGHh;(L@}zPbYj(+NE3Zx420ntYr0Qqld9spT zsWuhk&F!QnDC52LGKrp!KZEAJCtt?Jsq#FDKmKjkp0{}5e3rJU+bfiY|03|MoW-+W zk65Xu|93ID-BP;RKdYa;wxD!X~ z^ng$f^TKbj)~LTEi+hQ#=5)j}&><=>7C!&^4UqOgei>^|Xtng))P>o)N%9MaKJCNe z48J9id!RZ1i!JA*V9GXmCJw}`cYId$0+62L+fxUkkaQk5Ow)N0HpjPH zn$c!MgLK)TF9#j>jcv+X(1o1o6<9COVlKA+U0#ZT&wA*6bRYN}UFWpUjqymDDf0T= zz#W|L`t&v`de7qDW?0~tb(~k|_?{}xYYLr3%D$|AZ3VX1Ov^T8L8pH*=GSR0qMPaR9{KWb1o)xs&x zC-ilkd)Z~pekxNfR#0Yg3KUM{nl z=BB+XtRQA|-vw4Mepj~NCz71S6*2ACk5)G`_7Pwo3Mo4Z@p zos&_QznqQ8p@uI(!jYAltLX~MWRO_^jek|$UUYN`Jz}A0*`Mi*Mdoap;h+Kxt>r zXZcRprqOikr@E#9JCUsyIZwx68P-z^uR>2+UTqW`{^jtA_}GXo?2vG{Ql#~P3I2!c zNv@22lCd1{g3$KedMg&C0X%f9n08C)^)M@MWflq4C(ycLH}T`((Y`GGn8wJfcRp(> zkOP2_UAvDB>Nx=)qUk^;Z_^oQYiS%8agKS@wwVU2os^@A8H9bZ{NNEglKh&$}n*W_jTFUsn(c z=Ph^3EqNEkJU%Sa*vxx8u>e@RR=C1XmF<u#t*ghzjCAtU$af&&g z7K|&$%HtHQC2@yiZX%^%K!bNA$|1*?e=_wb+sLuiK8Qo`J4I&A}N7t{wGjIYr0% zv%Q~hGKgnhIA!vS#{HnGn$ zE;I8Ox&hF{qZD`125Cqp&Q+5>ZavbLuZrt(pHy(nLwX?1SFt=(J4)RTjjlXD5b|@7 znVx8iJ7@P3P%1t_r0EQ(P>Bz5bJ3d8^E4R_GBqkyT;_S3$lJRS0#DGSif*Mr$n)|? 
z)NPko0PAfLfhob2xcLOXb7$U!6*taN(HcysKEz~4I`WQLSLV+m$L<8b4Te!4( zQ;a;5kA&0tuGPh={Ry1%t{);k=%==C>D`v-_2#6mZMK}ee2txnsP;k_7W~I`|Mns- z%YVTw+j?hww%{6qnrHs5zVwHqedr(9wfb;Nl@ZYfprgMs>wUq+?EnOwQ4A&uO0Xw|STNiYvQOHFMmYTJcSxvBKYl)_ly6%|E zdsO_BL786(^EvS0GZfqCME23cf6*Tz--}d+&h^k7u zJ2k*EN1QuS`yM#71!j2Y4h!V5UFjKpsSGWJ`HAeBBx}CL?-VfOHPwO}lDpm2QUd9fvz|qtgY}*W;RwiokcZwdC@-vVjabJ&K z0mcHmiq;_n5R6th$tK%_XSP@N+qQtTk9J_(YUSrnp=Rw^eS3n~_k@K@EaXi#un5L; zne>R;FDLusRGr^HSe&m^2NbjlqlVrwCbC(kAxAVnt;!LmZ{RuYqxQt-5hn$Aqvwx@ zMp+_3iC7J6Mes4UK`HgDI4ZYdnQc=)1bC0WCR1Nw-Ge#4nXds+df{YT>rT_=quyFp zvJ_s8GW3=ySsWprOQe6|WcfBdIQ~Kj)63jC#H3mv6w`1mo{AX5+dlO(#vHaUo}pbW+_u&uizg>dlTn{#V#pymg zSFCupCeU97je7wCK2OE736~j&yjzW`z&HzCAdir$hk0)>v|m`aiL{BID502d7YL+s z9>U<#sU#ag57VSwTtmc@(0pZ4SS|2=%vRb%^6a`T;1U*J89|ivi~doE_l9QyNE`9% zJ;j)Pn$~1EM>gE#Y@ncay^OwCjV@%!)dGm{Fq`u1bRwfiXW6;-Hq`<(yPbZ!l};}G z4J!YM;n2kVO(-iQs!69P*i%RUjwlD!gMb16bBdi)3#c=67fC_cCv)V%yz#GQ(xw-k zSN=A)V3volHPWWGV$w21%`gDJOB>FX)9BGYkIB!?Af1BAyq+U~fFC?G`8wo7kTW_t zt1j-aLm!K>>$eri*64ORa7VBn{xg=gdN3W3I;ZNk%e6t%-w`FH-DZza8&GOb20fsH z+RTB4=PlsCb1wn+3$&>*I(#c_M~gF0?u4mPiHZXn&xZ`OzLtI}jEx!%vHsk3)N%~5 z-lYKF5Po04M-uj-xaTt1cf21tXYq)50pkBYCq)ykM{|}0bU7H3&+Jc5OXtMZeX1j2 zGJuV=oeAvyExktFht_~g#De6V_JVAs@ z1`e3Lk8Dy_7k3^Z8Bu^Ac|hBUBUQg$3Dg14eKj=)um*d;EG!A&k7zvL1I6IYf~Y_b z+M^jF)FiH9(CE?$yc@0qJ}<>^X(^BiCl(LQ@4vJSuin=fo-I-UykrqAFj(cHOU;^qN+ zBEz00Ag&2j>RGL8sVw40NcDt#(u|An&ZfT4aux$#3yq8L6<&0QbizmOGV<(TP@Ry@ zh;s~-dS`3bd^OmyUSDY}$tBZ-zTAKoJd5$SE&=vMu`8md1^0%)=Shk2<`$VLHFh1m% zSr9itN($m4%x1k0U-KMzDL~=CP#$^G#NpVb&(wmTS%Kx?yC(5L5VoUg?X1DgV?~zd z0_S%~J%>xMbg1?u+-~x2H|3Q*uc>9QUqo7?M{Y@|CEJtUnU5&Fqc$2^wH$EQ8qikr zn`0dH;-Is>NojYA<6#5CZ8wWLbx#6fIoJS5c~qE^gUxu8lyzgt!%_}=TL%i0FZa87 zmCXN~bIw1s7EHT+wz1dsC_xhL!VGlixVnzkXqP;n;Z2&@m%gfBL6^=3ycCa%f9KJO z{Uscr`imZRVF#v&I4atFSMh%&_W%X43;KX-`EYX?XdflOTW`ZYdsX0a}7KR zn`4aZQw#Wo2c`FtIN)-8o|1+dTOP!Aq3>GUr>6?mP4(pACFDX?I^#~VyK7w`?iyWW z?3mdbJV1+fY@OaO6L3li6L0mDywe}b`p+KM%V`#Q*(7AZDXSJ#jBQ*mSxY05hzq1= 
z!d8%0GMn0SOaxE>K>MQ5RnmLoK5^?b3Z%5(WFqE6&L+F`S(gAWxHk4rXi*iRWq zdeCwpA65L4I{<=d7;rMv=YC%S`k%)v5 zU*A(0YAzt+S6Mxlo8OmoOAtNwJBCnZ|Gj(_4L7AO)MP3hW2j5Bze}xdtH7#CQHl)m@cR}x8fBVwkrjKUIzK!jz}l46{H%p(bO9-90eW?AYgAy zrZJPmOatx&IOvDmhXraHuw=lY9?ZMsd9aAXPI@Mr>J5F-6w{tz<~#xk@LEAs!>GOx z?ScNAEyohsN!5iQ#NRgGLm9)C4^!2BBvp8VH|+cRgTLkf%|Zo7AA>&|-y5PgEogK$ zox=d8`ro~#<}7%K=6AG7ADT*+J8zOk69JNL)ZtN74V6GjRJA0X1CxOvLD5r)$s^bM zSK9&a!EYAot20|DtQ#>Bdj{{j4%Nc7n#2v>41&{{?mU#hppz%rKjii^aKOHxghNfm z{92;?5Pj>2Bsu65elqT}Q0WD@hJu0{v57Dl3S{zQnpFv)%P>-8x<6r?OqXbGqI?~h zLk3Y3-IMQMlS`@l;S}MU35G2e0d%4`zoXngR9w%Zzx+c9-nXL;WYiPpvy=7Dj882;E%L|wd*G^IdJOwUI^fi@Z(V-5ha?V|8y!z+&d{Q{_#Q1ao+ zArcYb#G^=4ps^qYEnqOW&ryRaI&=hjqNvC84MbNoIUMEBiWfeBo|bE!LiuT4X7o>% ziR=Qmw*?)pr{G1%)*@|><1dhjtWo!5j+p0v#Vazs%FoaueM6uemo59~mVGH&Lw!SR zP{1Jq+yO}$KWErBNYT%hUDlC;OUkTqtQgc266=;eh{gIn4>_y+R$af@0}>=_x~!L| zsB)nykl#S5sK_u?N)dI12c??Nf%C-9Grzr6ZdfI4hoMyh)S1h`i0?VRTwf#5kqcMN z>5g|&;rol)<>%^vjDOOl5ILL{^=*Z)8!qoLW2N5^Imo*@ zKY;|9?xMhhNVl4gMNXF~u6N@Ct&GNuWX`=ftDN2Kv^qbf3kGd5jy@NeV}657;Ty#L zZfUz-*#>Mu`~go0kdz3B(w9&*oYqe$uPWUKY|DQFTPDx}R(iIo;gqE_5O8dOmU`BT z!qU!vzWw^r?sSQEN zT_L#j$Lg9L8p582er7c_4M-a4o1m4c-e>JlgIDRBP~fbHX!g>Eb&HIF72 zc1u0lwF3RHW9>rY9sKzg#cYl73*IeIKaDxBMumf^(V>ovLsP zA+$$T{&m}0cx~%>riSXPj|KdwxSd8{nKOo;jllO4flTyWs7*l`{pti$!K`B-P+)(aUk7_AhVubEt7<+puG(7imu zjMRGjCQKShHb%!iROP`Ha|;o-9+T1@4;KveB}#b9eT3t?Tz&GqK9z z>~1`GkiiA=Llr$lJ^vue9De_u2b-cO*Mq(bMWR0yqNB~|gS(^LT0Xet8;tv-!RAjH z8_{IheU8e2(oQDWg7)@ozX2Qi_|NgTtL9xrH422j6aGh28K{OTKBLKMap0AcEMCm8)=Aa%KKrS1)UU}tt09t5&-)1< zu$&ofsOMCv1Ym2vtuE3WeIqtaro8oji___HhWICxqxO0CZ>SGs;}36-D#{N@O;P!Q zwhjtiq$#8hKD5p9ByFK4{rkfiwOaGp#QZ;!CBVRbbZy8`%cGSZKT1H#0W&{9z`}AH zFD%T*xC79XkQiD7rQY{j98!uf*-&S0gJzy>73{$9$!mJg4`CmKRg}nZkAeWA&z_Yl zQvkFL3s$Y*OTtlCzd&d{Q*@1SyMSC<=Ph3xDSxm`wY+6hZ~(@Tk;+HAuJ_umW6`!W z;V4$MyB``L2YV$;jQth#{vk{XRTGvB0bR|>*QD+<2lx0_GZk+|%heAe!3uqtKr`uii?^1;z2r$-Kh>5A-&ZYOakA@78s#zomrtm9 zuU&jaA;Y9=2AK_Uw_MP_suXeyyl5O4#O7l}6k`ye5&L<(qQ{1RfimUp4Hrr6(?~ce 
zOl2I?Un8PS!hy;aKMdS!Q?^-n$J8)df5NvmJh~nq^y#s#fu~~f3t|$10slp%E-v(e zjld`wO)!_GAl3d#;@ngQ{lbo}%-_#KMTl|?lr=v(J5g7y_&#c5KG~kH?dSe2Lx`IG z0S%T9b=H~nz7DMz!fKRk!b4MZ@|34mx+VIq%ZeOZ@^jqfOklD#(gSeu^po!ecgh@W z9uQOa>^|vCD<#UTYPk9%8HLON7H9WV9y-}|U?aGRH3EZAa>l=0F+_7bJ(Mv^V5}7Q zpzGcJ*;NKcexwgO6dv~3r)Ogpw`2F5bf&d5YlkufL0BXbsa59JQPsM+WSnRAy~`m{ zJB19{pV%|hwQSoZ3|#=`H@;KUqlZqpW&#UU2I^{4x{SF4WnqB>BmQsKBB!VT)ui^= z?rlmz@q+&to()_D&6l5SbC5M zRB;RYjez~B6sHTBPfSah^$J}>Aa*GqqW()!(;ah%gzmNLLaob-Hj2)9d7~i&ovRI~ zDq=&pnR(9<^@gpSo#12JuTM3N9ozPD$X!cQC@i>iA0H^mr z*Bk09i?543I9~-M;3r0fH7lEyM~*#^JvX_em1C|4BD*XAfa!qv0|6hVTv+=iK4sWT zJ^_-NH$R*DOEdef_I)**pX=2xVnaW!q)D6BW7PY={-lA=%=Z0r*6_DLdh?X&ZyLAA zg@r?$!1v-j_GMcOxgb-cRBgrs>VlezTXJr@ivN9I13x%c5G~r+zORL+d?FsY7$}#F zSownRUzExFS_FOq*FIWKh46yz?I#pR8CkTc@59>lVajbM{6Bob^7QR?o>|`5A|%0} zows&5nYFX#ha1LBCzeXNVjoPvavAV+Cs9K3R;fY$eFxuqH|?86g?&z3NfO^Ai>nQI zOe$_4@8SjgRUbH;$ro>w2nD&b4qJm{haELjnWn6Q{ssi;B2pNA80Eal7xK+#ds;>9 zyX$yE?Vo8ib|tSYv8vdpI>MTMT}^vH6?)Logp)f|o=3ZERFa>C!#^XR z)VE$TT&zrwaV%GEF8GuDfGxeuF_9+O*qCud%>(m+*_jy8KnWRK-fLk2+3~0O$OMR0 zz&U?z(8BWd&Yb2~X?m0$?=J3}MI%kCmP!5oSi!*h@~R#Spl4641ftit07Sut19rPP zt`#c39P!{d$>G-($Qa*tEqhh&%w9#8>rBUk0SMzA1D=!mvbLmUe#T z0zqM_?Arh8+ul!wuTlof2eqgN9aOsIYO==C0hl0qBIaNcQ0i}-^;e-L$pSX2G}Oi1 z{laUf{H1+<5WxPa)k^-RuumT*%7D{|9Dz8s${R#)=*$uUX$lbLqYC0Lh^EK{o7W<^ z$_TbjMGgoPudHkpIq9qYH^8Lzfs<~F;a-&+fPEMjY$~7*CS?^Pi?rWf>R za$4Spp2fHxdQHbC)qFY6eLI_ix%v_|Jn<(f`cM7ds z_c{5JoG-DenKGWXS^`e@yby|8KEOf1V;EgsFT+>7r~C%+1sW_=ZQlRUb!^Cp z2p@+&N0R9-^;+Q{YU%O3NHPn^;Xf*oQF=A+=bV2k1rmweCAXPl)?_Rfb*xm@?japc zps>HXzNzQ8l*r^gg2@Yd@`)3J!Nuj+r|xiWJrMSGDz+p>;8HyR@aM`LXQy^7kyBxR z6>sSAh<)qD_eZQ{s@`I60HVI7MukDNExHkfD{~p?XI>M}f*;1+kB*>n4CIXW`LF&f zdB|g=W|1)*W!;D)F}2G7F&1NNWvwp7hsHd?AMYV^e?f7L{-d|1gUqLn?eQ=R5SNEI z6M8Sa<7OyRFl`m^WiH}^5_;f)fh_NomBWNgHB-xY_>^$k{$ITion8g9+pkEVtOm0HE5Wa<mJG66=R0UNgR9}{7J_&&_o=#oK| zf$Ds4*1|AaD&OLC%TTs{WIj2U=AKQSX^=kd9ZO?1irHC1T99Lu6NZ?_ztdMLzC0y3 zpPosRFw7`QnsUq=JQaj(66jNnWcFp^l@5*9s0kYJP)Us7g2XNX7rRvK^qgB0>5r`4 
z>7zR%Yjl^)GsjE4$e1C6$0CEwj(X(jYDLDawymFUiH3x{e3cQ^F>&Naj-D<_2GEQD zLLu#v_YdA1-^Mhd4U;Ne{08?~ii)o9 zA|^t)#J#9n-9}2~iIEmrX1V#9G6l$G#N-hLIrFUeF!@pMBx;Ozvg{Y`r@=D|>Gc1g zXm9_rgGx}|D(SCC*N%>@e3pl}QQ`P0O4{Q&*6D}`mEXFdiXk`TMf8sgACJy4wUw-! zGj1-9?%Dkpiz)ZW4y|C0qzGI2cfRewwvuaR;pYon_G83#*xJ-7tWf#6e#!o~_FIyH zroY#Q=FlsJw38Vy*1OPy{ivsF!I9I9DgEQ9vmS?maO$+y#gzYwVd|O!m~o{#2U_yHk6(?{DZJJyU_` zs({5Ihj#?%#jd+GL>uL2=RvItP`h2KXJ!bb8<_WYng;Q0LjU~b*!%k`?xMcWZJP`D z>h8xJ6{73`n$}M@?wr4r*APPZJc+3Vy-~dx1#hxuyq>YFQZzbo}Lsx zWev49)aH|iPKwhiXI)wzlQu)I*`AX_71!JToLk$+guf-XU~R~&bXl4$i-4OvDSQ-3J9<-Ph+>AmkW>$h?^~Wr z?;k{f-{P$~JmB8#>U#xImG{}b<&c!a0rYuU64+H8e}+O?|DWa7C8Un~Zl^Ba6>`uw zlJi!BF7sSxNxpey;6vr%Ek$J*|JiqQmR>jTyt)e>qSXmt{#D<`w}lb|{xzY{@>qpz z`r4*;VENqhIIc0SxA+KS-|U>=&UZUPdj<_vE2v}RjrrDk3eRaLC&LZ?u+1S0+XTKm9fZAQ?` zP10#Vo9kC2T`!drc(6k4!;XgX5a>15G~Mj)ao>?;%_bzw~B~|gwme~nsFC}D($cUq|GidYhzDUqZiV{ihozlE==hvkNH0gl<t3cJT(FnmO$|u6mo?fcg#5b%tVmt57f{y<*+Q&RnL&cI# zO^P@k`o}Dh^DSU!4IC-Vj@auFTh5=LTjx?KM*)PB((95ksYk$V2fv7t1LaeZM&JT* zfmoM-eEY4*I)V-x*ww<*e~KP+NWUu9N4W~YUM^2f8?=Q4B$aS51z@}6zMAL~G8 zLlBBB!WmPlJ-WhlnXlf!FN{2dQmU_u_D6UAm8j|9#T-)SFgpqrWD}mCN1uU3Oa1-K z;WEniRA%8WuAfwHcd(4x^%Yv&dKm1qp{{K*uFVTWmESMq9x7$s>1Cd5NRjPMoyV!P zePbM6b)`0BkeMY4DK!77YOLfEu@~p~MuH!~wq|q<=GeCJ*$LJ7Ikf>`>R?NkQD(Bb z1LUYW%zGnU2M_$3(w_7VD`OkFICBzNQDmEt4!NjBGS+WPlMVIuQ(zlTLPU*vYjOHN?G6Sl8-Sf_U zb}Wl_ilbt{Fa-gB8tD1iI5%9KUqhMzJe2q9Ez1=`St1)YU-D<)N3LaPK}~fP3d> zZD>L4lfJX$UDN!0v+|eqTmbxUs@UIWOUu9s-K`wSVem z3=8W_)k=&|f7)T&M&3yg(S15Y{??cE&GIjaJoobDX*;;#x^7@EI@Z-sJpWC%77eYw zS>MS8_(rP46D=ihU2?*y**BVX*@gc&NzysCpmoF2gWluCJdYP#zQzI##k!%Qvv<5CSd;A^NHXJ#{k zqYMKCqN$RN7q+@@Or}x+7l6J`Ry=WU&@hkE%YT6u5+h&8j&vFah0M>rR%qU?&!Z^I zIK>}$KTFDBrLsi4BuO%WX;Xf4B5PV8RiBaqXZG@jgn@>zTB9*y{;K+n2;kVzewY($ zZbYMUXDS*I-}LsERR|e{Sn1cZ5AJ*_ZMVY=wf+{Z;ZrRT)z&ceMng$C zO0bPqru!BxW*530RRN(x(bga&Yn7Nrpz-;M0Ftg3NikUnvGQp``W0wyw1B-uXd8Mp@MYGP7+2 zT7zUhp;?J~8wUCBh{bXW*qyuC z+NBFrxv5^qPKpdmG>ECXeNZK#>5=jPgL#S`?dnjwiwiy=v+E73Y*!dXZH1iX6hHj> 
zXSOT7CWPwC&fA`eH$3o%7$q>l8w(@V2?zkRyaXI<*WB}&g*h)zSyJCoSx z++#PCfly{rcl^X-njdK`86!qV&I_qHs^lG~qhv+SmgVfl{Nc1)FB(g?kCD% zGo`80I(4i{H%g%J;ECT07xRv{Z2HXA`;YNpza2UbNUMzbvZ(tNIWDK(54rT!49pL8 zr&d{THQgcay&#Lqi)wZNSKO6F++z$I4}qemXa)Twp>@(Q13mP$OeDuDbvLOWE|wAf z{mDgcly;=zMq~JvuqUjPiPfpYEPpIG^-kpWW9|~i33($`%`9u_z~`aZPE32g9THZT zA7be8e~`|Xy=ZW~*MA2rz`I?y|Vq||H+U(S+EX<}Er zA%OR=hjr3EqtaIg7YFCrVVdCJYuN?l>PC#{?P+b`^?444PDJBzX*6~=?%bPbY%d;% zu$`#}3uj*=ouvEzD{ELxm7JP1V1!DKw+FJ4PsHzcsg*s!Oa zUULrmL*d69Y?+@$>cOcMHA~aq-955JZ2oG`%PSHT4m@`tIJvs7e{HLJ?D%#ZS+$Md zv8QMT>s(%VUGB-Q1k|ehmlu*%g|1BRxfqbunk{ZfxUF*m~jjNFGvi zY`BI?mGP#CNV3(WlS|E^r}7Q4L|NH2SOo)nyN1GxWqaZxsO4HKZq~gt0Nh=+c3O|F zVz~~$xl4BM6xuOJsv^`kDt8CENBW;4ieX_f>u>XvbKfb$qG1g<qBAd;L6|%N7IAj%oOkFY|63U`8W_ebR-WE&C}rqC$Q9~T z&gvFLm3~#9Kq;n<3G|6%LW8(o%iE%Azn422o}U9gk=B(zhF9-yf65i%J+*%oE`A5q z#Wx80$EZCiqgB*_{^H?D!4pBRd}Gti#LWR-Z=3qwj5SM#O*#|iXQO#%N~Je%`$#PH zs?_G&bj_5!nSP~;7G6`KmA{U`>SkJmi_~bfF5_R`zQxS62wCi^;tR^S`DV$=FZ9uA zmc-t6%MzW9HJ>3I*8jA2l~Hjn%XV;=;OL;$^+!z9 z#9GIRJQIz;p^;H1HuvwVj3p*dxJC#={f=;+$9FIOrHLUcFLwxc8d-G7M2U9PGAx+( zU<4R_=oWnrpezdx0@^bDI%TX~iQo;4Ts0mp)- zA{)B-Jxis2R?K84+}Hv+1{>cX=~a=##d`sgJW~sJOB{aXBX=BBS4hdDqSBD!*rlO) zKA9m5x52#9x0(|3H3Jqzl4tqcOvxtamT%Bq-Kp(}g61LgQtEtJvR+qDqamq>&vVFD zEmjptk!*u6&|?NobM|nPz^?O)$;9>r?9+6<7X>L~b3Q{UdsG(>1;xd-xOxB4K01T+ zx?W8-q~?7OyH}Uo)pzl6b9^vxm!zvWVMV{fy3WlQ@VxmDEnWIy{V(knk>a|Kshhb* zN*k<`UOlyR-MQL=W$uU&B6YBJ5jVVues75&&lT5_oN0yJ@$(;9YULkgZ0lgJY-saL z1jlnuM?8r)y}P)hS^Qn*T|cjq!UQlh9%l6cQt!xEy>O|qDil2;8>{@NV|Wp`I7Cg! 
zW?>M?!SXAg-!&m=+Yq>tp_LJUb|WaGbMwmjb+Pls*Xv-}L_UzIrNEOfDC+42+18KZ;glGP}8N=J{SK z5TWNGE`ix8d@QCf+m6=8387LCr#ohklR-eSN#25tlsq-}l*pxwHZh}Z05sKLzkaZ} z&X&!xWMFnJ{2Qj_vHM6;@pmw)@#|NOkPGI+q+i7o#hTSrlJhnSvl&*aX}3uo z%c?Q!Y|aS6%f!C^LxyJ*qLi@za1Zr`mjToCM4$nQ z^yBm}TZOYoE_#L!lPsm-qt~XPb&1D4JQaHD_srak><5#N6*`ZulPA>IENO}k&oJ*U zpzxcu-a!W%I6-g|pY1blB^Hh_@+jYg@~VZKpmx`k#$DvX=;D0?duS^YFy~Z@^o|)T z8;>)~S)yvPX1uaQmoDJ%j~Iv0w?lXH)g*f);JDWZ5dr(Sc@1mT`G zSZc;v;9#=|j~{*+yH(>Kd-!12?&r8Bb(V>ynz9 z;Ixd~!lcB0R9%`OjKA-0S>Aep=SMmw`mo?aVl*}x(QNEQ+ANltE-erA^f-|n9pm@K;R*_nAJ3wI+B3QWI)uc+$ zb?0CSD$8l{^qHXIo%C)yTOx;`gNRtIUe26FoHbL#fLD<@E><%~h_;k|c$Ktc0TcJQ z>*Qw?Pd=1kj@+vnIKFYZz*#SGC#q()weYqq<%vSl^r~spQZLo}JnncB;z+f%fJOH%%QJm>@Q!#q zO(!8+;O#ylieCA;#F_bV$jJPMs+-22~vY2^>dx>ajvjn6XI&6;|{gSl9vboXS8LikiG#;r(SoY+$0xm zn}pfpX1{Z}E@%&6Vh1qks6_y`))XDSBizvGZSU_4puBteLjUC!Guz*^$7mR0dl&*8 zA$tyw5p%k{f0_+ZdDJ>WCt?5~0WNKMKIGG=jFl;rHLqBjFST(=r;Pppux!zZ$ilf> zv7jcUT>&sONb>8FyCAD~VBO(pz_IE_P)Bnp<9tb){C-A-mNxrQPXeTlI_$cPsSaMH z{Tl}G@&?*W?sjnamd2}Co~BG;XJI%Iw+1w?$=CQD^0_tYgE~D3gN{-Wfmc4GEoh3b zIU~)g`9K?rvEyq}+aX?O-9lJ_X_@vgeoslP<2z#5LztKbUGt*B1K!-_^>`K!y}T@mogc(W>BiwQ)~d0|b2X=s_idO>QSfF9gXx(mfusAU(}3 zG!LE%S-OS7fw`5%--9d#(bY_RP|PXw98~OmGx;Rp_i$@5hON zWXcAqmEn^WYP742y_r5z^#2O3ro>A~7)tDcn13m~Go>FS@l37sm?C$sEuUu0$_@GC zXU&JVs$-NP_(?^^cNST5bRo=|hSBF~?&5;Cn14k$xJ)tk`CXPcM3Jv2*hZqH zx}3EBBd5f*X-Jv6`z(pkWQ3!pGESXd%$Bj>!vy@X! 
zic~72tZjP%gv)C+$R53Pq2$WXn90rZrFow?TcXd|7+Adp?MgOlzQ{Y*tb3qaUt?68 z5l>OGo}DHpCPghoW31pehZIOWnso1X;&QRS{x&mGUrhhHO;VK`CKs2Ebz)b@ zI7|sC1415*Q3nTi-i;=&MC~mtyJgYH;hTksDEz`$s#j@}EWEC0c+VKswRU?DALnyW z(%Z%C6TYb3*Gz_y0B>wpzS2@8Ui5a>sbP3MsBUAI+ys38V)i;!5@8R-g~0d+!~so2 z*jvFsLLQn=92Gh{t+$5Zy!_-=a7?pINUgUvNvD8|cR6X!hVSmd=xP=yK(DQRs_oa8JPVWO&UHdF+%+u?Sh1g|Ode>o9>oeWGGKik1I)}bYFTq`nb)S^`xuIw2em9w zv)Or9&W6|~A44}9ZZBiB98%r4K)Dn8fWLA7_ z^$9yj$-bdL0%6Y3*?S?Y1)Ci3yrHY=CE`db@1}9HR)4nz8EXgyZIQberr>P5%f)|( zN8sz}7A3+P7@A5DzyZ>GR7+t0^hj$4X4PWHF}aSnNm9YZe<$*cI@lL5v zqvidDS~Z>C1!T|i9TsQKSj*OFe#QdDX{_aw+zGi8ZVwK>BZQLtmh%9HRFAKrRoC%X z!YDkV!J{m`h2_@=lkg`N-gnOlR-G;RD*m2l*}Y&W83ux6A`(;JF*~C;gR(wC?krQ> z?`LFf_Z!BDrqRXCc=xYKYnBjuf`lYbeY9M~Hg+#N`rcK{F92P$CVNNVV;-gWi3-1` z%*Egf%bhV`WHoCv{#k$VdVM|0=vgNUGr)2j%)u1vV(psEmzpotBN8UX+G_R zi{u10_!g4Qip4raOu96~^C~bOQvPKE8n=DACE3LW7jIG5?~C~fnz`rX4wqZ?((27> z4i&hAPSs*mMEt|DS6fNrgg)1Cr~g9vXtyz?nNvU!{bf9i&F20 zgKfm_#K7+TZ4SE)2y;^*jslYZU?d(`c<=;qtSPiCScxg?g{aW?n+Ys&fN;P)Z zj{24r6U)!@>T*MS1Z#s=E&$C*qN1ek;~eBx(fq;Q>}h4|+7*rS=p zLgxh7ENXHZR0}!GEh?Tkts~Ba`({$}N~MA3Cg-I2w%~7HMcSD%JnP+S6Ec&aTg zQxIsDlc%Y;S3W*bS(*2B2RSK&p5w~KNiEHvL9!l%{>?k`zPGM-wuR9-+ai~^x4<*5 zPY75%^K&BjLdeJVZ40R-lB=^Xd|yJ$W}-wFGYYJ$jiQb9z=$9r(N(z{au@yg3FXP;8V>6`J`%*=eje6yXX`MY=Y? 
zoR)O-=1-<&VN?NUr%6`ZYnqPl@I-XD=|32YjIV|?nfnx4Bsv)&t4_x=rPHS#cG+w1 zFw1B#SqI|2S{ZIQy}+?>odp@=S}^{VJ|`@mP5P?qfxJDl6%wuK$bAIPnvJ?zT^RJ> zu2Ii3kq2Q-C(3)7dZxOgvpREaR!-;~IV62R!V?-5q}L*yiobF17Wf8lLLt0dzFh+3 z!iV?+kKSUUJFQ@!z*lLEs*Kh-X*jhBmJ3!=y|p`%j_B{+p^G@@_B`N!y;=nL$)a&y z4-ry_gX5edKSj?dK+H2Imp!B4(WThH zWC|rP8dqn^R^L}|IIrC+Ogp!#&A#ZbgjEA(Hk{`UIYPLCO-VUZKJt9n*0$~*lFGNe zawB)LCh$Z1WB_Y%&S1c>+IhY%!jc^?0$-2@h}0t-b;_)bCRC*%6WrY?-Vc-wMaaai z@jniWD#Y~JBO922TO2a(a49fZw;fLWZV7plQb58o+*u_I)*K28N1oWRHLk#&K6VmU zkIV0oB~oc=F4zHYat3l1z9AK8A@Hm`WDX*TptF)r5q<`dJ*!s!-N{_J0lamuR_62V zjV@PA5xj43(QH#RXgEK+Nd3X9ffb~MNBBNtKZEH(LzZ7@R;4gGNz&41<$9xyO~5Kd5=?udC@Th+gEYwn|I7*((S`Jyp{v~( zMD)arT^(YxB>p0M_6Sh{3M&5`D&Ydl~NNx7utD3d> ztw^U!=R5o9PvIXDW>WLQ1`l=UB5XgsM}KZPo|@b)R$cq#=sW+Ci7S)S+q&VBvN}te zh*5yYSHpj)a2t2G+Ux^eo*Gdysn5Ag2rTOmt2Fs-?`%R4c+pU1y_q(`qfkDLAhP<7 zanENG|aG^6#%fotC!a5vfzKC~)fK`Hmz8*#U#ufpVd!gj7cSE4m6qf0%gsWZbw9+S-@ zMX$)HW6vUi!%9WT%cGBf(f;2^(c3Q3ur}eP&s#;aqO>%7}ly{XU;A;d*%!^+Lq`FPqPVZoj)p}A2ran|wEFtwP{u>gJsDT{G(O{Zlq7V4 zZf{$7-G0M~@ltoapvBr=!F~03ZdoHZ;YSwGX%Rpa0~nkmhy$%33rdMXF!fU=%EzE2 z!RZXN+<%Vdki`kg3iW>D<)$Map$>T`7|l&VSGV4%Q;KrJrIr>*28(Y$&Z9bF;^hKY?H}A8L@V;bZ`=0Q>u_mpE0=_M=QcJ&A zr%bD@7=sq?dK&* zZwS%A`PS%!XXx_Neo$Uzr%OWWC+emMPucEAF6H`~brb;Rv!0(g{-_+aIZX3}k z7<|`T9fU#kt>@U(L7OHsi!yc?vaP(`P@c6yTdUE)$*KFO}aviz* z@EVGkQylG1FMP!$!p`)7H|wEw-V>z0DV}2_h>U!5)Nbo+{=5jr)$e7!-+Y4mdijlF zRV7ap@9IH?8Kmp>Zc+_QKQhz?mlr1s3>W8|-;Z@+5)l8YiBLQ9yo63&KZH=zniIE% zsIk{8*(2>rw6~FH!$D_eyO_C=#snAZHP?_Ssj9uEC)#pBa|wEc{7LwyU*-tRPZA-<5RFHo5*Ue=>Tt_%6HQ&H7TeUHf)ET@F%HK3{j2}ew`6QYUF>P_>E zYu&@_o2zt)`07deVL=Tss@{;7XKam-DxwYtbvGsT+ibCf_$SZqp#=9vNc}h%5q0u4 zqnbl5!ip{Z=$kmcjiuXyx=p~J!#1Rrzi-4mts}>3?n$l)~q*UZT&0oK@I95$V zols$4Iv|LgUw5cXCQ+jotyD3;^mrSE8)*N#6eSo#|k!K(I3Ibu(AsQ{0B>xD5 z%9otA!o4D#OXrn>&)o^&rJ3svWkDIWNy67H{%LCKVZ3R(4D`#EcUwk24(}M|FbLFO zRlJY>%R!^2OU`r0;~Cj9@t1C|BX3L1PG%dS8Gqh{E&le#Wgv zzflm8^oiJCPbg8!TQ{k>+kcl9dEA?c$F~?_*6-|bZ^kA5&QlJ`=bL;nFpb#x)k4ke 
z^RRrf=`-9slINfc%hEUnVp)Wys#Q<qE;E+GCz$0~m*mx*k`LqyzfuTcfbWTFdZf#>Yy8Qrbk>Ydy8;$(Zpg)k?<;fyJcJ z8Y?FYoD~5pThayJDVJ@!$N=>#ipQj)E%vFZH48ZfR5^t z@N;f@2th3#y8@%EosG5dZbv!GYm7Nx|7*0}19ffT*87=?%(AuBH0Jls%5hE(<7{dP z+2{M6N9#+hu~vB19+tIf)H-I*?Ug0gqsg76o5=^?=Y{NJ-BVpmuW~|tKi0mqsSCxw zB!tU$zdR+l4IXJv#RZTr8X_VazRUY+3e{|89Mssg2i`qL_-d~Q!oP@${5{m(rKpWk z(Uj^Qb^Gwfdal-E-UqokYMf!kJZ&Q?YJ_>!HIy@2c{l4S2ga?v+wL%qK_VpNFa?)*7Q{5+4IeUOoVh+N?lD~SGwCA*B zV@hMCQhbvZ4l~3$t)+3oOd?ziB7gOWR&=jP((eBLKyuU60M)Gnr!}@rVy&!8u(Bla zZTVHG`~+Xp_)WihNd{T6W8hJ*fpgErUlL6x69TVDHTf}(Hg)Ik^>MJ|l zlW*uHA+J5kP4+98ICl<(yjLmb0YREMsnXhACeaBu5{#)=8*lUg_tLRLeC`S3*{1Gp+=RR8Sr==z_xMQM9Byk!AO44ggTF1VXQdb*q zm5W!IJHaM(`T$DjW^1td2hT*ck3s{XrU#;Wr1KdbGXi7dp@VHVv0-kfD~&m-jDhr7>1!eo$t(O zTjgOGWqi(!A*`zEV5Mv7kClschI8VAua4yAb24b5wT&g?gN+hpiAdrLb?HjK^FAWtAjfJv=K4X@Ii zCfCgkk2ghvN-l^6+aB$IJb=`p6DWaxt%)KQd*R-(lMswRGdng7j1^H zs=CKzzC;Nh1@kd1I-Oy&wtFed_xSf;CeZ@wAV_4-vf)_oUC3z=WoColS=Udmc0x6F zZxgrCzBC@$G#(&T?(-UWd*;q9JaN+V%5L%|l<>tiMH4ik*rv^e+b5p14IZ2~X1f&} z8p{Ur^FYWYOBp_Wy!|4MY)Bw$pu$iOqtzg?j=MK&>hU4T_hx~@K-WE^M;ftF!%CN? 
zaI(}0gTAIn5ywWbsO`=jL3)LG&@p<^)C4WvyW%Bqn^i$3dgEAR+ELWy)oVGmHhGnKhGVE@)G8HozaG!A@K>&K zRp#^Qc+!4yn;#1_iCVeF;&q0UJ*wz+IIo<`WPCjfLPPvhO%nx~r=C?d?9aP3NK~8l z2kbk=qcb9>?yhRgzk*Fg&9w}9IYbEE_NBbHZVNtNga`4+aH}kR`g+NnwrPLa*{NLj zf-Lo>Mq@0r2&#T(7|~tZthu?6|FIM@DWEuOlUu4QCD3jj7 z`(P8ac8#@DU=$~~B*evFKNjf?4XH(-!-+>Qp(%on{;h3QWV2MCr7DfHF!c)vDIh^^ zvA~>YD7|_yypy;bTkD$E_mp40Pg+?dlU*Ax#gJL)hJRp6s+a+nm~|h;gA@}oRG$hy zMOyXlbDEc1joeqIi>eBT=H?oRah3)x(k$q=aODQywZGQ!e2)LFRSwxJh-eAPXPW{a zWTl<pQO+B7$x6FcJ>@xAT3evGS@GH0cb) z_?qd#Qf;Us9W0+X=F=WuO zWzs(08UpJcXUO?4(y8t5(5%YM+Bk>tk!$S&9_EJx@0!+q=0Au-j8f@|en<d?98o;3!#+(?zWw+#X9DZ3dkuGfK=K6ZEVf_xeh`hs#Qk@Ld6a%J6n^MODmPBW^r z4fH*88?2M^lF-AOSSEQ4D!-YHi3)`Z5!@huN3ZHanf&`;9$*SrP~A*gPrukwVR9sv zRD&8Gjr>-uApAgE|8i5ox4jkGCt!4_B;-Isx^++lvx8varUW5Y<(P+wA}6CSA3fZZ zGilt2TM0a?weOyJsiusDoH;6t;%@P*0<5Ur>jWEDWEO_|4)|8NX1PeF$;AEjNm8XE9u=(0a^Lan|rT6jNZeAG}Ah;`d;wGD3zO5K^P^!)g$mI!HxEWfm`9bM|3&6C zD%k?`mRPsSZ1BiCPUMVF5m*LsqYbh+DWanC7AshVDxgZdhCC{zbxuA5euQ^W2$*HQ z%`{Z}G7Oj~Q31+eyvPOeQv- zi@%dCgEsmJYZZ50LBf4jOqt}EO-Yp5H3fguYYA!j0wLd`?Ne(XoZcKB!Xz^j%oP1O zT9o=rI6z3}dD+aTH-)0>@*$Zx#MKHe-c9AKIkQB&n`$90agTJ zjPI7nEookI>bD?e#cR>&)V|_qs!P^dKTjpx`K#%BAC@lhVu68A)iNGs#mQhCY74mM zs!F?{@`pKeIrsL2Gyxg>hd71NI-kEj;Ca8qaxDlue>UJ9CG=vVZI>80a)ntGAdF+Dim28C4~;8I03yW=FE9MTH}f$OvqJK$F$F^ z8r|)90qjecea&h^&2mb2QO%^qVfL8iSTMco{EjCpQye`J6gbtZ!4Sfg%(JB89Nz8Q zfs*5sC4Nz?gDvdw<-9)35aj@sn5Gn$kIhE~%eGM-aEgzlLuv9C7m=kz8qlj}@4IPV z$JBqk7k}T&Av`fUwrt1sxFwh5@y$0n1j_O5t`Yx)f~tMMkN65tjk>6LaUDDlN9sE# zRG`P>+v+O9dvBuK4A2=OENA{>#(7$XlkFNZeK6(AYd^*J45?fmO_XQm0SgOGjR}u4 zytZm0ViPIgPn)EAY(hLp9bleR-Cd1 zA5Twf^t9mNp?2NVnZ)#w!!j!Q6JxjJc+@v1oOxX3b6)cc1*zu7kO7nL`-nH+O^2U+ z`>Yx9@nI1oiWZ4CzfO2kyV+pw`42nb*L?W^i9~zqa3@@+L!c#WkzX>k*1fvzV3NJ@ zx_{{WG{}Q}Lo|e0VPXw?S5WiWxOyX&&WB*O6*S70J*eJYt)><{u`XfX3V8&|^nn{6 z)1VPy3}(=AzPNuj3kq}~5ASJG85>H&ftO1z0Ebofdd&!SfixmPGfTlQ(HeAL9=qfD zvIKDlL^IZXxqWEJo$qbzNvf4v*qY(uFp;})qK?nTEcAU4!A);=$W1Wp9<$C7NVb$b@hGn$H88HE!Va+UlA-DRqYj?l+#$X6P>j 
zQ}-o=7%w-nl!)2NGQ69U@NmW9P?AYGrtPmo7Kg!wwwWMsHyFS)&HY|m)H)VqOf<`2 z;l?#9uC3rm^OiK2Cs%8ZC3Z>4vv{%?Qk3-QEb@WMj^hYI=#G9bHY;H$@NDlbKBl;) zQ=?;h($}?!c9$wi7*WFrMGaAbR*VqGBf}hvtmxV3$rc`{lYafRP{B=EIrr2*1xj^d z+|d=T$7nn|addKnFIn2ihz>84OV@Q=pp|E#eg&U+dIMo9d3`#%T@rcE8_RLIxHF4D zz2iga)HnqVM*gL~HHbYmVw^9scAjkD`5hL(k;l29Sgb_x9leEorlqVQ5hVJ2?Vdq! z>Z}>{wC&Dolzy_f^FB1`ULIIL^rvC2PPClj1{6AU9L1fQ#m6Zcvrfa_>89;x#$~nM z%V9yo%veW`ByCFlT%DOcx8)lK11@T%#g&pKA zuJb595v7!;%K97`wPtZx|5Xc|kasfioNv3`(~1qQ=9Z~)T_0lMo~{ERVr(1~uwk_F)5dZA>;(_BMj#H|xj#O7ZAI zw6)FL_A3n3OwiEW1l}$iK(bryVpXT3DS&ZiV5OBZzS0}{Xo_Y5x^rZq^gL+{-rBp|MuBB#Y0SZLn$kcGNfyid z!Lk!x>Z%8QjRAA6jxAg0vvr-(x?3j@01q9P%w?h`RTL^2v=EyVrHaI?XeEd{p8T@o za%Tuikvw-=|Kq)SOrE!QsEGc4YsP5|t`HF^h(yPL`GW!7z|#c4>m8nH0qJ|Lch3(+ zgRtK+6&{O*hfIhEM)LP#RSA6K9=vAsU6VI@@ zRm6_~p}@&gTxRl=^I5D3lb5^b2bOHGTc5M+*#@4nyeSeZ>NXkfm3F?uW9 zS@MPp=V@%xGac$mwnyCdww%%jhH2v_-$og=ib=yngk+d|S@yKG1f3}H<2~Lp2alBD zf;W8?UVJ53-6cu~^cR!FCu}AeC&lYheCUt7A)RBo1+NotoCH*^PEEDic9EZ_vy`rq z&EsVr@E*f1WYJ-hxUM#sT6xXWmOd!g3VT*98A^FBomcM)VYWOpoa}LNXx&l9MecX5 zt}5yVUQW8UY*geErWK`p6y2_Lhgj7zn@&Edm`cUTZ2mC8na~=jM6JA}S&GpLz~&v@eSq-Ue?W}uikLKMW=PUrMeuyUo_M>k+MrEv zc)@k%-zJ&r`q6s&?15nT?i1Qv)QxFmQX~!Hl74#iEIe!fu7YRJY>V>gb_G z{JK!MwGCEeZ6uHBq( z+Sc{J3*{8Cx(C?Eo@zlwnybeN0ONj(9>Vvd;wW^Hbp4mNz4XOQ!QPU!gw!J_h{P`e zADI01fe5nnoOb-nLcDHJt zAEn_l^?Z^%2X{Jn%Oh6t`B0p{Qo2Xln2JS$v>^7^(EZ_J(O|J~j~Zl7>|9~mtJXdF zfk>lD5f8FyHziu`F%&D`w=S_3hpKNFs?eSbd&01X2YOuZ*cMp_=Ps>*$%j;`y`i}T zh{Mrth3c=mr6CMMalm`aIeJ-hx%gu=DNY=68%h#(D!u4(7YSV);NM;LU%f%s{2a0< zLEIwGUQ{~dQAEoxAVX8u#HEq1>^eP)d?luO&wMYvc8fTFE7h5Hb*J1W?5COeBt!S) zFcB?LhDaWA3ehaBf=hUi#etuD%7O4wIS-F=! 
zd*d_(W+K<=H5xE7HEtU?j0y$o9191b0j274H_ExXiWVklsyb7ALjLM0)lAKC#`4ML zWEa!?1QUyZX4pgiBINxRocJlX5L3H<<#d9HGQ9(D3=cUdyl?K^7e7l^d1 zISmnEnzsyMr_oYye}|qp;6{5nbkHGrLqG}uUl>$iV>5Gs^tZhg8`vKe_^3CP9hacK z7Z{U{L3dShU{JHvw>aSB6swQGxz3o>Rq@!1SfF(>zk{x=_;g8L_Sr1N&Aadv{?wU= z&F6c*(GM!Z_A-Ick_Dy>B+2P?GQM*>A@zwq&`p*=JeRSf zE`m-a^tTu#K{u+(9;w8MZ>dN7eVUrsKH7XfQcB^MJ34VDgrM7s7WTAhkP{pdI6(hO z$(!d55{?&Y5I!nK5Pge#C81%OXMq=*o4l%X&nv|n>bj)u1i`^H{H=)_4~6J$v?4hr zhC}v=maiUyyXv&cs=;mDk)8ibfFoRf?Z?263~s^~sH-M^{WvGK`4+W%bZ4i8h5R34 zj!#M{6yZ{X@HMl&L%w4Jkd-2xtz;r zEuQn~5>Jbq&(m$K;^1H2>$t}n6!DrAtR(55R};$>G(_AtzL{GqD3DlU=8I1xp_5XM zZlYstrk#%CAU#(R$8D~+o^S3-cuBdv-`0FV`nk5xy9q$8~vZuzg!vl&&=~0+juD*Xth_D)s97g64AgEzQxUpxk|{{eefnegz%6eR z-ERQ_4L6|mB=cg^2CPDsbrpfSv%+odnRh63E0_G3_%WZo)QC9OvO>VFjj#e%@waq( zw!U^+#yS>=1VAr-J{amkKBjTp%JCqm4x}6`Ons-IEumlJ!piaKQ!^H3_y^Z``ml5q z+qjFn;x|E{BhE05xJPQWJ5eCnD#sbL;$1ut8H~W{87QJSF=c^~x5jAKA9x=i{^lfx ze{d3te`iK3YlFXYLJy#e{AmBjO+Y{izjz53E+>R0RJa|&3_H> zzhn4^)BSHs{}F=xCkRg9B>he4Kg#p_7#JCVl9}rNWi%}TuR#y!e*YBx_mKBP-=#-pr%T}Q=z&-Qe?OVf6d!Iw3l%yhjyLr7Ku*7f=YLA+_xbUIl6e_q#}6<4@r&3`l*ax`l<4)%ZT>Zz z-)G4mO~mn6Ha|?H0sS8d^jqZkH}?6P7%A*m1}Q)xAb|nnXJ7sHXW;D&jV%qW?CrE! 
z_5ZaHe`5k!-tRySWZ?{Kynhwqho|hp{0Skn^;qfcOn;Sx?-T=iFHnw9APK&oNQA)s zNfP=-hQG>!Z3^^t5inGUBL9)a0Q{e3VfU*n6rfy>t${jl0auNm4RVC?r&s_t*1v2J z!9FsbN1#D!fJ?^D2AKl7&M$2i{(dR_Z3|)O%%H8uLT_Yc`KwYu+mRlM0$IEVwx3x@ zVEjoIR*t{S!q}AJ^evD9*Z*Mwhxw;j82qXhN1kGo_CU8#0$N1!rzz6%Cs}AqIp`Z& z8M3lyGtwLUx-8OHlvU(N000iqD}N?bi~Z+h`I*o!D>Tn9uG0=wC<-`nekP<%{of(P z2vo@4^xv#8e+#Xc#?wg8!~j5r{2vMB0N3R|Gl~A9(C>tP*^_e)B?JV4UMvl4KPyBb z^WP(6qwDgkP93?{{5BF8ak+rGke>;i{5J_%|8nq+M}%}(10EMS&HqRURQbQBkb(6t zFVCKaO#*sW0DxQEA48xq@RvX1L-a-lz&s1^Oz{hQx1SSne>tW9*dz#ogZx<*z_i@I zV)2hpHiAEeKoW>Q!Q$_;XaD|)^?!3`f05(I<}u^n5&8Qw%|Fbd{W!G$9zyt=Brl`ha literal 96001 zcmdRVWmH{TvMv(bU4jIHySo!0xVyXS#y!C$xH|-QcXxMpcX#KJ)7`hb&ue+NPycz0 zU2E*Yn5(MhS6|hfYt7m+;-DX(fqwll$>@Fk?aQApU_dxP2Dx9d6SYnxgb7&>b+&@j-@{Yl-;|5n|{f2PjL z+Q1O-|EQGpcRu3&Yo&ko_2J+7`g^s+|4!{Y9H#nz@%oqgU-D0p{k?kn|5x?Dp-%lb zsQ<5L!uFq-(gxV;+8h2UB#-|bJJffcK?41n+>L>Jr9ChZQ0cn}`~i!<*`=nZ0oeaW z*>)8m4oMskT2CHO>^){uLXs-B3iii@q>t)S0r~xu zBa&j&!DG@Pv|`l#0}@hHj&upZG>R#aDlu}$I`dGGWYi-PP(RAguWv?~B%v>C?_h@q z%IR3|SZZKXKxBa(K6bxOqq2Sm{R|2!KD2hy5CjJH3F7sBsJ^>rBtTa&VNUd|T(YRM znn~)wZQm^QdcyFD?Os zz}4#Mlq%=PbbA*kEat=NV_2P-)OOeDjjAz>v|?eIjF-2u;q$5zt_m`Y{UmJ}|BbS^ zsKM{koA|uyAy?udQSxu2g-R4ulGKF?m4!-!l2oFjfzrbRz;f)SYvVuF36S@t<(K>}SO3bJUsfXpmi#H-A^jzif6(e*%u_S4 zvD4VU`|(@J_$O}g&RJkU{^gE$;~?*X{zXq;+d)B&NkNu{nf0rHk|3j!sGjUEkN-r3 zUmkZ$A;QDF5B~L^um2^FY4pu){*|-(IzXV7;A}byAO1zq*#9$Uep^xgZN&aM|G*2> z@~ro{3%ytN|1c5$WD;O#Y-wm^571_#|5tPWEfHi>lG2isl;wuo+uI$jdq}22DW|x# zX3@telkW_{qx}ddVLpRmmY*2v=v0C+BzzMXzA()RH7U%qxsM^q+s#QzdJX`IsC_d5 zvOt${!)m-OATp&DndAB4d?L|FEEMF@!~Sq^alnTw6hxf(;4&_2KYUVe#!4r=@&FK$MQ)jN%3Z8h<}1;Gci7H>>VZN-=l;r6Z#D;P?az0({O)J zjP~$hbfU6aM{2A}T)uK#Qhby)(YAI>LP~VudWyC}gm#QdRAPD`XeIFQBrucWu(e~oeiSsIRcVf_uZGdkCXP~f;u--rWN$Q8WpeN*m61yL;%b5^p(UGbZ;wgO z1rwaFik;3>dOv+gs(!nExOuxiaD3i5n{J8>A!n(|^U&N66STOHZhJ9;>a z#_Zc_!zdsxO!+9EPnNSH%0A?~cw@pu!QGDe9aFyB`6NkjuJ0)?6e6^1T)#R+k7heJ z+vjrzd)!QWIR~i0D6}NXSDP|s))%4~oO}{GZ!OXUgmYpXO(7yl%1Suyu@BzMxP=j) 
zhUTTXT)qf~Q#}@m$4i|6z6&k)nCYpA&BEIY_Et42#WYeV*dLhFssOH24bDW1uNe^dl2vm`k=k z7!fch9nAzaC|Ptq`<*WG@dtxjh!<^om~3Ac4>E2q6fCeVB!={23um6wlvTP14%Zvm z0i8AJme=1`YKOsA?w9vURs8`7i0_YOq63YQmF54W;y(!VZ>hNb=TuyeOH7LXm5AEE z>ec_1iO`@JZtk|8rm}VvGI1NcsC4DM@e1hK3=H&@?Y=4opmHJYMqt|*@dmp&7=I>r z&Np)S1P41hLz;&H-5~ayg87-mb%xS*F5}`ICj7SFp0xW>; z_#AXJ%h3u6Ni&-c#%k!tO+)Z)EsyVL$x~>?!H$5!y-do9_!nOrqK_(1$xE3x<(; zhLtTt%Zuhp7wug7^Ygy0n7)tuXxl!Dz~As-Z12P#@l_RsKB&WY0Ei zJS*@4tmCBY_@s0_ee_4-mLNoBSRrza~u4g$@#s+4m3R4r;5pD(IsSXj9{w4;<9O{D_2Xo7S!xEA$}KrRf6bnws-G2|2s1<__yKT|GOFdGw=Vr9t>(LDE*$W zOI!ZWNAGVx6kz`_=D!FF_#cn|!IC|qENSvl1U0SgC3@yrMRbL!;wM@B2LZ&$9~4uF zk@aGr4z?Ad)@m$^vJ{o8a7{%z@*pIrY8pv6Ag0o}@qDfanR5aM&cxsD@3#m7QS_ACa>qr7s)~ipecdMh#E*v)@jFo#ORw0e{S?;~bZO0CpZiCR& z?Y)PoWhARl4^yo8+Hgvx#avBbQuHSta&EmaondbD&U0tJ)5g-OB?t#=c=pf-8Uv*`8t&XBky!G#CHR%CCJVW~}y2pG4=9xmIl9%ZMzbR3R! zCzcOow9KcL_tjf9G$Ri&qq1H+35V4JwZY)^NF8TmB;<6p84Vw5kmT5Hn?nl~6^il^ z&!t;kE{U#Sy6esfh;nz>lCYdL+l0^`+C48iv1tf8q_c&JRN%e(z-CZ>9S7`1-K6F{x@jbmO3% zS%?jRK6>4=OyC_0SG3Oo}{Iyk=!b8tXv80n_;_=-k##5p<_b70)n zFsAMI8nNmRWqEbuPDSi7tL0l3A!&6fG;q15CJP>;VarfkMu~`DfgV`#=*=XN6XQT) z`D)sdaG3r0hy_cu&J?>|GIa>sscay>DHzZa=QMRZ-}nZlOg|is+83rh(@8+gz?$32 z6wYUg`i#839LdAivTBt!pH#@RlI`(DXY-B|4|bA7dChwAQiy%GU)q=Hc}pz@iD{^Mq;T|FH@nAqkAQ^%SUl z4kvf{NxZPcQ1m8N;=n9!x;~D0WMw33H{P$BV@0hQ*t)B1Bm6SUB|)kuEOQ;Nj_Z?P z3LJO(B4IRt7j$QR@`^@(eB37x$d)TAgf-{37}Uf`g9< zT58gaWBSFP5;BE~V#-cANd;-@N7t z=M_(~z;OnFnatAn*Y9!P(QR!_PiZ0F{4=Ocib^tCLfxbuo#Xe??j8|Eh{7a&^gAjl(tavmTJFWGCB)SWQqj z3e_F3tmH0Td(>Uwd>`{vy?^6rSifq6z!~)1`}TA`6QaCIt39dx_XO}@1hywYdtceH z{@VojmrLYH!onG-rfQ#Y?uos2Uh3(T2Zv^E>ZzM?u3KIz4dY6l+a8*XFJ>*wHyBA7 zZIYw@tN4**61tz&y! 
z*YVoN_7d6`&b!28F~KeIJS=q>ZSR}k8!Os2VCl!*Ctf+%4U9GV$N0JCHFoB5y@2hxGH382mvnEIZd?WPMUbu1iIGY4>X+EB)cN7cC|NX;fOI=C5- z82aQv$*{QI^m}k$} zIfpNy7;A^OW@KHe`vAzA4bU~3O`^bO7pE^us}{WZ2=f(zvRXP3MZ}FTw-ECx=>QlF z-vY53HM=Jj56sm#oJPe^P5~NjwO_m;3VK9uDs65G<_mauf?yf8)m>`udze#DXL^xi}l`!l_k&eGB^v# z$NgGZvErTJt2p&}RU=A3%<2z!JQ@2r(BW($ z*bW*TD>Ty zwzlJB6p0FYx>yhJmGtUlmEu!+c0N`p{}_^!**H3x>+0&Z4!>#BQbrn;XI)2f+iU5DLFebd zhiA37X}(x4T;V+tY$~U)kP$z6vwlH#Ka-VPxz;cN5N_ovRhYCoE!oKS0O5#1%8zk) zBTMr-?OOPPq1bb0^3+}yk@&$i{IA=~ji@nGPLyeuirh7ARK5oN4Mnh+2PVtH9=$*D zYS#styCU?c31QsN@Cp%?az$8SN6b4*I}yP#w@@+<4&E=g6i*Kj zZ~L`Z0O`q#Q?6;QF>bq0@Ugt2c21*Ip>?LVWFZ0fMw~TP)D+71f|1G>9^g3!>c`_4 zSw!}^J4K%z_u~^xbEcOMuSv=bWXMDG)JF1_aw{{?4T|!=6a#B@dKTAmSvG6o1ftSt}5p&|rUTn#u)AV~m^rsW~hLHUG8J#_n1-Zi||PIQ>FsrZAJbM81Y*8ay!1SOA^I zp*ZKpU`6q2JM<9roBN1Pcw>sKZYqV1QXzIN$T!Otnge5zMO&+BMCqR%cPWp~sFN!| zLt%wLdNg{8xG_~&(Kbn4ac0A0`rH0)UrCA-O>5nj=8@s?9o;9Cn5(}Em*J*>hf~?z zt;k}kCTvjCdz-&}KyDk;Sn%KZB55b?edqrjCaKhvVMVdFht`Rno+N;#=6W)#O;+oQ%u{^jzEIh zc`gGV)l*C&sC+*`$huikJlUN+CniX@vJX*OvDBNps+Vqx2%}i7Y|v4$Zg97^FoR5k z0f9GPIE3P3o}F=It*LOxXf&Q=N0VPZ5CQY_EPSULzIqzF!lynZSS{-zY}dD*8H-?jCWvhTli~&PDNB{a z&tj|xDi%x$h9M~KTTMFi8x&)%YClWbb>vM@RL41xoJ6AU(Fd@%u;ne++SRI_#DFMn z)G9vBVKjZ=agrkbwrZC0No6puga{Yvn(OZ9b1mV5E-AP{n%=s4%!)5s%O;TyXRh_b zWjF1)p>qOu>b8;nnuCFdr_q22`jSuij9b?J_ael?jE>rob;r%P8x*L1m&GuR@Wl_} z*?L`pfu>OU1*%_+XR2KB%B9v-iRbH7gC}4yp{f8dah6Q7yjW8HA>#Cp2IHdW9N8_D z*@LA^lD#vV?qQ;yB886F@CuA4>!qxJ@)G$Gw;^-n!mv&VVtytN4Ss@Rs#X)6`qB;s*a&ued zIM+{wR(S}Wvm~FtZPhr;z2B&l^a6CZUiQ#eRE1YNU>!tTujzwBNP_VRZ%a?*1n=`-_zdD6 znmwxMG+*D>$z%%>icytO(cSFn4iQ$SnG1*u3s19VF!?|zc{Lnzv~(0>wYWOCEvuAq z|6{n8^dzg&U!F2Vagj;W-d5)h#ZANubkyT0#LD?2gFzc4@nh6PTxYG}kUi<_bp7mN z>A(-oJC$>ik1MxCKjUoR1x^oow&OOPu&=J^>coW!4wd2K`cAe;8g@f%=`Ujo+>ps= zzb6SnN=-KC^!}WqQby93yM^T(xHg2ior96e@sbIu`Rr2(sJU&NR3a1z6(5=1j6Fr< z@IqAlfmdM;DS}2FpjJA3t!u-bjMW2^*zCx_kdk|Zz3<3sm>ek|rnqfLS)@%mG&Kj@FE9^m7?YdnY{M*1pMiy{T>iK{MVX 
zHV#^Kzut9mS#$X_1_(;<_KKdCLC0Z6^q%76a_q>}CowRD7xJ!0fvMqKjkZ&qiDOahw3L1|4^c=gOB+*ZKB$fbAsnm#&&_sI z0)N4X;QnKKcxuBa5z@2t0f@A z8b-&ZAl|s$FQCM=yH6j2o=i2%m#nmzc1+E%9Cu)JhxD>0tof9k=l7esex7^ODA{n+ zYt-LUCC!=nw~tTUhDGnLSpFCs}m9waeP(( z8e*=4k&RnTM~O3S<|0N7FMr>xg8SJ*vWu4HsHZR$;LO-}j5I0KWj>n9E*aul%`|s! z^^#9_8bPy4Ic7(eQ2=g_(x0aZE;n6^`@zuKAcm5Tg|JwX5Kd1h=Jjh$`s|nA2Q4vo}X9 zCXTFFC(S%n8(1@5_ccz9QWfix<#AcA3S1ks=Vl@WZr~NDEPtdbr=U<9HU*}Noa{_^ zvEt)=?nc(Sd*-M-LSVI@o5=FuaX{zDI*W(!EQ=xI+U8#7?d4D~ubyUFIXsCmvH!8e z#+xI^)Yolt#8WM5QE12%5iZ%3H`=kaN?fuvh{^rsFtZ=Ug82oBdsRnV{9`6u$tKt_ zKOleyE0_XI2+<_UtYjeI(+>2?00d(JGpi)?T!M=Np0EfWLu}6E%QBc{@$7u`+2hgT zwGK}){dd=L(gAaRKb8PKyp^w8rc{XH+sUD`pPRB)2y!%-dLW&~=@*dOa0-obw!|Rhy9Kha;{dbUnyL^}Q#e9Fb~?LtRhda1 zDs$oU!LU-({bCrpu&4rJw~add1j_yPS!Oa+{;37(3PJ6Kcu^X)4P-^uc#EMKKo7*SoxF`nsDZr>pg@>y?$;zM~-)E(ctNfi2q;2O)+V*2VO z$?+|x$Kj@MI_Y}el>88&uRpUlFO1o;)8jhZALpZ-RQ_qRnHAs%R3Og!vz5JeA#vTw zu-Hua$`_Fmb%MiNykt90{8Kc0;(YhHWrj6+Hjmq}D0DEpJF4vklG3}z%w6sYB?cXK zBQa%_rA5J}Tg3;XZaPX%)wQG!7BfmDZzkB)gQz-Jl`!yc8g{(3TfyZ71%t_%664?0 z2T7&2%(kxv2%v19DWb;n13AT1Fj*27&mbKYxF8w(-RV!7Q}rDA*mbr`UHG)TRP~~# zAc50V_H-t$DSS~AX3GZ_akbQgsL*f89nrtDV(!I44`HT8kL-T}`x3ftH&T#z0WKnm zmr4R0-KFZ6ZW7c_cA?7c6eX4;pj_A5i=0h>C{T5PqMlSgR7G85@#Sg87F-;=OaNje zS-B?GWeLZQEiC4Y^QH|ZAcwvLb$^YzNnACKCUJ1t8 z=b6~~u6CodqEAx!(_x(%V)-HP#$)&^hF-T=!Q36P-IP#V(_=0trfrFDen0WzrM^*G zJCjoUB=$JBJ%nx_u}2=n|_7+@d|SagXkt`5|a z>Vq9pf2c)adBH*8CkR%%Mer6rVbSWO@Xr*FDmQ#bM=81bP$J+%f=pPoW7uM~6c!^J z8Lm^}JR7!p4EM^tb|+k(?ykxYE$Od1mZw2z9%CLqTwg_^o?q+SmeVkJpIXyjy5_Vz zrtjE1K0oJ7~FElX=Z7Nx(H;hXWezAQ+$dcLi%5vO})X1?a) zHy<`wqois(N1lYFJ0HPl$?vvZh#l3fK95?zF2c0eHLtwL#cuq#7&QkLI!fQ<^%yI2 z2YYS3ycvVWcN@UJx5t06D|=ZMX?+q}YkJ#?w?A_mapHk_iF+m0_83#=wen!R8Jg(P zZhP(;Yd$3&!gt^Dc?@qmV{HxV*5-Vfgy6Z#yz|;WTGM)4x?8<>J-_qe?e?t$d)p+% zf7-)ukust3dI6*h60SV~9>>z$Zg&sb-h{?vcwaa6Uh6Lk+uZl^v^1Vt&dPXXaL8Nd zU+;{i+FsVIolm4o&K|$yW4|nUJ=Y5s-AC^&b;e(>oOxZsc>(Tc#~Lp$ujJ3x+IDBG zotKnhJP*LeJReWaAU$<&gwtOguGW~n?iP6MyfzS0!=Bvgw4X-9C$yibIH`$vXI=*b 
zr0{!NzrCGByp{#lx!>)9x{alK&foI>{ae~m-XKGY_t*6Qh3*~4zwHhA2S4)xOjDt9>UoJ|zx1CuD)o-j?Ji7uUMjWr&>oGJ`xoE_mfK#12LL+U`nT zt|3LkM{v^zR6|qq&;11msg8D_zdK;oY0qQsc|TnKFZ35Uz8}~ASNjVX&dc|6a$@NC zyzzYXwl~595|OMklq0lZ(dxbz9vM#zQ4aY*r};t)14$10Kr@gFn8Z=rL`!&+NJ8`1@KhjEv>{WRg%GAkrNhR)fyeUt5^6_5)x-cGre&c_F2N9O zd7$!t*7L85f3vxd&Gc9M`S5`RA0k$oErb36EM`Q9hdTu2cks#IWA(U$0RfF7{NdB? zU-12#({4=@h4<5LMkmKg)`jQ#AW5d6(vQq?7R|{mtP61Y>L@A_D%oXfT)5;xUWq*&OFULCFm{L58q50S{vlF(481I+ z^>UxlKH|q|{S_NvzvrE+k*8Qhu;9|H}jwILtF8ORUe`U zvJ}a{L_l;stUo1G43gRzaX#j;GGtWTLJbX8w8~{#R4P>{^zTYnI?|!SrCDAX72B`r ze3{FWv9^akVf07NuV7^G6NY7T2ZQs)Jf6%QrvwyeTwov0)tF8*GX2nE=$taqc6P>Q zz6pT93eU8R#I&H-@{c>p4}m)flK2_-VG(2URbXTrgy^0_)|=CfW?4k)fxK;b(6T~k zkx&;+T%v9B$j@Scs$7Zsig|ECRFdDDi>Nj*1sid%c$fTpk;29CrqZ6Xc}X_ zhH^=#yX_=Z4#SO9B`|lzP=FTs4y<);<4)_{(inF9QG9SnSJ>WO_iG_SW5u^rlZFVE z2Lq8;lx@*Cbg4w|Z(q?5Bon|0#1}17$WtibhM_iNhb*JJdW)|s6iO{-@$%+Sl&Qu> z49EC9*zJHsjHJox$1T;E6-|W zrmwgt#hk}8$Ey6N;eFw#kh#pYi3b#D_K3%{%4ymA+@c?*n^hIw0@QcIRPnv+-qf8; zW+~$|DDeizi%FRUygz9KX*mYT9e&!BX%qB;KXJ<67{_8i|3u8Exgleh)5(w#?h5H# zh>n-TgkG+DJf0=FCFRzc84aJRS)!}KzfEpx+fb;%1P@)_-)u(|Tsf(jbNwZDGN>@c z_AwcBYWcP0CJpMsy_d_!EC*p1OX3_7v{EKn$vc>srb1PHJrETl8`DX#+xww)Q?v|z6&+9vFGb$Not!@zU_{n`= z)Ocd5CeMaZB>=q!51v%XG!+$Tc*nBXeEUGdJV2O7vrX9dgvEQxYEsIU7elc?S*$$)E| zGsxog)C#obC1#_#khZxFEy>Tw&Ub8lmeHR`_&b3mvqE~^bx@W~qElHWEV7n}x;CW* z_<^->Hx*NqQJRysXDl$L$urHpQ?~SzMLxEl)O|YEI6n86q`)z)X1&Nf)ugBEv9VwV&`t5Dc&W+L>pJzU|ox{{oHi$egcan z^h8VcRM)C0ul{gEO=ImLX*ox2IS!(xzGTkamJ@$=(~%bD^xVL8L%trkZUe$JS9>}4 ze(OgMIuBr~khZ9Ct?#rzyLlfU;1yHC_2p2~iCf^2RI=G)W(V9bI@z7(yLYZA=;Z66H~Yb>;9WZ;vy+g6Q*;9pi3tuLSWO z`wt@V`S|ZSdQt#vu{3`>jamTq*GoHcAUGX^*O#;v0X#G8tE~}@-AcOAf-?;xo6+1A zja-}2;WrHu8>uZ_8hy-w14GVuq!hpgQ#?UR)z9=HKJ-JNS*-L1svX@F|0fOf%1^XF zxFeT(to>k!SaC2z{50NnjuPMvK!b@0j=(Wfuh{z_y=TI(p(XyX#UGavVrQZkE*IqH z>At_=F~+IrCO3yAc8sKUd`pk~adXRSMB@@SgA-aTZB+F!pYeo6&)~%~F;z6Jq$Vg) zB=uW*;*X0$=FjSPPPahxXb|8?py24EEoM}tI|(XIrF(LSe5~#Rght@RhT!0y;4B9H 
z{k@lS$u2oaTZ*DawWjGVXx%c*c2(L;#;;bL-L==O82ux=vS5~>B~$15*wqn*UGG8D(BZTF;a zz~Dv^K;JR|Kphc8I(}4>*rqjFq2tF14CpNUw~JbH+sn~c*V`$8QTDy}OC~7LL;6wc+F2jd^25#N33Y9+s~xZB>m}hIJeJodUXQ1#Wv$P6 zG6RWlw#kM&gH(z<)gJKWT2z(`(27rQjy(LK8%i{9{SZN2VZw) z9jg>$pd0rgx0Dlc#vS_QEL2F)QVoUexazt%7LUVFR_f4}9brZQ>e);p!+d z^H?Ry`65XcbyX%8uN*5v7JsxO)~W$h5aH%|hfvmXJ0a#5pSB(?56$Ztstiu?ye-Nz zUnDc_Ej>!f6+OlYgN7&V0*KOZ`;IV6-h9q>hOcxli@Tbde}1gr+@^Nw?i+4Bl!|mV zrg%SMrAR(miH3%@FSH;Q0)3NZks~DJDtq#v*FvUlDjTE~Q&OF;Ks%_UrjUzaNJ&jG z7g8CEuK3$%iW@@J0=Z&CX*7)e9@UvmfflwRHH~ZxW5tk=o6W@|Vpm(GntBD=(L(h2 zgI@#eejAYaYXIeqUcqB>oe5vib#995WZ$Dg)H!mvZ2MHq#swe;*n03rIVSHR@%r8} zZQBqpGxPa1>G}rl;3CA%U0jr0t}<hOFcN9}+oiZk6URC}mq4}UM#v*yyhS~8QL=S(UhTKas59oi2r|M=WE zR;;>Mni$MgbapICpoQqjSTs^A>ZU4Ng?8XkRW2)jdR4hF-r$9bd`i61GcM+cSd~|t z;yS80ojBH2G*_-5ZbeZoBi=|+9Oh=S*vvFiD@l=>dRDxuFo)RHRH>#~fwrWjO@el$ zFfQ&WM|EZrx`nAoO|2l_h@ypRg(jl4HFFwGF%2#DFi4EtZ?GyR7`rS4gqZ8|-E_1* z%Z2pScR7(#L$O&TKOt(DTgir3{}eCHR;u#g0L(}pM4wo7Vd06%QA_Dbqd_a~rm9$g zcIZ)6sUY5vBG!Ci;S!{Ut4K{NKi*hzDCGLG*vzFuD@~D_c4DZia3{yrMX9D)f_AJh z?mcm8#+3JWRA(;5TDas7Bz3b@c6sI3EoF~!Z$w{(TfGPdQH^-A4GMQc!fwb=PbthA z3O=tYYEMJ@RT=vgHD;;#sNt<4-c&vn#SZm=er&m|BQ@iEyk)R01#f07Q2HKj{S_g7 zd&`J|c5bUm-Awd}YIhO6Z5NsxyOxEeu6!s>RAiij?BlgT-cs1zL%UB~(+CRfVxTu7*lAl>)y8 zD;>rrimBGEDWsu_xCu0BriXv|w%0Qwc26&KST*Tj>g(P{Xc|d=k(GZLT(8qRG3G25 zBPO)t?vhJ$7BbnB_!(OLC6@aPzD!Qbe7;I49l&~Qv z+Yp{bdL!1@#k?c^iHd>y3KJivu%=X-y7Wg(T{l}udS)h<%qtFlAObpf*HWjRkBB+` z{lQc-MI9HpDX}lJv+rP`8t8Lz>L8_S38c$2w zUUx5bl`w&YS8r!+4t?+CzhkiNH79Wqzx6?`WOT0fWy|<1u!|DR?3+7UA()UEe}$S% z7J5t}n4uYegPKexx&nn?rE_3y}SIe4E-)+5+0-Q5vydW0_(<#lptk z$85)p$27;}7neQv@M_O8y_$QD^l;Ja*s_O5yQBENSky?M$m59li0+8;h?I!H z2>S>f=SNza>yV^g{Hd9YWPT?fJaDJW`sBVAuG77XPvIos>EqKGiTnT`bnp{64on3s zd!~JZ@r8AHSL|zsearE_j52;+A2T0*a38o@%r;93aOz#ER;oBED=J|sJgQsDQOaCO z&!U)+k>IZ2hTyW`oZ#f($l!ork6-`=n%>7vOg;2X6g{L(csbsc3%n8PHYdYUL1bl?RL*U{tshIEUB$yM7 z=vH+6J0yHm!K>huu!>n!^v|)(;aIRNSX7M4mv#I)#C#mUkKkA^pZ$M;3&Qzh!LnlN 
z(oO2bbeQ|@4xYn%3U-;OgjQ=7r{>JBWQDsTumcq|+iHa#0A<4^Z)VOTh<^~N@%>&+ctJ|W=o zu%#cD>F%ADQCVP5v6mU@Eo~OogF6&`%)yz#-`e@2`AK{v{S#oBv5Xm34ZaW1MQ`kO zL3Zqg(qWl03~sk}fclV-NJ=IpGO?QKUY?nnP7Y?*h<=Q&B{`8;Ob%ti(r1|6h9OCo zU`jNY?8`0_<&8FrekB2w;7E8*qlgY8NtNJ8wx>TZn;gk*5=DqkGYaE74@m0O^MG!V za7`{|RJE?`F!(tj72QK3E)kdXEFG6f#j2`XIbzT>03Y2zawV~wOhxx#jGjcrkf_Hn zV~{jpAB{!AC~1^9%mVQH{?>yN9ZOOqp_E)quWDX7U{E!H9$iARRxz~wO%x@>jRB1T z0eytZDq)$a!SIs1PVzKiITowJ#I$0`Abdb8+Joc?UkuuF-FtvPT970_GAsdyncc{4 zdE0kDG@93TaJzZ{E4qqgPhvD#gRa5EPL!b))Jt zYoq#g`M&AIP*$vvc?b)-CH10eb)B6rNDxT0SC)@ZtdK~E6nZpuu8KqrrWJjsUUrX{ zaZHa%2t7I|HMlBlEv6p*wBCEuP{0&3soWYqdIIkraAqnUUSa}O43h=%K?-U z)d>dlBYKTJupx=)Ra8dm!xagXMT!Z=^lN(GdIUm*(37cH)Gf;AO)A7(Gs_qCe)On? zIG`Uowez+m6ezMT9R>QYoofRLxsebm`^wpof&8Q(LGlUQYM}(D$fm zR5YsV4Qv-SgL@Q1%r}u)E(h8gMnFSISR^eSgE&==E9T8B#`M}?e{=$e_@D<+!K>pS z;aG&J;Z(6%*%B(N-1jB*GOR$8QcJ0()UxT@&Sv#)HmYRyNQS6)R&`p2aH2c)P*R`5 z%(GZpEG|}8nOaYYXZlFSN{S>)v7}ly&fC`<7%{FIeNUxiyH>(9WLz`~m2^tRW8vNz zS-P%0(4SKFq`0msh=VG}GGiPzsvm(&j$}cxBpKj_*0<~(_%IJ!jb*?%ZxlE}pS;CF zdK`cR@lfnBz8f*3oBWv}hb6=^Zl0<})u>7%5K+rQX|cFc)v~JBsAPmA`G)1A1LfRCgvv#yIgd5Xo@VstU3ik()5OryNP1Te5$ zIf9m4#4?u+18<45$kFg!(9O>g{sfnU%^t9SDx(ir+4UchOjc*X{ovlwotf8#o7}{5 zXK@;GvA|@TA4TgI2A_(gs!nJ5SdK!>MJ4L}O$fG)wAWh-`2IV>O6ZRmmy zh=sqxMQ4MRRoO4^*RAM64&Z_Jz&B1x2)^i^)*2D zQckzJ3p1bseha6UQ^F=;zkF0Tw#$sv$tI#MzkF5Kr;8|H0A3y^mqP-Oa7x#un@h!~ z8`otKzzz?<-DN+vpWi88(hcv@QcdgR33!2@!=>TSu&Y0|>Dfr=vJF6C_vs=V;O|)2 zsP0k>F#qn;g%wZ*zlSr*8H!$tYsF?^KfhhRpc~Sq6krLzgzFfX74--YfeX)$l}sFuPD8D!)==$uus_=~9a4$6rP{N?)pgGcHsmF0#*smNgtQta(FF_!NXGN`TuyA7E zw42^Z7_yB=p&`|jYDhsRL>;Y9abPDEJe^qB3LDalccZz}#H(MHHQAV${241^9!`QM z$(`iFbjNmP$8RLIku^jdKSZOfQQD{hC_sATT6C?xvV(vK=Y~Gv=5SVUvA?cAwG&iZ zw;S529S-W#diZciKd4{bhZ$ahx5d@#2780KOTVn2-U%5ViHE{P;%0xbe>gGnvq=OY zJWZ=~y<9spUc@?_2hW51(M7@S`VMoQeq2AI57@5*PmoK>CFNFSyJB8Hs82535O0VZ zevoBRKc|l(dw|58WM-8#M!F}@49|-P_`Utj!S2*bUO+k}+ceqrHP$|3zj65(V!9Sj zi^t>r^$k0v9%TxO$W{Hi?Zj3T@Z|R&GFc6c-&>rku2+p4 
zN4~w!RztU93^_fAXUIM7srX#=plZ;#aSS#cg<+Vd%H8N`_#*L$am=`NOeS5Ir^`bs z822&s8f%Af(KvKWCEb>1%Y)=z;+j2&Zos&53^l!gXUd)BvG_)HuWHd)5`F{GMs1^f z3@QC5Pm{aj)A7Z^QPr4n+ZeEFcqf0lAdj(uk^AUFN<4Z$p@-Cy#bsi4`GRrCm{PhW z&yt7Z{qZ&XEQl4ht>TuDz)Lq6M*`0=U(X;clr|#f_u^jCO8vxu243K=u|Jh5brqP5)d_rYABPB z<1`r_d;#7E(2Bsgpd4WK@cYQ)%sO^_b>64I+#v5o{lhi%KJGYahBaTC_o4SIFenH( z_^<*dDjl27*Y1j3PBZ#@XW!mzc8YL;QcruKmsxmlY!j8 zt{=5QUe~O@I^uvJS+LgE0!|gTlElEWzmZPaMy0fz$3Xq7!`HV6U;S~IP>J}JpA2;S#r%W_LImT!stXwT4x{oyCE_xW8k`CSby4`a z5Kt4U30DW3;7*WaS$6-3NG7QJBI>t9;3RPBzl;JJXo5dMkVTQD|0OSwnSmckldv}6 z7;*utlG4DbKP{q#;K=~GTSx%j7YCLd)sAiZYyU)qK0%+L)HWhK{Sm5^e+o1^t{us? zMSnxY6#<3-jlTx8YT0{n7VDphFehLpnDa{wv_iTQ#DnCAmLyCPW(qOEo8Z<{?5++x zKw=~`5^nh3B~1SoxPSy8+!ekK+{c=r%yRmY9@vVMjQS{~AaWg!iAYbUC)2$iNPu)j zxGQ3zq=HyMr>D_<9|(nn@|i?PA}j%!p4k=CcAhX$gbH0dR0X?&O3$TxJWv;@>$C6| z3q?#UdI~-J?u5V!BuYXh;p1>NL|Zz&xkz^3m(o!nS) zfmKL*grg!Ff%TZST$^9I2gsakzRnYfqT6z8N_77p*3K#@t|&^^Nl0*aCqQs_hv4q+ zK^k{=65PF;#=Ws_oW@CL+@0X=E&&2WZf0s;?$oWhFH`57zxH`t`(dwD=lttit#P?= zXVzFvF>zFQVh5^Lh69&z5_rM%5AtRS?UbGjyG{eAapvgGbV~}*L?fC;TI;eT5T@n; z@rm8kwuh=!9;u~T7x;mP%DTo{bjT;+gmRwY5I$fw(E>iL@gRFj2ZcA+VOTz!uwJ!zoMXleT?yl1&_psg zkEiWz6j&@GW=ure@JC_?Ge&9$r8mQ&(}X}yYLo^m2LbjFWr7H$_lHA=e}(oQ50kZq zWR|dxfe4YeiBYNuoA3vIbq5ePx0M!RJBpK47C(j&QKGo;Aj{4t4uyhRVv5K7i!1s_nGT`v_+S!<_AnNMUSRPkizh5lZ(m<$W8*L>X;F5 zhlO580g|LbR=~>$kWpF7@?*J}U+>$&8>;qEs#ZAx&#_c^t7K=@v25scR{l^TtDobm z$cC~4^1Ibe3g(z9+*Q72IT?dh{${;qLDEI8z|>y*TE^C7&SmyyUp=q4f z#0}6M+s$G)>5uEj1Xi$&>wqeuO2#=&tN`D!+^k5J7oF$Yb4`)7CP9Gt*mD*Z3x%#= zg^R`t-B_)Q`UUd=UU$=Qo zYFcz0IVjK_TU5k=54&2dnR<-13xI?%J)m(@x~@pQm-IpD7z6N(RY;He2r|Y2T*NnH zb=F_1h8nIEH)#Vha|sN7$gA%$Ekm2+58!giGxYK0P3C~uF^xu2Akzx0NfA!Q`l6>X zqBcgYanSNMb<7wTWM)JwWZX8p#=tTZZa(h~f@ny3$o zW2&4NLjfZhL=NLOaqd8REiOxu3@5+fUv*YnI)=?EmengrB|9b4 z%v#!f9{QNMNbqRIEJuMYv?W3nY7JMfw z=QFa8o6;(<67Ba@I8It1tKc)4CPF3WRj`Ln2^ZXm68JEls?Fs<`Q+_urYO|rb87iC z?R%%*6dZ`o``R5(EaX(&F*f@i9=9wwR?M4JCTo}$_=#TliXQ(9)m^({Ub`|=!@hu1 
z)ZDl8#OM1-$-M3nAn4@B(n;C;W4NI|gO&l2^21>`7w?%Y)}+q;C?VEiKE5_nh>KR|29gy2lA z+t#`#K3m;|yK#J=i*b|hrZ*lrEUsB*ECw^Kz#A;WS07Z?Ge>O_i>{Knpb(ex* z%d0a_*kv~^{z^TGE)vCMFFE2q7>0m@l39TtvGi$5gSo~QNgH>_?PtSnqbZ2 z_;>$O2nuQw!_@00MC)t6u;_IB;SW9{Y<(R z*}*_$MY z&tq^)C*|^t97x9R=a#lE9pH~Y!OvuXC{3ibt6SF$!0<_S$m5+E>N49ctn&xd;*If} z84i`!Ia4#`wlA!9AKPVjHR3z){gu}_crx7DOICo=u2sC^2au9D>kW8d4_{LWnmsH}eeHOS(#_tevC9z8#88-F#Cp>* zz=uDr!k+n){xY-O(zRvjimkB9#!vM%7*O_{R^M**`z-%EoWt|4ov>4`z5!cF72kw{9LFj)&pb zt8_Eoa0QrK=MDJ9r+$zGEpy$(4}_vWvP~HNB*0b5W?S531RXy+Kn4{#4lU8 z$$T4yM+)Os*l~(pLH8zfGi;aL(TMIqZ)5$%24PV%1ebW^A=ILc(J_b^S=Dsfm3Mg8 zp&p3{+h`VaU7|~!c2lcDgarCy%^m|9vh^2)QM5B*oqoHb0V$K}%NPK03e1v(BhyH0?XRU<+c9fvJHMUfE1EM9XBM(m+$ zGK*JVXCZW=g@~rUX+*mdVzW}}{M6a`jF5~@C6qK+uAf>zxVc8Ie4fk7i8wuDQU|c7 z77Z+O`%&8L8QPF`U3Fxf=E47pEj}6vMV#gp{h?oM6X%}Lg?Kqmz0<2J;*T0T(6-m^ z*lE_SUT?3J0(QAysvkg(Osm%SW|3Kiwsh@5rnVgw1A5bXc21w1Y)_{a1{NZ#bE*d& z+S?rXr_?Dn?1z&eC$Gzf_oo!@eyWrmkgx3g4dU_N-t9e({ex=Vi?W4ANi0lv-SdqZ zM}ieCGR+U)rV!6s)~xe1bgtiR)`BF{M~Trs>|hlJz8eo(17-zS zcm0!d86CuTu;rosGXAVE>Qeaw|6M-}V+~vmQg+Ju4H!>NT;{}0~9dok&OPq6B zK+69iqYgPq{~>C5Y6ICxPi1``4Wib*EHU0Ur}`hlwhK%eO5ginNzRL*~sxsYu>6BBSQrYJdh}Sc3lS5dtGB)8&HK;w5 z&mxQ1jj%B9>k)XdqKbIaS8}FhKJD*wi0Crd4`&cKdM(LLO;peJ*`WlB9J5C~iwMv3 z%~SgPlC>8T9_YKH@>!Sfr=+~xuD}-EEE}hyJlO^cGNv4mI-eHolJ*`8OsCa#Rd`Tn zn(nDtkY56KEIL{ZbJ5xy&wL;p=I$!rmFLX6e6l~76X^K)P#DUA)`x&uksA=#&&{^g z>z-p&mH(HJk%$GMwT{bKPk-=S5H;CW@qCtRo|71Nhq;yco)xclmgx0h6Y*d zkQG;x4ZGqsOusx@H}8@)O#eJQw>`O*xwF-~&WG+giF)Vh|IgB9+CgX>>TM6n8v_Yn5Z7U3Iq6mqga9P^+ zUi;LAOf!dES=5Kp*vHBJ(?LM^9hT%a=0kK}Y-*pQgU5>CG7ECHNTlxU@z}s78ny4z z4nQ?Ubl!XOh7@4%EzpIZr!hRaP%VFaC|>Q{VebP(QY|z`xIX!h8WLE;ZWoobk{iof z6d3QlAzW~R2`S?7bV%aMnX-NsAvpSag8{MR{hbhRW0&9W!#N+QW=4=?$!GrU2L&TG znSMr4qvckm3uh`!*#ny&x#}oUE4}lk>ZWu zGJ?)sb~MJ0;=Hl62n)$ocoNwZD7+R!E12&d2{XoQwra=9Ufm|`T#rc?G;_Qvj2>#_ z4OvBeE!a6o{E5VG11rOD3+cuWS|bYr}KZ zVbI@fVd)?EQbbG%o2B~sAD-iSON)-D|Bg!-V-i-zN$=`;<_eqV(}po3C-My%?;h>C z++f_O-mvk=jFDw{s_5A&8QPInBa7Zm-qCxuf!p7NU4Q&rOW&qtUBU^Eeo-Q^k 
zPG_RDK9DYVy~Ild7X<8vFY_8Eb?=s{MkGda?STAWVxKnY*8+gJu<=kTRkX(@o9eZ2 zF-)M!Ra?D_ud0mmORdSp9@MKqn+NQjy;Fnd)bQnZDILDgUw!I!c0SIzhqHD=)Ku;$ z7A2*U(f;=9bTkr-bQw<51S11iUXpP@(@sD+cqH&e|8Jl>JAc?Rx@({R8_tdI z@ojm5MqedDKL6&Ro1!OuNtFHbWte+7=qJznB;j9b#B{%Sc*kHxJjt)TX)nib^@+CH zzS@5n3c*yqoaSZR*?S4=ohOS-NxJQRjAM>E+hqx7*|Y`F=X_FcaXI)^;h@@y%WV3j zWnL~~N}z7IIOyG1w3KDjI@@U!%1h%kA;>tcJZ_j|`|G6m_rM`d2%R@nkv8;~_(fFn zN1B?zK&##d9%QmIHRaxH>xA14VVln+`8&>COjF~qq0h<$;$onLh@$K~hlW!b7Rydi z6Mc%QpM1Tm9-QDC-{ixP1KSlRwMA3kSzBKb3xdgaxy|Z1LAADva%X#uQ=x&4f2XGb z&w66>4>#8HQ~2jUp99g9Q!L&DG9_4DHrLCBFzpVAO{CZD%A?G_LxQM3o}05|EMgg? zlK`;<%Et|QfcM|!=fi3{^P$NL3>qVqAx}z@W^qeQ80lurXII3^`b2vj9-$L8PGh!_ zL`2SmnwtU>x39}Q<+~tg1-WfcZ9j}>>_OAG3`Z2`-4@fOOIHT*#*&Z8zg_D$k933} zMFq5ou*%-B`aio&BPaAPD+)jb|!bx;xZ&e_A?TBSTQi)o%% z3h%2w&X2-Oi}Dw}1xymju}RcE_1*JunafS(Dn$hV019$1sr#DRT zU(iT9a+oIae+y)d7;jz|<<%=(!2fa}?*C&)%|*Uc3MGJWz#c2lVODg;hhSU3N|8RCP@`WbM^JRahOw(N|=-2h=E2t(7NJGfH;$uT1@82JCSq^S3QOq z?q(LWMoSG^5OIRi;wxsW(}l=-RaU2;iZi1rjHhc9iF#oH<=asp7+06_V0ft>lv2Z!C-vF$@+&uuScMKsu5o(c@ra9 zrL_#r`gQPx(c5xzoXo(9w{gW9nibn%x6r^Z4q8^RiwY#j`S)*Q?)`8eSZ?gbuW-c@ z;nwAFB=0CulMjOrqzl|cI33dLct%;oN+v!wQVtx-)W?}qM#C?i>5URW#sb`?HwY`k z`tL62)i7L?#~8!>&i={8@=VcV(c+vc1?bc)X}S{5xg5tR+|R2pv#u7?Cjmr>4beR~ z;m#_c=K9_?NjysXou@k3ILpX*ItEJN2kn4~B>G}_p407_l@utjU{D(zFnU8qf9`nX zx_B~dP&~@)&44pxb0~)fu9eOz=TNRB{D{>b8|43@4j~<_mzIBKx#z3SU&mt>3;XRf zmBYd0(0(toB#l~Qx7dXE? 
z->^R$eI`?c(t%*6|Yjq7@t=TbYdPg;5Y75G|(EZ;(P8%Y{-T<*8{ znxB>xLT6?k;3iNOnak4#++x_FQh!TmE3{Cf%V|eCxXr|6Dir=Qe&ZH1p!QwT9q|4d zRW5?>U&5zN(Fw>pAl+l9(C&IvHO4V(r~9q`lhK%7lfwyOP)KlpZN0D<21{$Mp`hUXXJ?txtTa9$JTR8qZ*?OC#Ar-j{3U zl#s;BILoQG8bnMS5*i)hmq;(wvmvnmcmOLnv$MRT*D$`>rSeI(w0!2*r<(o*^(Ezq zME#q7q1`wn6g{QdH66$Oc$Foiy6*AaIu*65vw8LNc%Z+agCN!`Gud5 zW8O!>ggIF&2^cThRCMI*CsAGIt&c zylyc@$<-io9(Il-KTpd<|&F%`I4|E-V-=#fNc1%ZN*u*~%P?Pa=8q z72i4$HbZ3oQH)H!y}|ooq(ww%%~bzJ!ab<)dyGpQhR*<_@&eAhbYPcul?g9hH8}{sQDmz zSsp04cN;AXS&5mo&np4mv^Ka)zvZ->tRF8x5S!mDsRTy2qqbEW4_e0$fz*RAro%hy zXIc(dJwKM&%ao{Dc&uvESD3dTsb67{0i-aP714sNN@$k5RvT`J-fbL^sKhMWK6*@j zWf&b$LZxr9zLzTd?JMVvRgL)FZHMfP7UL(Ws#03j zkdLqch*!<8u+{WGc%hnCwfDX4>&xBGUuTw^FDL(v(<Q!#~|88El zb+`GS3Y3eUaHFiF9Kr4*U#V%#es1jl5X?Q{e|O5BaMGSI9X@6Hd)l{X_%f)pN~khfK~)oMEkX|9LI7hAsKR?tRU_clIt#p7|$_UiNND zZQW1}KTf~Ieuy387M~8aeDp1lilMQeIw6?^DVjK+S=3-~1VF0~7GttcO~))916nVE zMCz^K{qEsE_nT+#{&N4i2Bv^;zTWOw3J(4~YOW=jVMh^RFs?|MA8;n{;rkJv&LcY zd_v*jSlDZL-02p6SKm9sND2RBQ4A0gQFrr>H>W~J2KrS3@I^BPS9zl~qVC7^!TqOY z%yl#fp6XuF+E!^Uu?vuB***4{_@8mx8{<319m0OXO9U2w>yLhN;mD=3Qd4u2uZBJu zfl;e&s;R$(K$58I-M#j<71V4k?dk&LH^ij+<_{`2A? 
zx~`?#edO~>ZF3&=#{oNNQE@kQK3##Q9CWR|4%!{#WY~wgfnZ)DYOyG0Gb_=_n_5-G zI@QSU^L#7w+<6Z3!1`#qgksYlWEckeBHovO7m!|aubnMgYeIj9zD~mYeJ*i5^7`=X zL6^i)sH%%6#-P$UQCKE&Ujk(-kU9G#E)Fi&xj%m_GeZT}EBf+>+TRAX>yZ0Q;TY|}E@8DUB^$rEp|#Rq++(V2h`f(` zBa>cR5vwOb87=bZuZ=q8O8!#Tz-Oo_>60s*D3cG7y?tM})wPNTZT&Hs9QL0jJq?1< zFU)0DJizdhQ^pq8KOXSL*2Z##x62%AJ!K8G81Crv{iPRsnRkS`*W#N+w7a2gO^!mdlb(~ z7<%j3t!w3I?G~118I(vj&JI=t(P?wz``$Mzj{1Zf@&Gj*xw|?*oT`QnVz+KpOkG7w z?vK*!E>+Ys7a4%#txAe7RUUw>at1$|T(0&skk)9f@*mzH@ z8pogwYE}!0D`V`;F<0>)_8$)sEc{unvDTMzK7^5TZV9BR=~|Ry#LNW86;Q}7&+IlO z{_hWG2R%C1Rl#wBd^DYMieDW-ash0ZXr8v{L+_IN@9I)<)E--U57FrAkTw1 zZ|D=c!MSYj9V#V7X+e8*?kzoLVr)YRLK5>#_|%UcA=XH=`O#oOUr51DRjzt58m=Qn z^pKbag&SbS%}|?_PR=o82oRBIe|=}Jb7q11@MpRm|4Zu5%v6Ok5FJAvkdre$e%3- z>XvqaV!8c+1|eRj3xZ4Nr}NI)dlYI=Bt=%8Rruqgro@|PJo zw6Jr`I&LZ9Cpf86KU+znboZUIYL!$p0U245?!^_jDH8ohT%SVdwDoDRR;Sq+Vos*N3esoYD$Ey?!AjYruIGOIi4{^0tQa0 z9j-;XFR5fQ;~8_C2eW%Ww%$QLqkR3v{-eC~ihx}HJIDp5F)X%tWh`{Pfu`ha{dpF- zjZufJ7isJLUdc@YKXh{vXllsI@eXG!dyoaPzMHokEm}L@T%^I9D{590Q7LD>rQW3J z%;7Mue|F*K%(ReAmbnOVBh+v226zovO+7sQqR@~Le&`!p9az!|c=G_&Xo<_+fU3Wl z7iN5@^7{3x44L&jA-yl_Br#B&M0IM_-Vny-1Gvk!p>`}VG9o3)CCDx@Q}91YI&(^S ze7Pbm+zlo3&q00yt0evNttvRP*b>7=nxmZ7^-2&Uo3!&t zs1afjila`hQ#XX0GW~eS1cBs>c;lqqc7uDOiL9YBIg8z?(z5o>`bIDNg1!yEKKX3P z*!ZBFY9j-hhw7+zCD#<3Aj_R&p5y>E@%QQ@ut2KtaLLf zaQoV#-N5#WK5z4cW!hce2MINgv)>vIMG1^#UOv5nr2bLRErz3*<$jTRNTc%uW5tZ? 
z6E(VcQP~e({WwWWYTpg@x|aTg?H5DKW+f;E<ZciK2ORhpW)~QJ_dl7zm_mmRjn;{X$Na?w1gzymh|9)oDE8q(IwgOyH}1 zkL7kMO}kZ68v+YWvgtH&_WQ=@(;>a}_G--m9#Fb?y8hbh)VHJS)AwwP3d#-QfZ*dL>Es(z31&(c;8e+!}Q zjOnQoemDQlGY;L`x;6(Nf?sbchmp*|W8guUsy!8Bfo^YKk24~z>$a3tHQwCxfCk%A zoCVd^IB>b$Sk?Fn?SP9DOM}us)FMjrG_vN9yC8${`elY4cAfm4-uPFmP;s6C>^(AZ&O{e4 zqV6&%U18)&6lEi*LR0)@`;rSn1&~^1K7GRv+#lfa#824tbDdT$$Fd7`YcZdmL;>Vf zJtN*yMva0X!W9&_se{aj|4 z4cauyy2cE$U!QW)S=2(b3VgGE`ROJf@k%*I@fXxUn_fTI4rb#mB&P$iRyhuJ*%M9H zIGfXqEWJp=3}8ssga=0__@{6gbK?sK@8}+^vr??zn2Vq=ma5TC#GV$&k{x+KduUyC z1WOURns|!-MCv58m$A%hl3JBg#qadU&=DJ`VZi2AIdaKJ3yoaS}MZ`|zoFHRpjbN`RqKq5+b8O>hSCL0!y_NXWXN|0}bHH>0 zpDk7CHK!y^S&T2-%+7pkmrHWY&Nj&d;E5_M^%0gv(=q=G~ zS~$s=u+2xVj)eQZ_1bGR&`O_~B);z_zJ(NT!eGa5)R){Av5knu77B@d5ex{fuXMC~ zf^pM^-VS+0f1o}XlhBPPI0m-`uwtBFGpT4)C-<<+t2%C8o-GEiX(F#pj5V6x#5X15 zFst;`k_-MoZ)2+wC{fF|rJi*jrEwA&qnfd$Zl~y1$Jy0)CORrUmm7@F@iLm}pK<}Q z%ryOulsM_?xPolM1aH2hiS|wWhc|Ty#swAWH# zkE@3`>n;bq!OpD1$X%zdLMrOt%?6@b--E>l3)_FO`%Ee1O-#*vMXFP}G1!09Qgs$g z1nG{rjx_r8FM|o3@;|*Rf^{J&)WUbE+Es%C-}WG3mgzc(bMk|{38}4s#-TWFd|du! 
z*1hz9X@6ey`(RR>x0S0#JUzWFkjdd4N@*?m#D|b}g{M~}t=YgG1oY``S!lqLB=869yFT)Lbpe7p866*+`QL?hBkDE%xRAzpF~|k z{2xksBFWew_P8Q*?(Je$z_~90;kk5hPN)=XN6je4{#QBG-qcih;}U>c;p@Y;mu$@8lxH2A8%O75^Vt$Jj^TXAZQ0CW9UTo&X=?bM9E*}NrTgYFfwV$y!>WV%3{y33_8u?DX_%g zp?ATg)RzXPSkv_k%k`|DDgP7>A3Z6?2aGGiiOG#?7puW$L$x?_7qi%kIobuvG&M!f ztfLY~^AfM5oliJ|l-teYi_y0Sv_H*SPsMF2dcO_h+u+N3+=7Oq^B6Q+8E|K_xIN@F znp8T5#f=OFV-VMZlIIy`BOI%#xfYFNE1a!h{I(g}92sc?n(+oxVMY>-QvQJ;jqj?Z zc1$j=1!-*L&Z*lFkNCM#ya!V(bp=;XxP$%gos|hA_xje3M?SwRK*UY%nnJar=*YDb zM>-5(T0y^D&v|_$id=5tfp+`5Q2Vr8G9e#7mI1|bqoi77tibe}-*n;8p`*?lf#he^ zUs@PKzTVZ1*Ql$N%;Q1|jU>>zGHJ0vMbokstS;`_vQ17nge0mKl6fy zRF~gA@(y-yUO~IePa?Ocd_1m>7g{llDCFVR_y;kqel8I2=&Q1Svl?SV*H!(C)bVfh zh`#7ID$90#^xQ`s`SQrcoX#k=Pz<4}(1cnok|CjB-GNs0m$Fx zE=tu5wLG!(Uw-QO7$aR@R_5l1wG_o3a(mSbr%NG}O76#}TIm-LV6AQUS*f(4G?$h+Fo!w67PSF2yoBP;?zR+Uk$`ZiNgAB9lllMZzjID$=Z9gvsI2dls(r@-EcUDUI{;yoQ3WmZkd7^ z&|4I|Ld}(El0a6dpZ1kkyJE$;G*CA@`AZA;5#1>#eVYxNz$JsH2{qoMk#C+*6%Oy4 z7jU%ucGC+BQSwMLlFSt2a0O%-9k;7HdoE&lkCfy^VUX2p*5b zcK%8UXnduaK19wkI)7@K^iIEv%#fcvdDc}Dd0u4yMZWkn0@ca?{dW7iGBGjt2;~(o z{G~~c+>kC@jk01lsoY1cRyQ595(?`Ci8IfoH31G?RFj|qNB*_e-wRx4Ki^LVo-I(c zbEWo)P9SRg+Up!}8N39uZx~IQo_DlMLHVELEbQy}MVDLTN>Pe8a0)!p7pHzb1fZ`- z4dbp*K&)0^qWzihh#ho)6@^e4dMciOG8R_=L?&roDzzQk*1C7HxWN0nTY{@- zy88U)IU2zzYbBESOj3( zkBpQWZ$)SL7C@I`ND-;CexmPBVs^+$26eT>B^<~P1t}w4$Wuq57b9~%RrE)F>vnTs zG3A?P@>zWxIxQE$<^)z#ni+@O77{$i4Ft>&wMlVnTDKni;!AfNRDgD={bi6-@6_dC zTOIi(GUOXU4GSoEw$rs?L_K+JbP;K-T`P8hYFMj1dy001Wc2=YpMFH@t@A&VJr&Ts zpFn&dS?BQP$QTJAZdQ_6>LxVQ;WQ{QvCeQXb)NQI)9}qd4xKT&b~Mb~MO(-&nAMe7 zkRmG-8~7}_sTa1va-7Sl#e7um5!JMK8{^1o|JSgZYqyb=$~K=g29o`2g1CRg8N-C@ zo2AjB9|o3}`Z+_XB){R$+BU?<2i&R+#l4z)6suX+%u6K>`wwj!3-T}e{xe!FC?E`5 z*WTe4qwrV%#zjn>x>hkCN3yN=jWC-qR=4Be^@R1zYKNrEvgia+o)&YJo_q)e>AKad zrrdgr^prfkU6}Yg)v$PxQur~9Mv{rcaN0|iL4&&hHeStH#V+U+X?XB^Jv~^jv7UL^ zE$+S(>kq3s%Q#@I(_&$bE_#T%eeXmZ9jDlbYoipxH*>rMI1(WD)zqEEJ=YHIF`Bk< z$&}|`*q=;qEfK8`Chg1rb*sX7vur4En)v83t}zIWxEIEFZQMN7W*K)eLn-YL8xjwM 
zUGko;DE8-8j?a?WaB;%j8ki3wVyh<$t|ptoL~9a+^<4LlWlaDkl^-hr>;efzH#Urd zh((fwx1x;+F-mXOhqon={hfCtmt9xXI~_dzT5kF8?`AYdfR(eJAf`js4Y?6xk5@vD zOhU!piME_@XASF69F=A1yW?C8Q)o$3(>H2|37G2;e$gYZW#68@inHIfC{?zW^TRVn z5`{)t10QldZDMRew&b*q1$d8l(UGEne9(%=baF`gIEGY9qcpWO0v^PtR(EGjDmLgS z+V`_7@6;u9M*at}g3e1eMmo&hui3YZS@e7ia-u|Jt{2kS@01PDYTe51F-q!|uWgf>2XYdE`I zfWEon8F^X=RKa0w#-X)z_rcUD_aFsu9BBus^E-}5npSBp-0x_evjL-x#gq=cOo#EB z7F?wlr5%e1&odAOK1TQ=_6}{Ui)r#;IMkYU!_c z3i@?;OFtoYrr_9++M;yzJRU zU1WK!twNq$N31}H5Pmn}Wqk@6Oq+pbVmjzSbQ-r*B(q6cV2?K^I`FVURCZUea9OBO zJv;TQJiaH*cyT-h;>mYzXcR(xF5_E$t(k}ii8*x?gvnNs48nL)YU+f7UgQcZv$ZQ( z#~1~nQWHWOG067viBiDmUCC;t@OK)OXPrTMiF_?es;IJA)39{rVQr#%_k3+i*cYVe4H4vX)V>*6DG?5#fPC z((m;lbXB80eTOCI^K7}y)wNix(^9%n{%OOo>^Fv0`L5m-J&iEIUC9sF*E7`{DiiO3 zzJxTzDcIwrSHtxoKR>Jgz)NPZ?e|lVCr%iNn}3#mYp#8L-%#V(Hi$rhTPOYCgZ-W>O!-<=aR{ z6s?Q!O#c!t{>S!Q%vj^SIMxMP`aD-klNxWG*sH%}$S_1uvI~1xq^PPN4ZptW$k%8A z3s=|Ly|?ED>*MUXiEfR9D>$mb=|e+PFQP3>#{8n>)AO%S0cwRqf{m&_lq2UfR#D0> z2}7}m1t1^`a7yvBAWn=kI%kpiyIG?~PLINehTD_K-X69GYwE3EQJ*lK-F1RGZWKH| z8uiCo35X7_5FO>TmE@&l-qTSaT#isxSjjtcaQfF6Ov4&9o$70A=21-D)6V!!lA2c9 z!zg3~4Tt$Vn%n3pXFzT5R=uHA03fk^BhX}}@4h*o8zfZqs9ipm{naY&@z6)!7cOW+ zk{RtzDIUX>5k`{$*8yo$^(sZ~Qb#DjN+sDHQRGN;=L|$uf~H+dS}&|Eb7_`Ikk0${ z7{$1>q{FV3wyNDBxFvdwX3aca2?v5o~vhc{^2L&UFKu7YN%s$ z-Mc@?Yc}Y!iVfE|31vI5k$fkqyBZ+#jK4Ca%Zt3!*6C#E_?fbxn2tqoomzVNVd6B+ zjqgT*T!Ordsh0WF!t&(%k7CI#>$YoQpcS)KMUhOGb$u!FW62mrJMx{IV82tciQ`E1 z49-Vgmn3GJBpmywSufd^8g>-P#m=@2|YLk#tD(V!W)$E2M@n^f5X=Fv<35 zA{phewEOAQsnW{y9w(8u32yY`NvxqLcUq_1l>{A`G#H+KM9SK&@q{b`NT<~U9Z`PD zpBBNqqE@I|Z|2Sk*5mMJm?jeB<;78UNDzffcQFY{$Ihyrfq&oqBcchMJ}Y4H{q?gY z@ne=}G<|($`cV2pyA}$^7@5x4sFaU~oEI*uwE`s;w$TqMi~r5+F-vy{!x;>66-tc@ zrTACC@XQ4TIVPm^`;kag88l;C_chniq+mF-5H8m>1>=>#wZY5IMbbZsX;NBS)=j@P zIIOC1fT(SMQymxe*`7Web)usiBN=z4yBdnIotetGy=*RHl!9F8#J|kJ&xgWT8nEOm zY5ixJk?M7pM^_(EMamAp1@0^l>9mR0X6H65j4sbGvoUGAB=*R$2j6<<*dPItA8xN- zIhjRkw0_+()?L?D#tf2| zq|QD#s^gEQd#ciWE;=~m2u`(?V$+qPFBR1rb?DARgiX56=>eU)l*>3-*2l0MBwuex 
z7KSH7HsL7NLTr0`F3PnM2^Ix>$_I)AVOAZuTZ!v=zu2|E#!icTy{&rA1>1Vox{G01 z?%^H;gK@!aDBROM(446pV`X7@YIx>TT-adc9?Q4 zoX~7r)RRgrJrnhv!ibbSGz&Wekeh!>!J}Z$T6XMWuA}4>s8dUR)7JwdeX>A9UZf_FC|(8y`$S6QRh>9TD`C9u7nTtY>zrQ z%3M0mS5hwKszP#ATJ(1uf-iu}k2Hkm{qpBlq2koKJ1KaLvU2RE>5K9(zxv;F-<@gD zea1NDawlnN2$oTk;3;_R-sJq<>9m40DQh9L1Y!ilpEjx236EI5$?OwTmDiv-`Hehk z!6}Gh0rnRxuE&Jq8xhBDW}i35kcSNxv<>IaugB=bN9d{O2gj@q$)1R9guAk}n2Xfg zC99<3s=k8(bMKtxyP8LBOv1rqT-uxb6Uo0Dx$cw;9ICYqf6|eZ{2$iNDku&n+SU;W z5C{Z!2<|R}Lm;@jL$D#ZI}AVW?mEFCxH|-QcXyY;8DL;e>fHN$pYBUnSMSHJ>gwLT z*0(0vn2w82`*cJJmqzeiH?VH3MyjeUCQj}(PauivRZ-|_*H)PJ8f%r8^n3Gyza{>W*5CMucXq zjfFwK3ZnNK<Gq_7WG@tuwY0io4xj#YMDde|RRiaJdnhE6t>>s@6Md z8>Bj_BC?F#_`9yDu@zQVIZd`pnw=zFtJ#=F`7vLp+YAHuV4j@PDT*yky9K;x0S>MQ zAxuc`*t?SccreO4+4ZWoejfHq{WM%qMNDOOi||3B*Yw)rD1^6f#uX1g1DS2sy$sqk zyxLdVf~`M*IiLEA$~%`wx!-s$6|$tiPXg|W${M?#B?4mL%UT@ju(Mi)zM&!vr9>Rg!>ceawyC2S}R$Nfp`8 zsY(O2n9(p5?X78p*{btBFM$cm%!$d`-ET@WD8WPpUx>!OAg58L-^j0<)si`#|HNMi z`oppKK~6r){zKzi1%oXfVu=pok1CZJVg6m>gu0ccoIOmIvWXE9IaN`AEJIrUUGpAV z-=mSkMlU%)fvHS#=6VW>P}EW@D;6tTcbqm#6nERQAjI{e!kYTBd5*NsC}2 zf1+|zY_fQhti){xzF~{Y-Vr?1DP_@qQ9lYMD7mP>bAm)Q!|xBa9^XE}jo~l;K2hx@ zOo%XXb=)OsT|QtM;tM&{Xa32Bu6si?Eh) z*&1Be_r&?vj3%uM3O?lwKB9^|O^o1+uF@0YN{Eec(_<@@Q2jzCz_DmmYgZXjK|>Gw z?j(T6coJ!rH?!lI{y{d>6*O)#&Q9=Aj#aeJN`+dIgYW4>z<)E_!`!X{IFbxjS_T&P zkyzU{{Ah!Bu3@UWBU7jZS|!Abw3RBeE>C9YG#jPNfN*{==>%8D+|NIr(AT3Ikuzcg z-I`qhqL-{+97rSnaUw8!YV+Qc43@l#O3(s}gF z3*kwqtL1;%!!|AioKW=8gMdQUF!d2fbAfCgC7Ra$o-sTdfE`-ZH5m$L`8 zCBE6VVQG*=8Jrk=V$C;YFU&G+*G97;sr5iZ4+=V)ZxvGB9s2mmvdSa@t6lA~_fBEx zd?1uMY|bQnVZ9V#Hl{;K!XD^tFXq+sgM(Fr^W%o9wD+_4rXiMgMjX|)j7P8*k{e%jHA4$JW`U zSgr-@_&I5bSpBvaiwo{lzfm1n1;>drGi|20?$*YA&V~1EN0JLs(hx_>VS$S&xYRQW z=w;>FC;)w>SAMf8BcxTk6X3tEix0Py>en;q zDuCz6w1)oerfAM`cA&T-ANGs-08zW79ra6cZm0k2FP9OkqnW+V*^#Aki<*HhU5q8D zUZAdz_w zN&vn8CM08SlO4?xCE^^cDD3UEp1{rrPGmzButkBc&W+R}!wa9&R8ZDt76OCod1|0a-O}Yk=G*ezz-X-IXwx7VNn_c!c 
z4OsZ`V7=FiH!T3{EfIppdg?M>`-2L_$seH2sly82>W<%Tdo~B!g}P@;r?bls$&JQ3UXIv@D>*(O5}GGw3**PhYwnO3CuH}l%_$n(QZYfss~g# zTjple*Vfq22_h2vV`uH4u_EFN3kyd)Cf7~J_qXp9V9@8tU0835c57!%HgYgNGW}G^ zS4zS{zFr|nF~43M?7|f_A3t$p@jcqvO6d}AJ$)i48H2X`+_5Ytf2%MbkS^!s)e?f$ zC#DelKi$Uh;!V62^)GQ1F_obfd^&i8URfOt0yCM~D77Hedz3WEWd!0U>vKYKwGulI zyiBx1wu7W4>9?wOj{tyobSGg_`QeoAeYxnv95UR0Oot z=(Laxe)L9ze2ZP z(E{tbWe4~Nuf%;ZMS#bqScHm5Aa?nVs|VT+N1(=@%NC z5D-p2iHa2(RCfnu=(cKa|Egb#-V6H;tu8j~p@tk-oN>pfkFC?C`b_T0f~`YybC%9{ z@UL^}FM_CC6hDOEf9d<0<3vKK$?Q|*_HOXA^q9Kv9m);%#7#jQ+bA0Dzg{HgebiHG zaVnkKp+epyxVV4asLg#MD?PAE*N9Ku%Y3pUWUPHeK!Gbe)m|@?UsT;r_l62*O7UEW z&RXq$COQKm0T+oD%`*o}=x#u(u7YjPQnQ-HaeZQ#CA%j@9k?z{G#`9q;z0O}6V+b@ z*nGRHpJe=Yo5ys4XA0Q*BfmE@n>{mALEqY}V$a`@A>TFcZWu@GbzVoelDdQ*=3;_WAkyup z^xA+wH$v)LH0ezy!fJSp`p|um_dF7kZr_uCuDr&rjt4$YJDf-`d!shh zyg+JI_yenh=DDR(<)P>)#ru93ncvU*_4&{lre=Bl1s&?W@BQN$c6_M2Vw5_XP+dfK zQ75xQY>h5ZV5oo{(DVEIzjtwWg5#|L9T>;PwR&E=@cjM;X`laShoZc^yN<~CHl{MW zLQg|bN4vx)`AifALIc6m1r5m?&v8GT?w!;=Zw?Mki!=nIlBg8f@7}k90yMDNp28BI z=Pf5|-mo zX7nabHMW$5Ab!|`-Cw8H=9F;~iblgfFF9D{NiJ9KXqJoA;=j?sF`G!5&vu$+Z$An! 
zN?ptRngrGv{LX^1GLkDXyjc2vI|trN3Gs++^~k|L8}0^5f2fntG+3)Sw3Oiq_cXh< zk;fAi5$i57KXdYa5*}}YAvRwJzaq0-cOmjCF@tf9wC413 zlZ5yS!{TM3e^?ng>1pFNS6T_sA6eQw&9MTi-@&SJ5ls%*_9u1|d{P6e!y-jpFgiPk zGjK<{6U;_n%|#C1E)0!!fkHQ~S2IKYs1^W9Kbg)@^1l2vv8M6n1gmHmp8xm#5!wEt zQ#zByAvPInuV+IUW9+8Zu-3J87Q6*O{B&Fq$|^pYUgE+HVcn7I@jkIBnJ~W2y#o}9 z`ANr@nc4&fIPLvh?~^E9PR@1|seRt5A=A+*CPP<3wcf{{erYco-94oX zLfm4A8|fF#)9XhKrP*%O#xr$up71kdY3;Xn z)xT`pevrW{@5u1RWoIqBkmOxY4E!1)Pl)j7aJw0#=@HpB!YmnLeu=Gt66h#kds8~@ z9M3=QmOaH9pG}$3i~Q)yX0R0|a)ZMR@Y~e`wb`KjS`*yJ+5Klb z!Lw-kfaaSsxI)!Ju7*v(DGRN{#=2s|L&q@tl-zk9Zh7!r_WacOLQI3G87hs)kBHg8 zwvLrQCEfB-N99Lo+pSYq$F9YKV9b+KF*3z1<2cOl@4Z%Kueb zM^A5mEH1pcAIbAjvp~dO@b#0x9K-}Bk_!j}k85^ZN8w-M(&Y&UT2AfOLR-L8P6rkz zoJ1WFND%l}cg^*Z|2U5ZN>c6fDHZqg(l#Zk)_KRzUW6lDeSE5%6V@2%W?Pc?L#`W0 zyTue`r{jN}mt;7gO1?^Hw4G06u+*p8_2zSIGroR)sg)z{7IF)#X|8rWL23<1S&c%T z(tUD0MY#+Hgc7N0CErsGB zLL^@8GUrtF#OW`NDI6D;>2Mv-nCCE!eYeg#25v8)+hz8&gfiLA-ZOg8 zMzgHuFFtgSlPzlh;OMQtUl+FqQEGny6CI?)=^jiD0Ce&V!;zfIaxT@d^t@s*+j7;({T1YcffWm7vy#9O7|L@zbB_5dQ8Udsc(;O`S?LL{!(ZI!NmPQaQzebz zl@O`5he4&($9L0G=01yy?nIH(rqc$Deoys+l&kggn31oBuJK7A$ph zx(z!2&2tU^;GLJ`eLZZv-4-!VSj@z22>`^rY9}YEoX;V42VCeh7XI$LTJEFLv1Y#g zb!Vlh*2M#Q(ty#kop0GqSakAIZ+0gu|v|UJXStM zX`waAqHJ5Z=|55UJThL^befoVW+cJAsxu)xPO?mWTnBYHTf3s)Dj0+V%(x}G_lFD` z!KSrsBDXu$eXnG;*Y)I2R0^VbSZfF;iDxo$?4j2FpF3@_%=q3#p)JW$^ZFBibPg(h z^0LW*(JlK2$a~&=CX+L0Al({_lpX^`ZE@vwD^hvt6+E+cjV5<1z22QG)>s9*A-$m! 
zGZP)ouuzj9Mm5=+qWn>6L-iVLz8baMha!v}bv69qgk0B|+g#` zAuU$AVVla)_QNEu1-advM;Sc9_(iH;Gthl1Ps(J+t)rLRBZfufyzkYgxen(knDwbd z?>k=C(wFcoWnB2$MTDfP-JZaA)gfB6T^;m2rPZ7VbTGWd zAt7BZe7_vE#c?C)9C{-0?6W+>X!%HqEN9XPn?D^*%{9x39fS#2a7qgf<}C?1`x`!u z?!r}BxwjOJSCVMYnHQd@K4G25eqL(xE@w4p1H8W!eC6jgQ1PHB zc2!1M0!f5=R7`4}G)9>cd1ad?#9up;8^jHj@Dvtzpx%B>#ZmZX!i96KSm8He3Yz4# zdH3MOWl5M3%yh?v9d<%cUeAByTW)5xy+Fssm23a@4g-J6S`miZ>?PNA=D-o~1Tk@< zv`g3Lhfp#Eh>n$Q9n6e4R`|mFyy@%2#^sf4@FrUc9eRzkI#s=oRr`#;&lPGTd45$` zyYiER7MB`o@XF`7C!!HMMN*F0I?dGx=5kK5foMvYz1-OyF86NgCN1V3G`+nF>z`^4iv zX7EW(0aM?w2-zy+Ufu(@3L5r3tPXE8(;84*K0IV3Ii_zN@9wJUiDqvMyBZXTK}q}J zL~-3Q`TVg*gIDi^g6_-*tG#l^#sn#j!VltDRnh363R|HkMf=4qIZ!%06)U7)hE)T< z6tB96HRfP{SwtM8FxGcJHbeivPW|KcqrvwMUvDUR)_PV@tQgJ-v1fm1M!JOBocZa~ zpLGw$TpRY26rTCXaa5FM+Ze!Zs0gicGq|`e_5bRoma$cu&YRqUu;VXvQ&bdX z)A^F`IYOT;q1(;0|MT}Tq0uDU08M}K4yQ|Yyng?)E;im^E=utk3i)iV*J*<#Ap|5pneRbmb{_FGLnsyf;iNzn#Glfq2>bNHxm|9ZoflBSvk{^a?u7YFSq10E)G>@?%BYtAy>5*j z#Qiy}tG)fTypnNLCgF0Q$JkT5L$?JgPWYskE zabT>#s&txchGd9rTE0@5EZVnP_rK zaHJWwigoH;|4(JbKl*@nAhlC@4yvYkBaSGRHLid=9;W2gVjiw9RfUBljTAe1^P#`B zJ+@Lizt%gTeq)OxYze+g|F|}eCDm?_v%{!XGU%%lsFib`!=?SAvTm)Se|@gKH8{Rx zhf!~rRV4RFFdJiK?6!ut;b9?Y0-*N1zeHG2zCvhcJwf&Omv$PHQJu_2s<*zr!0^XB z45lH@z0jr747Cux^x@c84<2}YO$|Oe&~01EL)c5Jvb{{(Wa81!B%GlcgvX{b8+BMq z3F+4y+$NVSV~Dc02#|iRyC-M}V zF1saXT60=jlgEHhAMFS>fa%$O) zDBQBY+l(b?@-|xyVtG8HN(ITKSu3fOCApSa^JvnZ-pe;S$&QGdEsdKdSt!uBIcMt0 zDafZ*UTYOF)*9u+e|;>JwFp=CPG>t>AW{H&5H!fh&JUbao(_O~eA&0NY8PvMF}!5n zpE%jqtLOJiqHy?Nr={jHQ%m<^#b`AlCy__2ym{7F;49PS<;ozhm?+2YGOnPPotlzq%QJQuh8EySr@H|R8~i0Z zK2JSi)In|N%(AeGe680gprnOsmdY}j_Ty6}^8g*2asM*M9H#I06hY}~%_$|_ub#j; zPtz6bavKe`!Pf3pF(wh6T&PEV+Z%f>j@7Us86cIi)An~HsVreciN*~eO@s{W^s|7? 
zq;|(l`n%;~uR5XsJb@*xg75tO_stG+v`O#M^0SsqH@k6~RVg;j()wU!(Q24diIZyQ zV5pDrh1o0BsB}*2SPjc!g`Po*SDKQuj+6`ghFlYRGnPcf zg|_;m(ys@NbJ*6R;%3ymK$}`El4;{$g5w#Q+i!M^Tc@8wkoH41Zj2f@>Y^!LNil(C zC#E%}ab7>K_Bq;REGqNMCz_fHQ}@$?PnMZ1hq46rL-D0cwADXv^9jG-M4Q`n0BcPm zlJk{ZCesps=Ts_&m+X5I%;C!Nmi)`B>8qo%a_>k716jr+jI*;+ZP`V-tO>2qP`uSo zqq%iQIVR$F8&G7OP~Hm`RW_^`lv&@Jj$F!23`bR`|GPTOrXJh<332g*Z&ga2zGqgm z5{?rR4zc<~93#y`2O5m-AHu$TSOy=U$B`=X`#Xx>BUy~9qVfLl?&i950AMKSU z#ZSBQ#?S=Lcm8|Q?UWwarDth~_SY%Rm&m9=nQTBI`}sp^;3lBWUc1#L^`a15)DdCm z>PH~YP}fc6vdw(Q6wCY$d%Id!0503p5@3-Ajw~sCVh1B~Fn>cCbeJ^*Cstu(TBdrK zikfzN^HZ&Dk6e-LyJdjnA1eGjTkPEve!YxIx~J1xoI z*|bB9-7<)&>~;P_kzdQZxWfbf+dJb|ywK(N!<95ym+pWx<4714#NncPxFG>@C;l^D z;Z(LirD_W;?z%Sa#O;-=V>M=<2AK3OzyTu_*KfM(W-$8qZHPxrgeVQ(#tg@cpyy&8 z`+b-S3DY4B97DEA@#kVk@CuYAv?x+2~OcM&}8BpHJq~ZP>i=2@Q z`R@vR+LN!ji-LD4pz;PBhl+YhKX`BwH$)Nd%8AYUi!qgDBi$95k&J-ynW%(C5=&zI zx&9StM^I6mm&=|ShEtfw`;iF4pHpBc{&+e|nZ_3n6{Fv?Zy}ZEiSN_(&p?P}#E4tL zT{NZ|yP0g)!pV@G18M(&zo%n-kE`KP~I?$LgYX1a&4MffReL*mNA%qff25jSnijBVjf~+Ocz*> zH3;F6(R*TYH*=yvFw~}0{-J?KNy!J{dEcNcjwe!$1tE^DF7T_t;ChS-=k6G}?oGwr zdrh#piGyE(BL;(PH`PGRVYQ5I@`1*g+$JXW6oxR=a1i=5|LG2EGsZAw6)65k->%W*j|HW>S4rpySu;7?nAn%{r@qW zCv+q2t|58e#vNgjEJ%{0ftJSV6R@vbp#KP`1|4MnHov!%qnoeKO(e3E=MUf)rL$4< z^}G)A6&QP2Ze>uHX^U2GIGmXs68(O8#x?(%AI8$T%B9E!b1<7Rj?dVvu-ObnfyOX?upqgso1i%B)j9TA$!NBfybR`07h*A@{^!gfI0q0{|4ixEGa zX^XzuABpYPNl{9tEhuh`T+O$M<5e1Z3it{Cm@Ri&R(4TPX4v1xLV!Flc>^V?=^!I&F%uS-RhRwUMd>QqXF%g z{wd_U+wM7VZMt4Z!yp)ZN=1!i$qZ+j<>jEJTj-dX_?q#%Ugk_2PhDC}Tg7BU89Q0` z&2%R4(@?MBvUroWV73eT9@fhbn^kQ2Eh>5L1B_`{>`NDK)$U4|)+?3Y=M8vo0TDx) z^I|(Q)Y&qEMWZc9klkQq;Gy!gmQ<&0_^Ows--c`nWD)U#d9kVqp?a!LSG^*8`)`E{ z9zp~f0$#W8%Mko`nAdM$rR9pC6e$=GBB8Tq$DEn44i#U0#(Wlu+Z1=8J1>hGr<*6X zy3{nUU|c#62R;VYBmHh5In!Qc^7W#n>n-`wxB6wZSXO zS6tRltYw@cy|4=v!{V)(c8$!`mT41u)#z%3?vMJh*M^0lu>9(AW6uv!OHh#kOK$yh z98~{MZ7h!RDtm;@S&u8Sc6u1P*&W?5oYkvI421fChS3!lxf?AqF3$@v9^~@x&sR2C 
zk;*EqK}rqdvTU;#Piui$dY&+r`A38hRyD?zXun4P30x!_XG}~hZctUcXL-?Sa&dg7xlb%kX2=9kK=Q+Yu<$_1TiRB<`vaO*3y7Gvx1?MTSl6e*NYH55+ixat@0bt0mGg?? zM!D~o_n{w5x3Fws#c=(tH!+*`{%22Cvh_G&zE|4{`T~Na_I`0I_e?VF5|=Ty{x{9} zU?6@+po92`p7eu@qjwyI`O;B$Q1&mW)ETSH>X?`Jb;5@!T`}neAU<8VTVZTsIsTQA zSi+grtKhH;x2Pc};DAB4eP1zK>?aZtwlTwqw`V*1Bw3(0D2bwL2IpweN?zkAq1wA@ zH*Gi}bwXjI=EsxAs-!3`;gr9O;5CNMGbYe#Zm#PK_`yL^_OARo&ev^%{(1P31%?bM zxC5t~CP{kdIRF7z-4P9GkliiJ)NUHi5)~Y0r>K)ibv6?@db9O!y$M%?PM^2>*At68 zuK7nT8#9}>!6rs9^$x|dDPf+_0-WX>v-mfj+K9ZfX4B6nQTWxmWe7>?w|kb~1cEb+ z%{aZtz^02)5GT4=vHtR*uTPl3fhkRC?_U?v!x!RBW*>T<7d9Ug+|l5aogDC1i`ZoK z=p{HsZjmKhUAO;r2+yKufv=*=0wlyqQ#PU;DHg0ULZF7$9Pfwck|tLcp}PVR;|Y&u z`UM%-MJ({_v~}-K#VLIRWo@#5!d5Z?eA!M@z*0WmOHCj9_X%$C$Uf$V-`(?mbnJRu zOWHxkSYn0+n4Y*Wf?5RX;Iw?Fo2p3P5tV5tQw#g3V3pVje2d1Y-4SGmQT0;!KD%4t z+CV!hE za;_?Nikf1>X}yr;-v}!gaaOWY>RdPLZY+BX_6_+08qT5(VofK8=)I68lQ9yw#9n9h z)Vx)19NR!MeY=paD!WPKOPxV~*%|fPu1x{%5q6~@hLOR~+L52M4a17&pHXs|$E>1? zeI@nHu-BvTO8$-s-J!%$%f(Dp#{dNMqrfU-LF|G?FY%{6BkpVW+diHQRbrF(SYw<|ShXQ2acjwA6M*Rqk*$b+A=xK8&pGWGC#O z;E62I$EfsSB}`8k*bs+}X5KS+{3Wm5e`$%@gW_6rnR=Sr=N9vGza+>eXW4=@&jpbV zE02Rmp8R3eGu}0S;oYe7kdOi~8!Nkw0Ke0^xpnZ-h*Ppv83v#bWo2eDWK>XtPp-rf zy{CgKzo;pO&@(po)Z(U8c#-473Q zAnQNu_kE&LPqoV%Vqa5s(@bk#%}ms)URqq zu}^&8eup%tU(!nO5~P_d0%N1k_1fFQ4Uv2GrFyaLvE`mOqzd zPJf|yO`7iSMPT9`d*xGF^X$V8U(D9R^=4W;W6OU_|3$JbkR!$X}1_6CH*jIX) zS6=@&SLNAmP@6(YKkLaSpQ}zN<#wC$tgVEX^g_#eggqu|V=f*%#Y&hV^n^O+z-&!C z=nA+MLVJeL7*~$tNA5jM$6D5W6IT6C>ism=-cPeWmA*--lz98ebZ52`r)g7SfPTLD zSd^-$o|M@4?Mw3&*lM;wV|AD1* zJC|iG2?S}kFAL*a!(wF(4Qj}qIW4@h-vj|(Y^3`aaBZNod!G>pgQ@Ju27yIyS9xlT z;B?rp0hZ`m3gJk$z8FXi)n?C2DHD$a)uWflT97kedS^z8eI-wO%P~Ab#W(2k2-_Hw zzs)SrZvnrx|KA>;F@l~9sbK^%4^%lYc3 zW+fWUA~?)JbFVgLG5locn$#t;-)N3;o#!$8euf2Ic4z>>bf=x4?nia_deNVAGiyUY zmG~u2&b&NNn<0+;)J5?wNPxQW0@o7Lgu2eYi`;-zzFW8;He_=X%c6)*6Z@waqZ5fH zF7BUK=|WX4FyM#cJv(C;j;IGa7gn%Qm`{4YNRDC8boh%357bl$9o(0wzyR#{lpH_4u&wil++ z4X_`vwH5IhYBH?Mh=w#LL6^|mj+(P^WEL2X)YMq5?`rvOl;?atoaOwkYFVQ8_gp*9 
z{GjRT^FpO2Afn{pzlJY#GNKGQ#rX^))Z~SGI0EvE#HMKNzu4d>Djp-XKWD~T=#0Ov z?H7MJQ;zIdzmmL{oG)vRWGUn;*gH_+F$`r9)u=jQQ|%6a*{47 zx2f!WeYgtrLuzRbHAVTchlZK@NfB@mJt|m~nPc!O0p)aZ0Z6)Y8UyQhi~VXVCSGqW z+Dfb1@S_B^pO3M8dN5hX@fkAkhaX!BB;#Cj9{Og+`nu#b3-08bBHQcKCvdq3O@l*! zsL!~$F)uoor&I1d>l7Xkt|~UCI}ASUNtc4?NC(=i*5eA-DKAyl-sNcFqD8zAChg6- z+mG5aJoxmOjU-A#kmTLfnYC|_5f|`1gUivcnx@nptNDg50RQ=n+g*}r;y?cNrxD*Y z?^+JAi%ust2pcYB1xxYyW+3)E&3H;L#K47@uRj1x*=*LP`H0$E@7K?mB{W*eA9T#n zBc?Eg88%u6hY-Dzs&5<|BMoo0Gnl*iVT#vRFj zW=xraGwTxt|4iHBbxh=t~p z54}y>m{qGH>xGrGZ7%c)Sr4NLqm}VX8^y=(50zeU_T=%q#+joJzCOr2LZy&)JtA*Z zGOFy@MiiXUsP2z`&UrnQoDN&$5dsm2KIxYV+$Jx}7H}&VO<)F>9Mjej zeEu9KQybh}k(7_0TL)Y7&FrLSszy)tV%-D%*}|QnE1ss&7oTBOn6o&E`q?F$D)v!r z<{hqmT4?YgEgNrYUvtP2I4X&Fb#UxB$cFBX%K*YS;C#=Te^;!rZ5vT^vQeOuyy@0( zkp~k2-QZt)Dq2EES375o+UToH4C#n27JRiHqNd3MvxivAe<3`oU%7M0>fDZE)Hth* z2N@iX`lQFlsoDgsyT@K4Es+jTY?h{-B($pmhRvMNcwUiXh6fXG#uSD&S2Kz0TJvvk zaNQ#@M-P-87==(T-chDTUMO?}BzL}j{zw;GXTr1^B~tR2fhj?1PCK zd^Tug0*G^N7HBgwfLRRd?+ft9ap{B23i-tkMgE-q(D29-I-+oy!{8E1Mhw&8YRs1I zW^4($lo=`{rB;@!jcydV$8%K$1O2=t@U<5k@p%SKfSIbQHF20w5a%kwYH_ed^SrB; z>z>O>x3+-tV_}kM_?V5*e}bl{*40$0KY1rVIAVENESYGy=Z{a)I9Lx?othc`bE2($ zZ*8gsm(--4>zm9p1+QkIN3eF~KDsn!dq{f@<#BMZg8LCK%hz17da!R4yG}tp^yAFs zk%*yoVfLb{y7l#H6WD;liMy>l>xb?R*Zv^CicdP!RbwMWX z&Yb9u&wZ4BZsv5JCrnv~FU`#(t$^rJX6Fa0R%77PMM1k^AY~wSq{E?q?2y(BwLCNmYL%j5{bU& z(OaRb5OsP;Nzg3v7g>wbhwMlJ)f)20xtLEYZui$RPwq(tVE97m1yFi{??>h3nr~P* zJz;)k)Uq^)|I~MA9Z8&?XrOs^RXrIXB$%v(UjBJ z=hO;Sy{(G#g1)PftN~Khx&3S9y05|?wpcV*-p@K<*3##?{9=9KG(93Ap5pin9GG?c zw0}OBEm*6QBb(rsQfY1kz;ZbJh5@uNd^o@NJB|33m z<}(Myl43ZhY$nAg3fFdp`z$F^?cxYc(;M`k38%*^Ukb(!5orhbKBqGWf0^mi^sYAX zqhrsdoY1TxTFRY;z~`-hClB~MrD@vz1TXfV6IDSV;|EwsESIb{Y0M)+@BSa6snppP zs0uTdrLp*;Uixt5OS0Y63@PH(m~(V{ayj1ox`6D=1+EpGfH+Lsl4kWgN2 z9Wk(Dk&~!2^ZH*r=&}y`W+vUU(eBq8&E?+)W0>pYH2CdsteGbQ(h2qds3QLn0q0-b z1@b#V6;l}$Hv2xCjCb!KGw;DTX9o*^hAIUlVc6?hocHOolVx}J z+hJ!~iKTznMJeG@oZ!Rl+E!Mr?>Ymd+8>f}fISIv()YT1;w6d6g1du$a@a;!R9-F55uProcb3Si=YNOH;d 
z{cqz4_JtTsLz0b*_KtRjJwSNO#qb2J3X`{buVdFa!tievRgk$u#~aFrwXD76 zkPgEUyIRapEX1PbU9ETWNF4VQAOX4@(kJPBf8S?_H;kz3}_(unu8^Qggb` zpkGlJgVi{$int$5mT=exS#~k_%CYXJgd*@Nnmbm_JU1fkx$pXHQb|LaJAhG1jDeduZ5 z^zRa*0V3O_`ui>iQaC&oY=4=45L2&{bp}0}>~TjMtiV&Cc4e3}(0Qm=C7A&YKNlZO z;zp_Tb-o#n*{36eaV9E6-%fm?srqti{Y~Y2N@>@(@u!`+I^76yp<*aDWSwn(kBOS@ zh{@|)JBLK`5!4+`o2E0@#Z1}crJP{8T6U6CsKwCr8cd$e=3@KFlA-;|H4@)&oOn$m zJI_|TvqhpGwRR1y9qW>&mG7=MOH=d|w?gi^W;%S%h8n}4V2rO_BH5pTclSKab8hB? z&wFv6SV#bJEjhA*Kbc2lv2)1bt}$(f5V5(=4Tcxs1gEofJ7$Ye%F_TQ=b@vvu;iTIUskwza!5V#*(=1+ArSo>?WXbv~4jh~kB#|~x%6w$E%6q!Bs!_{PBp*}0VS6;W0bvb z!V#+fk?oq&fnn;zh_~y@$4h(+rly8|XAZcKZ^?Cr)fOY;-rD;9Ebq8zE^ddeY*$sQ zY}jz|p1bkVx%Ewex$)|uWLS7bGpR!dKFDbGga)zNn0}4m`bv00zRO{Gs=4;QV*}5e z(;uj<>{5B@D_OOi4ju2YK(=+nc z(iR3&t@*gXbPZt<7xrzTkC6`H_ca{;RVD zD=dk;Y$INsbAN^Qmu^WhP~R^onT14?T0aHdD_Dr=#Hv zYHO1M`bz_~5N+GHMBX+{O>?dplb(MPmdWcnS-J)GrNt4czQGV!HLkVwvC&=dZ{s+5 z+}QT9uRurpEB%m(M7~#N)cQz=Wmlu-W8Adidlkk3Wa*yQ&!9^DSUErt&`9dhvb)rI z(0KG81n|s(YbA--a(!ke`p9dbKkJ@)?NhB?ODFTn7~ds>VZS4!kgovI>^CY=E6YmU zObV{TuC|3c(flcCKC#9REhmm4&2koF^O?EnI^_?c<=R#Z{x|o9 ziMMgD5fe(E{Gr{g``_1g$ASc^64TiUA0CCKl*2e)+J~FiV=u;RtF-fy-e~Kjabr8* zwT?M&PPA*V5n9jBu}L0%&trj#VO1TS?}XwT^e@#TA+~R~lFNS(+4=i@av>1}?y1`0 zJNU3U*}iN^c~nx^ARyQQbD4P(H=jC5HI1;h&qHF{&r9(-l8Dv+h2rHkFfCwNk&tsj z(yiL3vfMNrib^oB#V}dl0rQbzbM<%@`Jou>$D+{P6}z{Z860P_HvlCX-Kw*l`v zl&o^W_`ER0YpO{jruzBAm1f1|FmRSVH2g0RZEvOTqFA8ck?L0h6VK#7Od>h%pA*i> zC^GD9qktXwdRQWRE+neAndfEP{Z*InIiy`h3f#JLpPlAERxy;l-!Xi9<|E@A2Uu>E z;1lIv;COQIqW(NYJju%0{(H!NZ1IGS9>JmR7rHTJ!&yo8&2D|?f2y}A(UsYp@Y{<5 zOIEP+1AdOQaJ}y+gR!f9_Iv34Ta!NUgNNS?kN3olljUAA;hnxVWISchQSNCC$*`b= zZ75?+TC|=MF?Ega_L%|yc+F?*Ou%8&>5ONs5*{&q0|kGrOnT{!dJ`6xHx-vYpzDV< zoER9tcM;_H#cSz&I7~kP6NB7`3@B6qf_pX)zkLD5Z|n6c>kjthK9v0Tb8sB(QB`#N zQ(%o3%Mw1_lJo2PnmCTcHxm4t_B@_{B=LIko5kUl*65&xtcaB_>W>n+ngtnR*yJvD zAa@Rj*vJ60L0s6pVm2|$5rG1~$DeNoqmML--~N*~X<^+_v@iT-I*ZVLwZ4T|@bT1( z&{A{G5GsYmN|n8RM14_$;)hZZ7la*%!Cz1bt)d4Z%UlZuf9ob_?oF-?@jo>11xG#2 zfOo7v0+$ux=v+SK$N1z*bV 
z0bE9@Ib%-=PLBMUr}_2&0cgimlouK(T56fSu2oHi{l(vHmjcNP==!cZ(hNW0vgZzw zs2ZpFMxLN%`}DdSsa|GFn<^zo7E}0Bb;$zbbV=c__$gb_p=t(JB-&Z=506 zm+-Y{8E`nR$H3^@GzPV**2JFXCA=Se3=8v^U{&)=FiPpuigTpcA7xF&P>&E{%z5f2 zAA@gJlP!{)ydit>X+k}8RVeIgBBALr-b=33>(K(H^z8$AK1#1r{?I+2Gxu4WLu;6=x|TQvSdBTc@{2L0+59h$Fa z3RbPSo@_2@Vf2?(jW5OdWOa8En1|#1E||pqCAQ9haTp^wH%iJ>8F#?&g`c2HZ_JZ&kB>q(&I%8arLUbk{YjhGDlo)fua>GdrD1-j0Wc@9FFnp4WF4 zoIeOkh9L;+B0I#QkFl%K*@7E}3hNUDwp}vS->GicX|an8PJdFc8yM%aghrTsp*eMt z#>7-ToN+yDJf!Ca$*LCHz>&7Jdto;9kBp%H$jVKn4t z6gRGkPF3{t0buPKh|M(JD_(Gnztmxfm5H)b@AC3hQa!koCo;Qh)y?+BO#80IuCwh* zdW6^);8JIm#I{!~<#@9^WD`3$=9`v!W5vz)k=WZh*eCSGrF@D0r z22gmBmoJmY2F1lZQM~*D$ChSFeL?Xu7{*VndiX8N#y5RLO~V=6!@~=}Sg6}7Zt*H+ z%V~mfhlq|=U+QQS`Ba^qfu8*sGg_3~Vx`1tT|5pgwXS;2B~`DvS~6y}+n4jo7O5|c zj}+NKKuXux&IFK({ix{!n{^E3*m2#KvKm_V1>ej5sNl;Sh+Kg4LP98HQwk1fCAn5EKuXPM{cb6jJcfRkBz7VF=V4W~!ScZ&PEyyTCa^`tPM ziquarA`5udUgub+Wp_pgRc0PGJS^flLW!>k_B=ZpQ0lVJx4l<_r)?Xj13Z1uc}t0* zI&YqF{e-jM-?kC|%m<>H0Sra>YUPAe-d|7Hi+-EFa>_Y60jqHr=-W1~A!pb6_tvZ= zd^MUJXrGtR?Hr@?J;GPJ{VTpI!vBV^zVx#6-vg|S0G3JsD=g40eHm;*f1U$g)dE6S zGr6%j+I`?3b5xP7;C9LQT{nXY0C6P%NaPmV#EmKdPQah1&fxkIU=*-Vvqp{)8uAQC zCmPSgR(!+_M9n6eaL4ZALk?Px>&na4fE+^&fh@2&irhp!_^+>s2VmX+3QT!CkF#(a z4+_S>g#DPSB_DIU02RK>J7)@n30L5uDc&vF%mJR%lZCu}mM|&~Ch|S>PJI9_$A}o^ zQ6UatdOQGudipBwOys+Y8QegaLv|H1QrWP!Qg(KM5`a^2^Z?_GNru znw^cgk2xowE#SgbmAIyC>?Zs1N1RznUxHNxTf)fgZxy+LxLkGS3}!6`sR0F1EU_yOJM$7>X;8j-n&zG7@NP)hu?;-)Z9 z`3+LNcuc??(XKBI)JxFQO~ob*)dM&PYuul~D4Oyf2;hR~31uM6o0pZi&o64!^gR_#!RZ28udrmp(IMA=lYB>DMecp4{_$3erWN23L&Qv{xo?c0Df#y}cg;sFcC2#qWP14w5| zfRc()!NdEa=4k?ZSq56^70gO@E493Bvz6pB6g3Qwaw6-C0zw&Q2pqoKB;4$x!Gj(1 z^9iz5P4VB{~fM6M_qf=)vgM zLrt@9kd%AyWTG_F+c~yP!jv4(kBt(b@;;>wivuKIiH^oOXkt_GzQIkBB~O*T=r5L+ zJ}IzIG^lyI-$6O_O#~z51;v14P2q5HMm_(XhxTx4wuLBd?I{~NQ z*S}aSc3=|msE;`1e5Ef+&lK2i&C(`oA9MCn;KU&&r{fZ!sb>tF0CV>jV4)>42D2)< zICgqeb}?_&Qdm|D?NbVS1XBzOgMHI#uSW&%ynyf6EaBp(6)##O(`->))x{w@w2L!7 zCM2$0WqT2fyQe}w?IU5N9VOy;klKiXCl&UN&auR>m49@NjiR#z=r=z 
z${QrJIO8e7xmT24s`f?kb?sj8gy3gS5E)v=xezfgc?Dv9jA*H`;l@j4=YG?;jCa5^ zqAPVb-lyVa@94%Eoi?aYY-!++pVZm?Ca48&GAd~HVP=HChIkwUmfqCYy z^XxUC!pos6bjjIW_R~Z$HT2GWg`I%G$tR-Yra{!_D`ZGG{e9i(6SVO>PH;|C*&9`> z7BvKWFP#mCBz*`Vbd%)VP+{9DoH0+apAeKO)v-~027}PCGnlP1*NxxCC}efItB^vsy=FZ@fZ_$8Wh!5KepZYU739ZP=*PH?l{AA<8_^oE8h6=Ap3P*on^bh zRaFmc#SgnJOI@ye0lwrlr=C775Nep{xM=Xgt7Ud?8}zm=xk0<;1pri6cHsDIV2*ap zIZ)C+k(_TxRj@TT`ABE-FsR|-Dlod*ZMn%&L)pAeo?Wk-%F+%@(g%Ck%b)=tQQSjW zJJ*Um;eToo>3}PLR=Yh96IHOm*cjH~Klw0ISS?u36fP1~iM8>?g6M zvaRlKGky@~tSH?EY$DIeJDG02L}q70vE40V1`O$oU|-om?VRxpgFWd?0|g)|DuOv; z38Iz*>1=y%=JE*uJfi@AE-*T8{$m*xeD7%-)ydg34zQ_r#>y$=q-1edd?UX#uTF);4!}B zX1ShoUg2xx9s_cKpv;6AVX6-BGQd{c#EZ3McM8T6LVpB1W6GEEXa)2dTR`&kI;r## zA*gzP+#Dpf_eK$D-b}8RuE0lZ0&2OO?~U-THuyyL2Vo!%vP~^$e`e{N0ASlQ$oVPI zQ?KwX@vH^c<7KWd(O_>Hd`SK;S;F(zxu4<=5rLUcmoFc4cV&1gT#R-CnEyduR}SXEmY#gGTI<| zY^=iRxb7oPenLP~0&7vc1daL|)lxpw`l9??#mivZ3qr^EaI{R3Nn*$aX;y*(ery~V zn)7$dk3S|LcqTL=1`z}s{M#HI0NmSa6b}m6hjBBN3Bc9?HE5Dw)PXM3i+~mKPRq-O z0I1$-IcE#n@710doHBhICS?;IoE)#>oVkDmo4ArMUnRQ z^A(PL211*>WVq>IvGn6ypxD*oUeS%864+IkANf@PnXt#y;DraGTesVQINxiX@oU&@ zymU2(9gc2=={iRAQ|dxkkZ1zWW?QfZJ)*4~08q7ihckh%#}jy`N33SQ5gTz6(4s># zc9(5KswE3}`={;BG}+kSh8n1*AK{@pnO-`T2O)xlPV~0)R*y+Uw zYmO$>gI9QpyBsarBxE2LNupTlpJ0yxMV}OmdqpS+LJwCuUiJ?SD9lnDu-G$7x~UKF z&dV@r_W(#fpt28ja=Q@|AuU_%28}Yn8{cY&7E}MOxsZl3c=(SBxgm>nm=CJ$+*)Bb zO3({u3U0AK_{&hWo`09nu=?|MxA3emM1<=Qoc_Vk%oxM8722Dp} z2cRQzpn?1v-8o$_7CY#MY7{BX*E`r!ApHpQb`z&K9^@3A`R#U42FL}v_s)u&{@inx z%EnzHz=>*WZu%b@yAy)ux%iF?jYeARQ)NpBbL@P@)lOo0gi!C#ciPqbLyYZQ;80E(}wpA2$@;eJ64&viQ8ViktMEj*P?sBuO<1X555f7*@_xCRjKQeKxS?Lr}+ z|67BdZIZF6N|BN=!q}wwW8HaLsfU+Q#6vmJ#WlyN>~-A6XeSB(56ul=AAe}DbC|8G zlD)6ZehNqgHMD|Lk82}W7u&OgV~%E5<8wg5t#&V81lV5~rg{K~C}8sgV8xSoTj>$| zlHg5;t;an79+Y9;1}OTsU@od}>qmm8q4BO(^Lh#b0#V`2XMwK(v!12s z-#JEC83g=%{Vg-DEn|UCw=%hA(9K%WMm*^Mhz!Qf{|366Q8c!~eOdkg!(2u273PYw zh&)9=T>Sw?+87m(FDmAzF5~*Bq4cCm)47RB01VRB9~C8Fipa%uJ-|_XJv7&J&Zs2g z_=A!l;&=NqH#Ur>bNV>Ggm0Jd6&T%tBnioLwT$aen^3?6a@^!azAps^>CIcg9dhfy 
zetCF%$aFfP$h zEnZM2+w|r6a#+eg2ZewQxvEwOyX>&X|X6i94Cg@m9A--TX?2#-nNiwVH zrMwHXPN7IYehF0T41ta*`K4ZTm+q(h@m3Q#q$bHgiX*caZ<&_yEI>+%wh*Ga2L>jk zXw^i%p1)FhA@FXPDb9_OAHy=&plEw?*lkR6PCJq?fpR1+qIn0>>?hq~OLY=t`W~{NsE{DBEkIknFd_?OisO6vy{Z9zbs102lQObDNafi}yipBa!ttxJ2eGP$ zeF~x8^m@*IRVF1`j~j|}DPIkaR=x0XfxFo>e`7L?d^Mk~Ho|lQ=;#@OUkH+=T*BLr z0q?#n`}s1-B;TRE4TKBFg`$-{FlU=aIb49J>QMl9710mIfL|>mr^KalBMQ!hsamWX zDWYII#D$PchX! zY&Uvc_5kyZ<(w@`S*8vM~9RES67qeATr9ZJ6Q6Wj}M>a>kJ!Kzh z&$!7?xl?s|Oh5kAvgBC+KUdh!#}Zqp1B;*=;_L{(8?cr54$(PHQ2P`&KF45xZ3FdD zHB-ST#`vU11YW;Gv|r$>=q@)|2^)-0=@KnJ%V*w-^EXKzM!|Ska4vzE zPm~*R_!3OO*+w0LOHl!J1?N0vVBqWVlS&y>7NtfUPt_?9u~4=NcK2lz1Q(@xBuW@G z!_5eMyvpmK4Kpax(8!?_yy8By9xs$>rlbd1Za!9^`$6IW`{UO@Y9hmKZ>!^m_R(TB z1qcZako@ofiQuTMfM~>j1aJpC{#Bc;Q1lxV<$Gd1Un)Bbc+~_Vd0Js(h~oDNU|&^0 z#S)~L8fSdlA`P^<;uZku6gAmHCwm2y368VfJRyvw7W+(rT>+3d-@pl< z;$b)8U7iKT_;Y*Nz^YxGF{qnF9i|FR@9AFr4T;?YnAgj23@|$qz$Ly{B%5`XV84Qn zy;)+vvaCw6GmK#If>V0pk8TB|A7w5v|IrL;;8k=$J~2mtsj++e%`lLr<+*ynJ|Pxl%`P?ytBQSH|;C&%JNgOxT0iDR!h zmNK{2I)2#z1jgLDcF@=~_>~6LQO|%w0n7n!2ht!p-14H~D*K4Dtx# zNY5K`Uy_>|dl)Jx9tF}9Jz}B{mvLTv39S01j;ZX1cWeGghxP%Xa^%5X`2Ay>SU&rU zW@^W(_AMGa8GTb}1ARDD>}ehfDED;x>FNpxG55pnE=&1Pa>EZLFM3%f z^`$`Xq&HMbtTS40ZWUFf?&dSwSumLNhNbRT!Ta*;H$5DCRIqBF(0t1`{2%b=Jh$*^-g#?z|h zAkr{S;K^OpIdAy7%s&y(im3hzOItszRcqr$`&s>S7DC}cD1>);Km1P zbcV5P7;<3{u~z51xC{O%)3*NagsUR^ z7hhJM|65;H9DaqX(zu{_o|{ewuxe7OR0L48XBhLlP3M3vXK=k?u*1W(j2o54Gs{?0 zUreb|1v#oR{7jLz(p4A_@DxBqOGfIn0FDY`GJmbe;k5C5Tkr^Pyu^{G{5j9C?tblcoS}8S96EJx*0fdr?e1DeBC8o5#->PkFcTh-N=;qHT>^+@a1(v#p?iSA| zetxsW>96o4eK|D|%mJ0d_f$h-hm>f9F9`u6v)yFMpcY8W`bh(zakJeh;#YV!NaT!3 z4&}80xI4Sa&fBsM$ar#z0&Wqvdmpk|!m7Jt^kw@*>o zn~tq)1}p|cFC9{wIeQ@_?I7^MUyH1Vy6$VTsXl5pvf>Uf?OpXlVaDyBi$0ENBiap>`ih5N`ht_!Y^*@qm^!efGSx=_j1DroyU z-sn=pm}L_sehIK@H^AVJG4t-QAguBxOzG?of|t!xj7=N`EVkoA(U9KMZtNAV6MW}DUgf1MYERtL9iv;36y$PMjlDvA2A__U{#L`rnaAC zj}g3Ji)}O*uxl;1HTzD{PbbMOOkEJN|E=m}V}yDtNzO}rC8ci)&k)!?ur+_KTFMg) z$5Ui4rilpN1L##u;L-LQ0RFqgdU~u%o`BmeG+oZK#ZHUoyb4M%Rd)VX@uF9GqovP^ 
zPbBEGJ1nobM>Hgxl+r8wo>9n8*`?;ulAb$7OPbm-Hlyckk^E%9ft2tzumX{ld)sV^ zLlYqPqlW+&K)EXM&UnA*IfHE_xAX;uEjKX$QDS(}JveP5Z>m#d7n+nlfIY#$hz#+Wyx=|Ec?BF1 z^YkuJ`6&s&gC<>UK(sP&}6HtpgO@-tkd|&sHvjiUq*itFM;(fi{_;njt zrh2N`2qS>RQ%rpsu|v#GcAv{RZ_37Ok%9@AIei%g!ha51`~p31T+THcaNr}2YBZRKa4pN$-f z*;qBwAu!a{;%3PiIh18XIO9p`c;(5F9?oFjt#It1c3at7$ImdN#&pgBF&r#9_q%Mu z2+02~28ERp$jH?lY#zl={sO*BfpNuVPYRq-uVX$)XO9G?%F=9((n?}4-Abj)X&a?g zo&7=QEv=~Ed(&68d`*O#I^WxoK{frK4yd!=-}W9w3%}^RWjgNCf?b5LC=}QVFP>el zOQciY#hJM0&ChH%`KkW{Q~kGPhF|zm^WQ?MwD}dJ%F>+Q72qk%%6uYnR^?E(R#!^sn&LNIIvH0kDis0jRoRRl!w8CGiYymw?~xBaR!*n*+RFDvVKaK?eI$S~dU( z5>bBm0Pj-rRrC_yotCT8s=-GbHTA`y!*xhGx*m}3Bd$?|m+^fCnZn=Q*V~cBq&zP&lCvDc`$(e0mjUz;alvZM9LHB&+(#0H6G8y4xJ%* z05>4yW;M8zm@?&2U&{Lt`LANUQ63DCFNY2A*2oa-ypLRZjx4oM6J?NRSm0rv*Rz$Zq3 z5SXC>0~lH(&+z2pOrf4kE^VW7!w#GP%`=0N@IS^-lny-3Xq30f>SCinr;vlu$E{37 zk{SiACj`sPchgOE6>jmEK>l$*{x#)l|5_cJH@~UjRqN4FA^@E7Yqc+LzAk&oqXMm@ zF_n9%0el5;q>g7&rdDydh_CFclnM4I-{{i?V@ex+`#lE~brzVsy!^04k?Q6tt_Rvqcvd zv%~24lkqHj_**&7EoGG~BnZnpGIZD*8(kba*!E1%z z)M%X8&d_}C>3w=Aw{X#L z6BYvR`U-CwT(9^4>NrUq-yn+%Gm(k3x0l3&`33k_M@gb;#B+)58WQH>1Eah)_lL*)>zDr~~!3un? zxe+;R0QgR0wmN}#OApm6yzzHh9al-(a6$9QPKt{8#d!*z`i0gPG>4l`YOtfbETvy@ zgZYMYTN`_r+0_7)ConYfW%?mV1M&YH^V!R3%|>fwMNIzhT`2v^;MyHgl7Eh81eeR~ z8Lh+bx}r^;dR?JQF$Dq z@NJ7-4sf@|z=XY+caH30?QI-e0X<<`Zg#ZFatBj>FWXX%5Xry4kL}7p#_i@KRdS<7VAmw35IF;07Cvkt6wo?x3_WRU;mg>-;(Wi%S$gZn9y#ipvsus?3zKmexzu>$SW`V&_lHD5$0}x z@@uLGTP_-y100&YDVxfJJ>?k=Ezy}yjxqH=E$2sr*sOL)<8YIvRW3;OOFX5iv7bSC zCz(+u3I158V_SI%&j3SapH?W%id;?)MNo*m zQ)pZOl=Tc{T78v-e)BB)t!wm}mE%tNobuGmqjflQP~1Cn<+L4{Rtxm)z0+6jc}{(M z@67-0qza<{=A`6nNyIqS@qkTLzqyQz|>H6a;o3 zD4M}ct}S_ql0RvZ!py7yX4(W4zZ{oS42~TA9t7J30O+TrDRX2DNBKmQSMLFj zQGVg5sGwz7fb5&&$pd`s3nk!Pt>d06g9s0(6pt3xeZ@pF;b5L-NBX z0m_1V#6}n#Ejok5di=I*5AYgHRJfA>E5<&NU<@eqMf?yL2hfZ>p^?%EK^?t(SJ^0) zLoJ@G*kq7)5_E|7%g~%PK%kci6ZioTi?RWPGgLevlX~+6eM)x zp{H)MzF+DFdI)D42i`Xp*C20r@g|#L^EizFe(VkaJ-gxf`l@Pq#T# zC390g4!uW#U|QgqQ!dJ7e7X9Gj>ETQKbxyq>M}~#oAPy?Ansl|#LQD0Vct|gLR%?) 
z%Ejo-gEV$Ao?N^kQ*{WC^E~eryl{?EPsdRXflNS|nu-?)=i17PJdQU3;o6!{>($7A z7KMsmj1f3=I0;NTRfer$F90c}GBNo|^Lc}k%$&n4Kb|UE%JHyVM~cjb$_$}0OOlX1 zVv?Y;ie=X@xGr?5bG*u~(d{a3xM!C0)9wb%tyn;FZJJeypBDOx>|BA8b?e!E8du8BIf6005^8;5sNDv~Z)HCo+vfkNpa zu(6s^4PV9_`&9A)R>+~}oFORpfaA|sSPF7H4_ z5V2ZJlN)7Z>?9S+dmb&wm?pa^fGNnaA1?qd%SV}RdWTh_s{9a`l%fF>`KU=bSjMq! z&M{&&*I-9Ai7=zyu8{{QI!kDz$@MyW7B8|hz#H^NdcHz5!#tlBSioRI@RG$2Wf`(z zstrgXd0^$vNvhFVu~%?ah#vW^$yY~_E$0 z7uu2S>VQynlMiAqy=>S^=Zt&mrIkEUEn6OisXtM*)gN`Z@oq9DLiquubEQuTWy7a7 z2$+I~pdaETRav*i>4QaQB2URcje@+elusJpZCS!8YMXYdJ3eRzO~JWRHqe19Ies%I zfnc7hvgN!w2i^Lh4z_zIXM9+*C-ZH|Bjs6d^6b8L&iF#N zVJ^c!;ESPsWMo;F&5`H0Etpi+#9& zfREZJ?&RoMw1uV*IXWlm+1 z+kAufV_)R-$1Qsc$1hbRd@++4I$A$oq}Pm@BsK;6cP&T!62-1%&j|K%nJrcbm)Yux z?TsS*$Y4(lcM0i(aW60# zj%Kf2Iqd>~s#z=1*wwpc^|UL|2*8Ek=mI)(&FTpY26gq$qU*YjqD6!C+c(k$gW}O^ zS5G4pHgh%IdiJd4xS(_6+U@rLTtXN$|0@%#Yey#5U+e$B+3%Ez_0>ld{M1=MRKQB4 z=tm_0E_=mIe%DqCd%22mbVku73{+nhQuJjxXN(TY1%H%!8_fXp0>&|FWW_0#$eqp` z3;43UL+Km4oFA1#eB5vaOy5MlHwz)jukoXED%TKE3~4iL*+SUii98Sq8AvqXOtTh` z6&hjkv{0s)1b_pR`0jW!RP8K0Iok{c*c8|-4jgqd-;-^nOyF5~ooq2V$Dt;t@LsT( z@260sQOg3M5#vS5?1Y3#W_NUg*pmi=vzTY!kto3*FA$T5jMUBqzFZsMlfRB>_zU2y zreJmBt$K=yd|yz!&eyXCMEeA>R&2(%0pu0;gDbA&Am6nRnkbqfd-<=xFZ%^^RFY1n zi05Ry&t$$H_X`l!T0VrzK}aq&6i@RHw)Y%}QQ?_ebXD~o=iUTmV$58VO!{68fxZpZ4McYHaI7C(GOR2CX&Nyy@0c7>2qpdlCR{P;Ub&DTiqcv z!Y8tqJO`q8ssiQVY>}7lZpl=y z;0KyncsGW57uDF?bXIYi!`$pZ*%_xYl^k`W#lB|d>Fhum8qT(~@$HQ)aSZ1!$@m3x zHggI-vYEL$3t(l_&tywm(C((NcyTDO^C`>ZCDl!zgiZKNHX!Ra%iWauc%T@3Al&>spsNd2uh?I%XNO8OTcUe)x(1!f89UpM zt?q0RZ(pRc#|hTuDh3g^?Q5nN0mehWXScU58!Ec#M<&Yv?UGOMs&7J-1OoFM9;U{u z1#1BZu2Pu8I0M4Fgs-JbD5Fbo<_UhhS~j(rZ7vS|TsMGxL?FL08ooa0fI;Vs>rDGC zUZv3OfW=M+V%^+Uce6-fKgHSa(BjSWY&YLeW+yrv+YgW5jJurz4pWhg)fKvhGD@Qs zHbTT_Y~Z10)Kh?wCqted7HzO6ZElBK9Ku+L$KCJXGkb&PsiKW{9?{JiKQ}OdYM)(x z#1x*R5k*hjEW62gizzZ^Jf@)^)VKy&!4O4VP&OD;1$`AX@!aj?Dw1U=O?5rV?&?8a z#<#Ke$a6TNyDtb%f`I)%)2~!0`AWH6LK9HYMDVhSCDJ@nFI(6tl)r}2euGyJrIW5y z{J5su^zb*@%VV~vonshgOp-l=grB^#&31(;|B@NX?;y&-1KnP+o5?D=rTmtTU9lt< 
zzwe~Hq{T|v!oE#G|C2RanJM=L;W~h(^8|KjyVXrKZjN-EiAp^@PIR6Y)U6eqFqvl$ zqML@h07E*M1dgAxcowLZc5Ry*E>>7)ox`E69V|I_)YmM>At5=85ub`NAw z5$M2ZW1AnH59qyuES$PUcB4BaXPC%t8sxwzdeMazO-OdKXm!KV*FZ+N%7vR!=uISC71=w zHiOn00<)D!jQ&|r-#+{Q$ks~oe<|OPC;xusVf;_*6{J+YA)xXN2`xG*@lfe!N-2D} z13azZMu+5o!f${?$ruYymOo21Y`k*9t(2 z45xdTz&;MlQ30&ko>{<8-NN-n5pGG@@uPD}hYWGUgghn5BHnqEW>pWK*OviR`JfTz z=m-z$i3y-g;(JqAaWdUeD8P!q#Qle8o+wg;KnEPd%hKwvdbqfB6vd>S&j*rQ0tJ4u zXrC|C0N}vRU??_FdgvCI*hPG!2sYzg9}xQ~zG;@=ssQ}S@CNaNA`JXVu;$UK5?%QC zMynP|a-$%q7e#`;l|H&VoWk3yfh7)=jCBIpiufVI008$>lzK9h9Bs9b$7SedXNz2q z*3jJSPCo|RYb#Nb(WJn*27Kj1rEh`=1AU$#Dj0^axdO;;GbI^VYM>5;O}sK-7i9EY zA|ADfJR?sFRMN!9`&+eSuDWL)^HU}(&L4%|5I~U-2bFp_PGp;)$X{2Sg?ugkPsz2&;$H@`_o| zGI!{hc-kt?zC$W~=BL0?rgumKK?-9vHY^U;q1tGO!>O?S`x`K?^%!Pso}eA3m*Amv z#Xdxd@MKOO7TNi&WiaX+e0jJZ`VRP(&^?(MFxo50-EoQtoGsbkkn8EArDHQWTkE4HreWOgAqcG= z=M02lu`962p9CPeZ4@*+M(PWrb2WA?Is&q9UnJC`cX>BBTJo~vbz>X1!}x~5Qr?TZ zf#dh0;sQUFTP*>mb@6-M>c$856o%}@i!Ang$JYNO*aU)BamKn1>X&!}?}lv#i-Grm zv_Uwwa*ZayINRFBu?qx8vq14r(7oUf=*!idy^YhJ6B^CpA%{(ch?(SM1s=7hw6pmz zpWOt_6wVwQ27gwZM?_;CRpYFs5Y%9DyB7vvF~ddYNJ(4ByOBfbB*vZ!ri1!N^h@-C zQkMUTr<*iuQ!)vYEcTGd8E>?g^Z0#O&-WECI7~90(#UP}F&-7Y(axS0IQ=ksxCVgP zoJ3x1O$Us5omVGdV4FK>w8TB#M(b;}CQboHliDzmCR)TG)pdf`oWeWD>1-`{UbKXF zuB7~*dOnMwqXq0F{9d z1^YsEJO?TOgZ+&`oFN*p|J9st>kHGVc;;5lm|GIRY(3z9MPkQzpf;b#K;<{eZVLVr zM1oORfY`O-IH!^3pBU^+o0dCU>KNNihmrrTo_x&VpIUOeN^uM_I{1h5Fr1g)87 zdcpbNkKJ_|^$}VlBiJe%esrg9YOg`H3=!FIP>AFRT34a%)#hfQ>ccK^>;%!Qg-;7^ z=8J4i2gjx;wkF8s;VPNvg9RBO)nA}xNGEjhA(ERNuCqtGIrb;3#8~;w%u6MM9XQBR zr&g&_&hF^(cTGP8nyCeCP}bXI_UqvsyUMi5Fo9lPG87VZv4uaY0Z9SQ@W5N^Fm29Z zQZ0W+ar4Y_V7Hc%^r_n`ePIl>6`TrI{0KB?uvBr;z&;$w8OIKXepK(Oj}7CqI-E}p z19Aw~o-Bb;xW(|GKaKt5au9Y?++@!-=S~S+#M0)eZi06?6W=|4lr=hN9sy~4(PFdf zoH2GZnU$LE0UkO_d1Bsg@mgt#9uo$JwB0&rLo#fQG-zE3kmHNT~a-Li?<^|9VVg)W+8h}H2$n$rdP9*#DTDg-sGt& znO{t%+?vJw*eHM=Trbp95F~s{5C~(VutS1gtH+{D=JF+VAPR#AF{wKw<79F~D?Akt zTF3+0^+)MU*{tSMm4l)%;-?%!xl84;^y|55xQWi23*EPX@6RDU2657!WIXTZ-xsNJ 
zyHUe|8&C@DWjxnfCU}=V5YdteuUMn9Ck3;beJP+NkJ{s9cX7@Br684<8ke8kTsFa5Ue zCwoC%7k>uLJx*gIZJxaXObVRshLOO22fdn3mjNcaivh(x4k&d4?7m*t@?^o*(HFAV}N5@^>Fxg_j5R5qp1?0@7mOG?UqpKU+aK>5{h94){+AnpN zjwbPbG*q-yBI1j9FB>ZQ5EiQz-43JPA+c=?{PZLkhPg?{?GC+y=T#G$z@oWtS5QsMJki6hl9VXN2CYR?_1-7vc=m71~ZWjFTY)?$4BaH6~^fkgPPa@lt1^lW>4kSU7^s2 zcX%;%=_t+8KE)Zcb#?A`q`f}Ba8a4I{1K$^7 z?8jSfzNnpDZ*qDlKnK;*V|BK3jdP&7B+@{&K!obNT47JtI0HTKIxTVt)(Ot!b{)WJuAsec!LTTp1I9wCnNcg&2`>KgK&y=wjyzeU zffqSN`<2h?{u>xB$j{>VnDQ2ZCP14?z>WaizPQIp z1!aNSIwfBK1o;FQa9p;0DSO3_+l_x1_}*%Gp-?ul=LMea-fk=7aZ2#C#(vZe<=JDX zvpp~VU7a0ANrI7Jso$^Dv6p?M7-Kc!x`6Utu*f0@b^f4ZE59;ugQF!j3Iu4=NN1!d zPvrr`qCA*mepPv^L`xGc7}U9Ohp(y?N+ib@VXif$|Em|LP$=;ka&C3a#wiO1l^(BF zBG{C32F2xqM#=yyYoTsjvwGSB+!K(kED*7p3U|$(wcYxknf3o#1R{&aR=6*^|Nq=y z&41#sC=bgZV>X@ZEt)&B=x1=FY4u-OcPj@Js#DZzIX8;xmI7hTEOWuj%0Q!XaHj^j z!TR#B1s}{&3Gl$4Y0E8^lV6P@jbFLNW^rxFlf3Z;-yi3Mwm;xvU3}=qa z@w|aJNT%|ALAbf1rJ;&f=FP$2G&3$jG5C&R_k=$`IFj+0SDMtK-t|*fQQ8tI-Vn73}`6q zA)-D@w%a5xIYF$4^HmxyAHCHkvH2=z459Dn&NnFb{i2tS(2V^J=S^NCSJxIuX6*<1&cbr9vw=yy*r(%*lz(c-!8FJ zVIx2=(f^QO@c6j`7{_4}rcS3s$rm6K1aNAQY&VCL8qj$2R2uB;4xANk2EDk*V#7c^ z!(pPEQ*>jAQqM)o5$R4L@o2&HG~PAIgOD@@7)e|Nfn(RS5umk~C>{`x{)_O%Dj!6bSc|W630L;;Bq2M3j%9W;D3>TS6OS|TO6_oahyL2+} znp`eyBBGo=s`PPNxDoolZ`}9nqD`+RNkpWuBgSfuJd}!@4LX$XHf^$<+$W zP0z5s@FtlZt#`XwUk7`-104+mwuK`6UUsG8+$8(yXw6h-RK~{HY~2|l8asS2H_L7Y zh_HtV*?mtjyQ3|fJ;rADQhGa%Ytj3riwWR`pEEyPL_%^hOyWk9Evj-XQ*HGJ8XbD! 
zA8fV2&;VD%NjYMRZDg9*6*P8m|%bu>w0!paioxFWS2N?cn+`)1r^;786FUnPKWuY(pK=VNHUwNR%TQ|575^ zkga-oOAj#C4^E=^yKJE4D2IJlf=2uYNlG{Lx!qLB5ymuzQ%<5fOE=NtMzaq3?3^zs z8%;DpLsII{%T-{EH^8Mp&~BA1@)B8fm+D?E1YVd}_RkuXeg;FV$D3v64PLv;anrwa zdGTDqST=}cv4V#CMM}tH|Ih(Ro`$j+tJ2!-J{6)!r9nLq>W@Iq{w|g4?>uN}v^h+w zN99UK?Y)XyEHb?`u-JdE(DsQPjYQqWJxUoU= z9p2cD2ei|HpH9}CsXVC)@;(2`!zm|g?8j}Kelqz=q1T*WdS1aBjEAN@$?i+SSu$H` z@C0ZlLiRxT|I?$|^M3v&n0`VrPAHKdwUOh&R_9?<>G)Yn!F#{WRQ5Hz1Q$SP*lNR8 zTbFN8C{k(D)IUY@lcj*eU_o#4DyUD?rBmdyp59r8@WU=pvK6JmtEWLLQnOB_v%K&| zNbCyEIZ^D6@az#bWv}_6)i#)Je3Hhl_c=CKt$N9&gINeg zaAc>Y{$se0Lv*ohOf|`KcyD)K63-DZ+&~f(R5=;^VzR=1Smjw$u)2ySxw;bQa;ufy z1*FvZ-rCZS_1;X%=PHAO3yiL#DBd`T;M24V>f0%%h@;-=fKP;!O7wN~jMc3o5ddhE zN56d9f->wljZ(hW;2(-9uBLb+ImOC<{&#`+gn?3k z1ybesDLh-sgMLzKRdaK>CKBqrJK39q#9ncJehb}d%uy-cY|pshr>ub<8zMq$ndC7)RlPz5G@|75=OgX7LVAg8Cd0?L#m;6q)R z+A6t`Wt4nGIVi9zV zrCJbx#h#|I+a=Dp$Rg25_X9HMFo8=Ov^y~>6xjDg zm|Gk|ns_Mmk~46Sr{;I@;3d3quLI?vKtz-5Kvve%2r$#CLD@3W_A(lZa?owCK3ept ze^-Y!)V|i{rU1_82|lKqRV$h*9ebF7el!6?7fs^#OcQ?Eh3)JuvjvPPs=u$nY7WOv z(oF3K9d3H6?G@zn77d7-wgICJ5|ERl0k&wZP)nx()f7iMY+o7r0==enNH$=?z6NA% zKDleOXDu&<)Ez3-!~2OG)Z%ujQDp5Bpbs8xbw}r^_Q7Rh&Kkj@n%@|u2_-@Dlk+fr z((h|-Jb`D^Dx7hiuw(U$8y_sOUE9zdYgAM9amK-pm;6d95&d#b?YH{Uc%oSr=%YD2 zR_KOK1Q_!2`!)S{RH=)+(7(_fTdHTjp*j`SXd&k4)q~gtRY>dRQk;k2#L-s z;b3PKS1o+4FHQb|$+(JR>nuxsuIi=TE<3WDGv4AU)!;$95#|xZW~*!ft^sMU?&?cJ zu+_ZNF{TiOuBO|Jz7+D{OsUkf4P{l@wW?Ed*?}I%KC9a$d|cb^1@ly6X@xSH(uXuR zx)IDn>SFsbyBm7lzD=i`CqKDSHr4H{bZrN3E5?4TOqm@n`q3RudmZ|ZfbmjuY#P7Y z1_Hv)>an!D!PSnG*uW4FrV3eytcsUJ%OmKmO9zuj?0~NFlv8b9AEVJN4g?6BFL-&C z7T;R>3&2OvEZ=uI_G{fKX*&+bXq@q5%7)PH!F2qq;rv9d7e6!ThI=VqxQpdnC>TFy z_OB)FsSY>p)5!F*KT!qwuMhIlojZs>vU?>!P1|s@(?n+w$kn$&rNX17MiyMFu;b9z zzv!Yo%IJ5xoBd2?msTl7z^>)LmfY-50{g)*p8Z;~wQZb@qqaK7_HMJgLmbv6whbB~ zK%aY5<3n%!E zf_3pAiGXtdbS3X8r;6^^a|&^Zj;a|8L;s<4s+S(sA_aU|%&%TRDm zsfFQnrWZf1`SF3|Qh_Rst~1%gB8@QYG-cd%(Sii>1;Ws8pfVt6=!BVild_$wnM7sG zP7x`ci@j#q)$ChF+29qCP!HbXOV$1-wbd3b<=MN6tpO^1X^^)ySW*J$uTj~fM?`jz 
z!7)f+>$q3*!V1P@3U6sIs9uz-j%L*JM@9PwqN?Dr7;)CCQD!`a=#w9aF0~v5fQShI zfSA9^>$Quuc z&O*K#pRV<1L1DG)$W$sfq`JsyoR4@38p}$SlEx_j+R3XnRR?4*0 z*;HwfFF-*4OGzKkI}7-3T5@Yk>_~DREyoG3v}06Vk^Nob*emUpHdpn+Mu&5*K>k*< zmY`k7Qyjj{QHXVA=zJ?!?pK%KB;Meu|f=2rh+_p^CDWWegl$P&3ssP<*)58K(MLC}TI308NU2=?i= zGeWHA7f`qWqSZw~-gb~n2Ux(UoU+f0gGGrHVt+7OE9*I}{nq1x%-gEz`$ zz%;(7-PA5p>DWs@k&Q3Ah&d;xY0jD+2QT`m;cPDF4Y8|ewB~1@K%@V`b|GLC`8Mu% z>8lU*`m(}gUIN-0y{nj%G->yCc){ZxU|tm4JPi8!XoqzV=ltje3nKd`)h)(!u-xMH zlSI15Ft;yng5`&!HHfLDJV-@*F0$C4+jwoW@2nA&Q?;=fBhS(8p9LqJ&eeZ26h36^?5tm#dXmKdAHlnPrsmmbI(PWcxL% zCzMI^|3}IAf7z#JNbv9X>4}nmBCRZs`Q2vHW`&gQPEMyRXhOC35XguCetHW)p)!iN zY6pNKR~BV$)xjAERZ+-`*7XC3_VHi@Wh81cKf2tCr@5H6;-P}?%SrB9Q9ts(fWh@A zwAqigi0>iS6%2B30{8(^*d0&gd!mvHJGA~Q-NFHJ%q;zxz<8~-fVj3%nym%DiIz==tP$>~*sWOm91a?zGiX&+m z)}44-L^)`{EogzZsm51fvH*>G(LA9}*}0^Fj*{F6);SW&B$#eme0iGY2Y-U$%~Z_F zYJL>DDoP*08&%OM&k2dAWKMG+XZ#IltL2g9oyWkDpFE(-A&g;C_ zME~q7Q|-tP+WPY1ErpgJbCkFOYYE^@Ein`sEk#`mC^lbhIeA8Cgk@?WxO(vg^D+R? zVkiaMx)aRsGi-tLB(N*;t`7}e@bO1!Hz&K)RQ1KINXfEc!Q_OtRB-z zeq%2kircJL%^EDWn>}Z-^?0enfa&s3brV|0VRHH+g|4J@J3utnsNqu|u?Ya3-r*)- zAnVk6Fo`dd++pGps(P?pHr`T79=QQ8wt;8&b#lh@7N%vn9E3g)qUlHb(ymNbnfSgs zx&{#8VUZLAFoc`l*XhMafFV4J*@C~bs_wkQ>o?JAH*JJyA0e@iDjfR>v$|;~vkL)y z@08f}6F2vJo1va++1DW5M@r6xsvjt_r5z1PzDQ;FcXQ-) zFGJpxsQ8HJXD?c|`iSUeBSiKs(j9k0Rov&e*%*x+LU?Qq+_sU$-8y^3<=Fd*r5+*e znI-(RsE1Gx)zsH+^851S35}APFyAcgH;$XE9z^csy-f6oM#>2f1RnEAUn$Ef9<8{& z9SX0YG=6Pa#Yu;YQjma)cZ^FQA{drd2Gi}qL?1YowhC$<7o64n_a3b%`X|mRf}n-7Xjx%EPNmTqVAi;hpE9epiF6pWHj##4O)qyB z92=LFB7jduXSCT3Rd7f*?T9N*;=7AUd=DkL=R_Q5a;2T_!GA;>`$LMXRiWYr@&a~` zwtj$kgy2xgQohk_Zso}3>&F{tE9a1!0?K@WrME+2G%bGICFG}`FF13BdR#hw%6%Oo zdL!6EOb%x{M_b(3=VMOZFP7UNI_C?cBZ?uOUoO%s7g$k(^y~bdLB>y;$PWpc(Pym+|$y%TvK%HtlR0MKcBQODThL-D%igWN- zw2r$>{b{yNpv9D+Z(j6( z=m)pa`epQGgpP3zrAqCqh_**SGzsi>@fNJ$}akF<7FMU?1QzemSg<2%ZZjPoP zRqIL1l^IYz(|pX~QJMih_?*xU!LsKI>^<58`?)gzCwxvQ&q=XWQf> z40VIzp464uO_ZcY8$UOSW~bzINFb3`iEcEbjBQ7cX@of;T#X`J$g>+|UjLd>roCyD 
z)I?=7K9OlTF8bm@v5aa)fUZ^D8Wkpcm}U6|SXw{d0;CGNOr;eJU=A2-$}tVeRPPH- zyGAfxDD%?BwNnEv*e_RVtQGJkZg!*&fb0iT6wfxDo=fm#UX0c>Ny==Y4a zP(7K-m}qa~3RvJ9WzM+IA<+&_5gTbj>mc_7B&Ee(6^XLpnT%7`bdVG46WPY`5fZuN zOL-azaEX@toYG0D&I68|robL^%wo1W0^@ME?!`CB#>E!Zeo*h0T=I~T2OosPtYsgV zZt@M8U8M4Cchl6q!x~9?luqR$*Z~Gr)GhY~N>!+sve1nz%VWdlce~h46nh(`naYaF z*TZJmY^yu(ngp6n`37S=T7Z8a1KhiSD)1{;D%2#%>`t8{rcjG%d)kqjBa{U@?iVYC zFVZQ^nwN}kH)I1{R7u+Jm^H3PT>W2I}YxKL#qGY?g0~V>>H^ZPVIR$ToODa(xZT&-sCl)+u+&?dt0F}0tFV5A)@`YHZR&{7z@kN8S`+) zPrCeQ2~N39qHUE`;PwE>9-Pk)hNCZbZ^Qcr@gcew?jy6T-BQPbeI25+6Wo@EZZjv( zH2TBl<%2nPh{v%LT}wH=1Md9AWE5BP5>zw2%_kddLKtIDxu!y%GGTMtqa#$43} z(|2acemvZ^lz-Ie_rvkjb*gc3JNa-U@`dA`*R=8MKu9yyf*5Htg&^yES`!Qm8{Z(a zse>`6p0i*Em2Y>^UEUeu6-P?!VT=t>a(rn!%^3C#D0x~v@{2Ancub>((#p8@8DTRC zmJDt$)9?Vnp%Rl(B8z)Cu3l$pbE+jLIb4%kJ3!L_a2_O&-oyWpz@h;i>af@Gak@Zp?6oi~>? zw_2sQsElg4*6^sZIBHkxsB83^)l@3=mJ*=R!o>-+aPetM>gwFsN}?bA%>Pjp^(0NzU88C9!X5}YAQeT^wv-yG z4CpnfS_3(n0vJGIY6PtsX|S29N6dn1nMIURrBV+oI1kgSKX1~K+eLgMjmSus{wR8Q zl+qGLa?8x3^luE==$tGa?5h`2^pB!(R5%1+A|*7JzXeSmksBF^5ra2E1c-bKcp_~D zHOW8MTCNxiejW>EcRXJxV|&r5qF+$Srz8>FY?KJPL*{oYEdW^sG!U%fyySVIKPaYB z`HE~Rzh@Tm(?C9ggci(ihXOZbuqx5ZuLJyoDd+}{$mCkzFsI(!BQBG_^)z3pGTppaw~!1Ft6){deU+UWanv(Jil9FUEqY@lWNpUT@yc~Bp>7=c@p zQ!Og%( z_E5y%O;o(-kAk0cP(g%0QjHETx=pXgUYU_S5#Z)xYi!U2IC-O7VOso6>I=Fxxf$(EBJy9&1y!dhjz|JNH zYu{ja$xD`BEL2RjQ*Yh5SjuxcglP&-S4OxveTD2yFm} zyF;g9Fzi{??2g`q(Ui&R-@q(&D83|U*Vzm0AaNWmLM2b%!YZ3oR~Uqqy!d2| z3J0kxjJ}}x02L8}-5e{}4~S~ij`3_-PE+->e?UHq*TEdaUrFp>iPO&&%C!sNgdfR_ zVFqo&EB-@rn>hSA@ZEUDtmYnGqWK$Wlpe7jo+CKNiCUlPHU&%qa#qPUXc?Pow$tJR z^^z)1{=LXf?%>$3bgP;#MHhw3Wj|bFaXMg5AP^D{XE5h{OFve_m)63M!Jej8AX~<7 zC_B3rJ)^o1__9VuQWkkopT~WviR$lUzal!a4ud{kapnj_xJ?cBh%tfA?ZKSFRC}G* zDBJbu;}qu=#W|;)ovm8xYi)R6 z{CaE$%+g1O{YOE4ya!FD!!E}Y&J}3M<19R2GCuQ_eYp z*D+fgZ@R@FEkFN=ar$)1za;lDq-(Cg=`)OSiN_GFQKT!0Le$gyHTz^y#ZVMaS?Fft zXWjO6PWvs;{}9nRTlK^9z)`=gyX4&XzGi%@+ZoSmuYqBVZ=*6Bl$>enZuVV?ZIXR$ 
z?=CkphSSIE6KP6nf5mURw0YSdg?ccvRovnnOuln0=QzzzJ^IFGLGxMaOsL+fl8T#tOqJpEu>yoq{dRX>ke(+vzmSXtiv2Oy1$6*yMTNH7|0_%D zD4?Pp;lVNl*eV&n!(X+gTuj(e<`%}|Hy*>uaXb=2#OKX|N7IsbZAi~d`d}LG_iVvercCyJ%G_%ATbWn5N5D^=%J-#!E2UCc z!UGb3WCmNf-jMc(Si+Bvi_NW)T2Ztdx^XjSjixn-ltj+`UND};w4Mhb6mQ|&q>Oe_ z(JruB@@F&>T6mMu_J~v^X);fHD^g7t%1q*PNIuMWv6hM<0s5z;^EXkTq&vY+CsU=S z&1L3JcLrovP#)J@0apP}jLJevkA4W+u1oThF3GB9M@W5P^C6KI_|}8rqO*z{nFA71 zwX3Zq)8-h^l6ruQ(;gOS5jZo`A5q6~AZmOV!+!Z4lVic}Bol2yEeUHRW6>-XZ+J<{6-xo%JOnAY1c8_FF z<*UU}(m)u&tos@4Y>WAiN}3g^5yeaS()I$;I+XT(AikRlb8aC&Q0YFbma8YDYZNd2 zTxRoyQV(qa*IUfP*o+`1*`00R?NdcEd&Vs?1*kAe_8y9t94Yma$eG$-Xw5X<`~&5y zvJJ`Zv?4MRyyO)av$7^Et(pgFD|S)YEkKs|yFAXTQoShe`(I6yfj%2rS%`p*5ZhTr zvX$MHz7RjM1Le}Y0R?-O=2G%`f>&jCQV9pjX_RQvR5`uR=?KeYmm#1-b^E;CFo>R|PBLs@Vl zyjk|p1ng7KURDW`o<;Ss{%)|L4y^wX962FoxuDilpp7F&H`LP>Kksrp8$?o-R}lH3F*(F)rwW!@yM1lVA) z9|A|@9gYg*j-wSOXN>8@46FsCM7Nm;>_`S8uG9KN)vjPl=9hGM#q+@MCb_NDn&dT4 z6`fmTV}R1E)O*PDAh4%2&iHK^j*Y)5Q;U}^ve?~{U5m)q`&$6q`2c|?q)vMoSY-b; zKfX)iwMmr4)t#>3oNvg^ky1T!bte#L^@$gv6Sh$1r>)Vlo91GRQv3Xza#oW^@v;XS zUb+nmYrKjHR?9w@Jv2FgR*aWADCKTrJKjD$StNg|aT$3d)#Y3xjA&mEaFCNls;EJw zsoVUpU8`TK@x3<~Ob(ED^|D<-8wU!LpHen={0oOh<-5kF@K&A5bGO44^ z?(PfoXN0mUar{i_OH`os^8YZr?B|A`(9BA%!)2mOi#?6YEoY6OQnnH0khep5)ywR@ z6CCHjiXYwvAX&PawMv(pzpa!Uj$G$x*+-@#3tn`vWb~EEW+C}$-zG#xEC`)P7ZLG- zQUmF$yTv|jY#XK{2;WvtTLNYL79rSGpus0NmNu1l?vjWfM5lI|>QnfV;yjz}+zlP9 z*!tT%$hPyCX8ejdZ}GLD8)#_2@`70P<5NUaojKSGXAWjH9W!$=Fy9p3k7f?$oLda` z7ui;~wLwY;d+ARq#-64^#4}0upnM&!JDhO57oTfMj$h1PtR3G;6J4{vHDRJ8FrZY_JjzI4AFxg!? 
z#-p2R3|5PiK^dESm}PGI3#zRR2RFfC{;hAiWw)Mf_eH@lv0PE}bJ{so zyUllA>tTPu2T~Qa&78J(8|9Qa)09>{RJGmj3)6K1`)gS&*sf(4sg0~T*kM-z*{&?h znNT&9Y_N_$>&1Ht3Ef{u0ygYXb$T`9>~hOg1$HC*1Um03#hBYc`tCOlUgh<6KXDyS zQ>dJt{8|sNR}>U;I(8I!Sg=!9?=#CwWVCK&39Kkzeo(aWnO3%FzDP~*(9tSO~*Ehy{L(>{$G*Z%J!4|(z5{aSwbZ?m?- z?4Qsp3d>URv_WmTv5!&Kk{?%w7~7&t3F7wC?tMFwxyFV3v@!;`nHxjGU#4?y<3Q4#he{MDXUgDz#Tyw%;&dx{Snb2MHmSHny?jD zz~Z8*mb^iI8Nn2b29YG5@ZC&=_D*#b;h2_KF!3zQe&QCAG zHPJm(U@DsmmHAiMs|FVe15rpx{>42~Z<;~wO>;D0gxSC9X!u%$dlHDupToM$(YbYRne0=B_SQPbctqX;Ujt*}vfE3!w^k@nSqb1q^Bl=khY;LD|R3IKdfo$o6(7fcdPN(c8V1A%td@-d8`|&ZlrJtcW^Mx`WFdR;4Af#we0>s($iUqZC0rww6!AUFm{$$q#Ojt!h46!xdkv zve8<3hH@e%aX+Bl8@#?vw#N$UeSpr70Ax;8>hVoRxsqTxXFsB`eJgyO3Y5LUD<@Fd zn72S(+RMco^%c1-A*>l^!p$gC$$r!Zn)t zQ#Sh&=rq_sDVi!*0i8Ki^5RvkotSD{BX%|nEkAvUNabXI80)z(IYQfrnaflM{Am$x>Ne`?$F)r ze1SdG&9i*~ZZ5Xy(96#k%KUM_gW;kVRq9@Gxa6mMw^`L7K z4(Yy>yl7F$5#y@C7;SwMXXAK~ z0}@g07jPlaUGYPUbx54?bO#k$&sZ1wlrnN?y=*!BRHr0<-1VYAfpi`wncA@7bnJ&v zIDpP(S=@`3JgpOWC=j!nd_v5XYByf3uuleA5IY?2C$rgro!JjnNa~Gr1_b>WFgs{v zQ{6(+F;Ad4728m?RZ48hO;%Qv?Q!gRf^r!&M}D_QX%dtXzf^Jan(l==+Z0`{h8NS% z=Lu!y*cUBZIghGR;$z!sxxgisJ)T!7u1>X09mu37ItNRdILIxQKx1uC*pJ(6dA$m- z6{pQPojXdKE@%~VnEyevk-~pvs{E95j51Z8$~UZ8J>`loN=FwVu(F0$=PhfNPbi~~ zWh}8=xJae6S_>D;;t}yt&)hiSoK{*`Yf&RrhS>Oj-)Ts@^7uV{~;!)To~W&4*^^po&Ju}&L|pl`BlbF1Qe zIFsuwC~C4Jf2^S0j%c6Uu*}71Ep*?p{Gd)t2Ei&;P-t=!S8l}3eoM9JE(eASmviiw zQmHaOrV4;!Uq;~ZIMGkZn;yJ|bK-Bq()}GGt=MI{B2^)S4y;H0LRqtK3e}HdlqyY? 
zFM`QRBcl~{O`$(do)L_)=w!aPOc}&wK00~8+IrwpMO;4FRvbblB?>aa2wooY?M?$Vm5J7Mwjlf^xg zCO_>qoe2i5^dqlBm|AW^t7blv{al5TnWnf2c{QmZzCr~{wa08X*-c?b=w;-mTtE>D zW&SYbV8i4-uewQWQ6+LegPtl==W!gb6a4sg$vW}3s&J~+2=$lIssZTad{+fMP30sP@_zaTU+x{UkXLB~2Vi4%v|g_FFxB0u zP|y~RFfJ2-7s4y?@*y-L-S<$*Ax~aY7)Zk4Cgz+VQkdd(UVTWzNjDo_ahJ1W7w4tQ z*d%;NqkVoT$xVF+_YD_AjV@MZ95~(xg2Al9=|AGS#E;V$DZI=N`Z{s=W=kM2A~wDXXo1VD{0kV20Ny?7SL8x?A_a z0(UINx3niTx0uxK0pYXnFz^!s84a6_6nUl_W;NC1v2k%XRal`N!~l<~X$1(fcovZ7 zCo=i=x+zEUY>kaqIpb8(sY+Vd;TDv0@beN_o7Er^@D^2xEo?6nVI6)`h=GEiTRcKdX=EkiMGpg!0nRX=oq+(X1tCg~aGO0NIb)Eut zA#K2UlgthvrMOnG9~8AGp#nE5>?eab_UpQ>MRs}k8$v#x0fbKFozdOw?*_-ZFo+pt zTLJK$31AJBOtBAe4>YW|Ipc@5uiU^-YJPaC!)YH5CRcI#qUwz4_5yIn8`~`Pdze`9 z{0_F)V9rq6sQ$$Yj!JfyMaXtjEEVS_14F&SGKd^0KQqQA6lAAW{Lsk?NCO>vfh3bE z)ut+L^nkJ4QS#bs=#q0 zO}`2}W&tgkrz({c+M^xpVoF^Hnw!MaF4lWSA-^k8yBS<>i2uPRq6-^Ib2%W|Q@rse z-%Ix)-z5M>Bc}9+h@yOihvEr5W#;$M);WH>oQj(UWq$GNwDUt5B575b$4iilgz9OH zj?-18zwJ$`wy+N*c`8qhZ~S3~cG;=(q5egNUtT#V&6l0Go(im$G3&*Cz~rGTmx^xRgBb^X zW*ekkkWL?|A>H9E;|>kP;m%a>rN$+1HB5_s7R$SyolFcve_h{{Z@J3@)j1!};Jc#6 zmV4@SdcqlEI-BR`C>@~QoK0*Iw&W5rRw234xI2kcU*IEdCvcdhuj<7rS$*WPd;Qj{KF1fl@)9ARP1QZE%0q>6(MUDZ zKjpF^tBy8GZ!TZtBA}>D>Ke4@@c4+6b`QlVdRa7`HnFpKoMxLh6ODl7&NyO{RMn>i zTS5KtJy7gL#FAUpO9*LlAvYcv(s~W5eQgx<#6IFp>TSVWZDd7YOj+pb3EFnAKqsVH zo6)mXn?cu+?LgDHS?3l{n1pnuO{lW4(cScQ;0@w05)k=pPTw=_9;}7tOBauvNiuoq z6ycVnd=mp@=Sja^{g~|tXeot}jRVxIB+f2!r`J5HeGqx3LRzwm2)&mzZT9|s-XW^w zh25=_Ho=QU1GA_B*6+Qak#Rr^UqBKI)-1-BLlfBU&`*wAF&b!!cceeZfukX@ zQ_Vnv`@PW3#>}N!NQvDY(_by2(a(VHQpSl(2iaG%1Q zf)N5neVRm~GIzf8KGleFZE;LtMk3@?r|iSs@@8?H2TSc0%e9Y^xMvb7BJ_mM*@su0 zC|rmbQllkXDFP*8o^r=n-BP1<;d*>SFgrEBVUy2%M?%l1){L4-(6zz(=}?x;;JjL^ z>ggMvox(F0EMr3->$`;Ole!pJX^C_wE+c(n!uj4|4ssf(!W6k`5P19T!KDZ+iqoOZ5SIqDJ| zi5K@wo-GCDr1NrUwXA$#Q@f*W=&&z4Y5Ib4lP13lU)}a++a1Q@F+4U@4UIDik-f+> zcuk%rdVa0dVm8RljJIlVK2;ZY6v$syoLjM|>J)A0?aaT%7_@W4Na9hf2Hw?H`yQ## zhDt3X+2&pxYNPiADE3H`LJxCQ=jom{xl zH)i0nZD{tkAc#7&Zx{F}fMErauKrBc&Jd1e!Q?RV5w_k?A8GkACrCQg4bj^#A%^Ez 
z1Grp2Gp$6S+w-{#8-zPh*$nBn(KY7P8nDJoBpAc^R`gsd)BGP+K1C~zx!X$li!4Voz9dpTPd z=j11YV|L49n&I_g))9{eD~XdgOD@+ z+HquB4mzrp=KT4xsMT(-T6mS~28L=j@8g3-Ee8;;c6jjat9Kjq!K3%e1hSoGf@m-4 zMz_BEa#3QN^V17;!_BuG;=3eT8Pd`zjdcu3(i?(i_AQF%i_fy+tPlFi>A6$XXI_8E zWi-f+JQ7u_V#FVV>aU>CPR2&Z9?txxP;hX_!Dy{`~j4fb>=KF`Sr_qKYwb&8a z)Tj(BNTPcMp3PjP_d}ft5fRP3KZr{;fO?A|kd=$$rq6PgjBd*_kf|dBSBdK7+|yg8 zozPAnJdUJWL~BnbICa#9yR~+``>644M!TvpW;1H1LIdIEDZtP5hQ8gIDr z{n-OEg!(pI6;F3h1dTT{7qFzgL&Z?eaJMSp;eDc1X{RQSO~$c-O0}+?BgTQl=*`+a z@?3~jBM+J?b*3!oX$af3aN?)F`6Yb1hWf+?%vxM{&)dNx66Sn!wt~od4YZih*}zJ4 zBnp0n(%fb)&uRnd9da6}H9|=dsd9zp58@5i}K-i9ew2SKmeASRJ3pdPJW_8)o5z~-5_gD)M#;eNv8=Gq=6Sqc(7a!2kKxhL0J#p_ zOebDTC&7o4p-<;>GF5IHX%}iT@}#A25)xOcj`UF61AF<4tmWkq74$vX_{1yO)jluz zxVJ6NiAGSZu<#&zzdj`(v=~uKNVSCDnySULsZmWngC0SCPX6*ZtHz^A&%O&$jY-$T zt9pcD$vepxhr`RAveG9`3r)WwkSnig)j%)Ikih@wUh%~1IZdx2s_g1g1%=lqA|9Ruszz!%w>s*`PWwvwn4#s6EZfEh5%uoZ z8e#rjMlT~oT<`|`#x+xncW7$x%X=H?&Yw!jKe?V!i*K#sJfKF_CS|#XH|&olWYvsR z5vY^A4&K*s95#KE8h%EoG8?$*@AQskC!&$Yi2VTYKz1(*TEU1mz1;t1@zI)86Y; zJTItS!^@59&)Cp(Aw7&AiKU9vWVd;ZN2=^zI&MX3nmbRPqsH1_a%a}`anGuflvZX&3dW58an)C3hCI+0t9^Iz#m;RiuXK&-g-hz}+l8+1lPn6M?kE z(7USj8fpf@d3_XQLzvb@YbQIwE~&=3PZ#Fu(mx1}%}U~)XA5Lo4n!gyb^V0M2R>L6 z^D5&}911qEjd?}?eJXcTRj+rq@f8IkW%OHe-p1Mr%AmZvWvzcp2uO6d}u5!^NKqsxA!8EB+e%EVQyx^di{sw9ctPr5~>snf`h zxP2@^E;GoR1cIzD3vE7?uKad+;;z^03|L{R&-o6Dv%09xn(@(TjCi6HlR1t>a{ku1 zHyF!Lzk?5;HLjvsD$p$I|AD&9Yjn$6QOlNOnFS zj3=0tqgNjug=dZA-PZ(%u3{kK?{BMw_4KN`r#)2m@{%T)$9-YG#`GbXg{`c+KZGE% z-4eYv0YeP)dG{M)$$8*odSbe1CvBqKs3-ViQj5AkUJ%@EnboZhex*lws=Rt)C69Dh z+L~Mv@frj)F-8Y?AfI-zljC>V;y4yNa380UOs}I!(iB9-`wOpBJLTH8S0uAUo}u-s zL-2IeyH$orPj$^Dc=|uRHFZm04eufW(eMWY=ES*%yI)|ii=|;wClt)y1Yreds^6sw zGKi?wnBVH84WP);55#o~*nH%@i4Jk+a|>NWYotpNTxq=NBK`7%!bwbGQ>Vn{fUSwQ z9%h{qS(m*KW$zqoBItD_&}y0^wC$1FtE?Mqvip;{=+AIwjWBbOAzx&-KUZ#qCH_>_ z%znY5#qP^wsMd$wCa%_(3=3@^?(2oU!$2)23X{DkIYr{VN+|!~>1fnhP>5EWN?veO zclOJY3evJAdcDz^D=q=Gpt}S$P0iS!k)NRe-+sVuDU%5fV`MJST2|pR$Iv(u1(4Nm=y04Cd+G 
zW^eT)nxI|X`h?R9&qL>tO;-D@FD>6BG;(aWuuH&EjU5NaoPA2CKN;=O#fmC0klnz9*O5VA`|K3I^O+HrM6{i=Iy5!J}oz8*y z@{d8VE`mBFb!8OP6Ce->6C|c?q-Zvx>0$-^X9F}T5Dm!05Mjt|bk!JUVGG>A0LBaE z1@my*+L^$exOupN=PZnYtNquwzP`5qEZmhFX6g(!HF4E~Vu4VQ@&>i{nzJho2nlro z1q9lED1DrC3cRElmk<|iX-RlF3>FbL9TgiF4q1-UxjY>k9j4b%UtjNP*Ua1(#L*{l zWst0kBYq2xjS?;_i*o9e`t!v|La~{GKu!#~w~Lf<*Hm?8i{>hHLLM`qG>td6WAHOS zHt_b1+RTK2_6CU!3<(4h0tUH%@z;awaRzzS(EYdLj;_AzjtK7f+&Bt;z9p;D(N=sBY{AbK;u4Q?`GC^Muyg0{9q@5k}a=+!M9u;R8uI*zvCaP>vY?;8k?Q=BQaxt z!-YwaC-us(lo0jOfV-n^7T~T`cmH|MiL1eqz@5w-o=ATXqrPoRyJD9m*M%Mo#c?>@ z$gF-#-)vCmc9*bv(^j#TNFhJ>N|WQ~k7u(V`m%Guv9qtd<(;Gvb7r0hdqL&=iL{51 zvEX(kqBof-kj(scsgH1$ynf$9mHrt6cexfLqslre2cGDfNYXnu&98_EN5W!^l&6^wJI$8ZDeu3vVcUq*H}>7M{%L9NQFYSs)jj7P0A-{ z6?k`E)0THA@R)dK4{x6fXVzs5m-ECK+wce$VST2M3)%KLxs>M$(n zfvzOKb#=Ia?05Ye%EMh#ALu&xpSu29n~oxIxDwDF5SaOQ0UjVH0rUJa+kaU4umsZq z2~i+Nk5pg(MC!}|so0}R{anibL{|8KEGqI5NBFbs_nHUT_v1f>-`5#y&%PTbnk5Ke zcm+U{`K}HEmHD_FLw&#Qa-G>PiT}p&>q&+0JI7u|nWFxI75*&1L=On}{h9F34*16a z|KWh{s8_wlK(=@QP42t?o&(OI{n5bx(*NoLo^LMrN0P5DI6SqAe<#^<0W10+ksurm zZJn&`%)Tc3FXqIb%jzD%J(R(ta{s*cKRSf-^E!!7fKSSRY9)2R z5+8Ao31Mn$b5v94v*B|z0Th9NISx`d;2(?PD%jW*epCv-&{3hi9fA(f4pI~l9E-vk zZ0mAlTL8<{5Mh8o>~9qIgvX{ZIjSv2XKw8-0eeL#5CsQq0sLO(ae{NtBTh%97@L$V z^8jK33Ph*!f#~!nJr;#COvTw4ZVMOShw*?-j_w>!J|qMqAgx1r7$|`JzXUq6pE@OP z!`T4~{k@WbxsC8rU-+3W!%axnl|qO4%t9H93kj}$D;TivOGY@A}qd6`bU^%+%vcD__BdO4(5k}{DCraoN4A; z4t)hWa)E;j`J%}I(V_~pgGn81^xpy58(uqVl^G=?>eB=kK!otaKo^0f^?zj{d%GjY zkf1%-Yg88mio16hh}-JF12VBQKB|SVxNV!PVnLv{J%>{(1jzg2SO_?#1kSyHP2LeA z`QY*Bw_}sNRT~0^bX*GHu=L+3e!bO{JuoNf8XSY-=hI98e#4xVfNYYd`5XLa1`S34<^ptbWY*Mx2ZM~#-qQLj zxqBjlfkPcbf`R>=srbIDe}Dl@R`Sb!!K6pyxm7@F8<`FmZCVa&&Vg5yv6311{iPe@k0!XkP3yi3b140@H}EH6{BPZVRb>2+3QXoEZvXTzP2azfPpjsn z>!{=;8V&UI^;wmGhF_`(scx%ynOV7)S}0oDDSp&9F|?HPunRK_u}g;XY8TcpvNKb% z^S1m7$4*GpEwC-NEVjIEDru)^pjcw$SzKG=EMHML;6o^%^%)uxV=`71 z4hHPM_vfFc{@3>Df4BKhe_ERXUH>13<-c_N0l5FF=I`NQWMg7uV)_56691o6{@<1U zpCR7v>>K|1XJ#?Ofq{|zv(o>tn&qDcyhl?$enSIe+AK-oQ1nDWGmXT2NZxEMG8a~i 
z4t+E+aF;cA4Fev){rF%uvhw)L&@ySQK2R*N5>;ggf?^1!LLRGJEC^cWT&K}kG~M8y zx*2!escuigd7u!bLR&87m;cePoIAgx9K?VQ<$NO0zwl`Kohh<$r;fBigDC%j1iTU2k zTv>R5INW{nWj^B_8uK?K?h54|&I_r02MA*N&4I&M4{1BNS09gjrM<2iZkFcH_i)SR z2D?~wtxLgzCUt9y#@e4LM4G@`uNZxOzf!gGj4X6VZM6M2U0*c7PPrj*K z5-wuT$EyJV$Q6hu8|7iPJAG}n?`eCd>U49iLO}Ll4qAV#W7hA=m5qCx5+;tJgViI< zLU;6*+5Oh_Lrb+qjTb?Em{aO=oiu3&F-`FoW9SErp{yfBB$}7ARyy2LYog*=Dp#7j zPFdQi!#G;aZ)fLcv@xq$*h!+h%D-iHVWp;D#Jll}##LMbW5L+FiF> zG*z#HL=+0>Wldsw2M(-byp1`h+)Vb<#&7%L1{fDQ!R8%d!r=>D>#ork)?R+O^ON`V zTxJT{mH;qWv=9^=&XfG*)=F~X5%uD*AJU&hP_6}!asAsLmEhd4g^GTe?8IZ#C+UZ0 z{03k>!!TR5tiwQAN?}|1YS)el^c*9kQg8B9-bj)nckG)Hg>)K#jg`wDneMUNMg&v! zv8(^c%p-d&QVEA%h^_CDxIo`rqhr?cGAa788IgK=3_OHE4-C>&O*+%8ppq3+cQ}-f z#E2?VqY5y2JdxdvAQmgy6Itma6qX3Q9^>9oliylf^7x^-^w`{($9$E9%0HhWQs{J$ z?%1r^rFC=WAJzY&6REPR)oW8FJ>*o<+-w_TPD}O7;O;{6DJP7F)a&L5mgLzpY=6FH zA$3&omM^FPdK==sjXXsn+7&;3!0+6N_xB}OZc-|GXm~;pMQvIVmSOhQo7#M|?Hyr` ze&WxFQb5S=*`1WtZ%ivJNp2;y&vcaJF31otC{^ljN9o^Vp_bqB8YHOgO&;|4Mh3pl zmg~`qavffCNQTlI7AB*9>1q+e8@w{iw7c2`ipkeT4z64dEhK%?;o4i(Fs~U1!(HIB zg?~iA!ZNtHu>qcC*n_t+tX`My{k{Z&KyF+m+jR#YkpG<^s#HtfZvJ==%0J$N?w`HK z{{wCO@gjDc5;)TikBL*k^Tl*%Xb_~@Dk=;5Y%(Bx5JiC=depCVa!*&mQ4z6^ znKqwxVE8KSXc#TjyZuGHx)970hQ$sR_43Z(RP`9l^or!NVe)}{Z*Tjm+ldIwWTp*f zXgPz&Wf`h+s7vsj5w?S>rG2u0BP+C-5$$Jb`D-EG{g*b#Y_)v|cPG(9O2ck^d^9~< z1?=iDJeNcU@UJUA-q{lZzD-4L?qZTBKb<{0FuU09{j;-wT+w7Xa>bB9R}?cKOp>*{ zbA35&iX*e3yC25T#ltPOuf0DPm2Rj6)*Y6xxX6Q7PNH{BP5G^*`qteLZ0VFOh&l{HASrd6 zZ3e1R#eXca2oL_&`CKyonq{_HV8~mO50f6Y0AF(u&hux+!DOR%omG4Mef^I(8mIpF$C<5oa(JzqSl5>-)6VL?`Ezm=o@jJ%O~ zEBpD>sJ=;fMCs;n_w8e98cphJq+!r|x_FL+eB;DNX!Gih{@X?f-h_)522r56tUe9Z zdoRyg*Kn~gHgz@&>fAs=sdLDsb+Vl2JT#H?Gm$LBfH?Lg1r#z-bKe*Wu|GdUy)Fm+ zPg8r>J7V_~!b*NMMozj@^uh40$|=m+dS@w(mi19u&pn|ggtm^ifwJ6E9HbEMDTUo# zzf#IUVK4HaaIA`p;vJhaGFdv}uj4mI00DR$0SrbG++w|+yi7L$1t#+g;+fiJYytn5O7L#=-;_B9Q{mV^oc_)L_ z_2xH?pa-qSXB>DyE1pxG?aN7&2q?%IcYpE|Ohw8B|4A$`<}|8f+q-H6{j`H zX=!XY=IPYimb{s_Wcz6^eI8T#eF;HPZhkA6;B11oVxuGwXGP==_*YLq#x 
zRmon`-6G@N-{}B{#KOPaP3T1k6mz9qGo}W#TCa5PZmP*b4O61b5V1P zbmbd9`AQi+?ZkI>J=_ON8Mq?DPa}mHOca6^0w<;|D63mhss5spJ0Cy49{7dO0V@b) z&i!~VEta5hf_m8iGC#uE@D`Ca+TG=~Z7V{xLvX+{qir(f@B+bVpT=IU)O2N>H`>GF zXm>(GnMucdq!xHBGET=1pzTs)Oo`JjjT~4zT@@hrXWh)VI(*R_^sr!ePF;WVH9_S{ zC&$}Ss7a8v0?N+X+pOA{2OI*b^l4Wqe~!;$`gh&gRJ7?f%a2A*O=!mCU)6Ft`uwOR zq}NqY-QKFE5wh5je0Eq?snwV$&J#1;%YD?$0bxb^`yN4mF+a4nFHI3N(Omp6N%u0- z;`cqHiv<)|91#c;o)xXkqe;J35-!s8O)Ip%yoj=p8m>n-O< zDt&VPBr+9e&sa3)Fn^!OJ3k4VPhlW_@1{GbS;zg;L*II=XDI^Til*(%j@;qehTLH+ z5kGG#*B7!EQ8;WnLi`@Pj`m09`)SG(Q4zebPq%)#?t#Xmc@g`f+2^`?;Sc|xKluj6 zMjS|yLfeW=&XX~}uP^^-C}5vm+G=WOb6hywNvnA;_}Xu;0-jLvA+t&eWr8M`Q>Ti5 zHTip;`-ZCvx%x2!Bv!jVV6xhI&&>swJU2igfBj?O^9gJ5D5aT#cvu_~Bt9y_9!rn{ zVRnk6R6TB)8-cXDSP`qj7uu*3nt}oeC`!{JbBiL-tyuo^Sy{;=i$W)9 z`p)lt1ftuW-6Hy5L?}P{H-2$WLEl2a{!RglCMXvL*GbJe-+>YW!JWE~<%@gp3{ha; zrRRvw{o-@2gt9E)xfx9T zrn@TYl4LON9ObExZ$#2~DR?E`=bn&}Oebet*2F#5N|HU`{xZF!)cY*|i)?F`EwGs# zJ|$2fnB?}=XO6R?PC4<|P<}`Q(?I))E^gZemSUu1zJMm7li)ou$YIVUcYD}8q!wD{ zlRJUY=iCbf+SxNMJ7_2TPCPd{{nrLBIthme1++PMO4IIBX-5I?>z~+`-80`(k1U(8 z>yJ)or|iovJ-@z5m$oj9cu}+YNdLNaxp3zbd#ViATzG0}JF}e$cpUe(jM}0*6m6{* z%F%}qz@|J-D>#yN(lnz!0#&WTez!;PnULOa1Q7SP zI5?!OuPjiMjvSpFQ6WN-i=+k4PD{k!S=PePyFt%i)xk8<5HI6rAb=WH{V*wyAT2{Z zE6N}jo@p;ieqo-C9>+NwQoRq&C@XVdXbA2L2YMnCg*m&lFTh6C*}|WQouzvx4E>Bb z=<;w9G$Rii=A{ach)#Z%|H<$&gqU4>~vT#J&A3Vd0`&8YaM z1^9{X>cXQ_?#QC-izmY+^ye{8OGYwG$9dq$J~oodj&PA3@4t{LoGba6^q)S}`FHr< z{Quggr~lcf=jlc%dgs}?Nf}8xNn`&5*F*9T2EbYE8V3d;d`|!P%RZLG{96Hse=#q9 zr;9uO59jm$c~bX(4M)uW$-E0)euqUB40o8)K905Q)X41K_WV5GJg^a}Edr}lv)lB34eOG7A-ed831?<`{*?tt_gHub3_NCA zvhc6`&5!pA41h*}y$*KD)x9K(W(1CMtzE=g_CZ=1pRttV8xf@N4)6#H@FVhN5`N;< zff%6dh+~b5yw0cDnS^2)1+|Rqrl2QFCJ;aay%RG$u&o$u*iYFagQJ9^Hedqo4ggf} znaHQ0Ddk*RC>ytDIPF|!NC}Z&2_>{`qcwR6#-Cyr&6|p)D~-DHN@2p!!@5YJ7jCJu z*)+{Ob9Sef#|BJ(<+{o;VC;mz*&nAfebZn>1=ENKi78zI3t?jIc}PR&dCylQlQ;j& zy6sr&kyTBNQfESLyf+FX2?NP)MJB6ZM;qV0tGGdfX52_QluQ&o#uzCHM!00Bp`~p8 
z++oQOLfVn*ncNOzx|l1`)j{yc^_H`sv{b(>Z->>BdZgnt@b;1;r22h_LB-5G;k-^FbEiEBA>GkwU=Bhr17%FY!FZ7p2}1cTg`8CO3!+)_THj0= zKSkh=4Ek__Hw_Wa@A(?Dz0@i$Ki56Dxr9^Ard_KpdVh>7Sic_O(L{^}6a1n_hfx=7 z!ld9Fie{!wOQLFW)2I#{O|u^14d5;1K$7P0w(RP(L$iY;t^4YM13J(wNno4uImeNj zcF{DG$hN-~2m^ek-mw}g-<6rCfk`nk3>I(Za6a}Y6VMJpw~Y)5H>`T=6sg=mtdyhp zSS_be=V}}lB392QA|bHm9pRx}(iqmnQzL+?CA%7&ots5mQ@%pud6iaUM8A+e151dZ z8belst0=+9Qu#($bd!0D+&omWnA5#%iVQup>@1r#1s=b8P~w-wCJu*_=;Iq67)Z*P z?fj6~mi;1UgB1&iFeq}*%B9%;9m#{bAAR4iMx!5x%l%G4g_xP3n#%K%B+5M4y%_nq zf-2Gy&{Kq#vI$VR569u#(aZ!~drFQp_`AE zxQWe%7>-;n+KDwA*=`=OxLoT{`CH)eXygP5y!;nVTy7^3u8=#iY%lORI9A-7+~?I*tWO*D z(UucK=Vf@9kX#u_RuGE?ix$$Rx@Y8fRwcKCaSBq`JRN7qxuL0)eI8Re_4jXEMf6Sj zUzM2Ba3hoKm1$v=UhThMw7s`(KW_NHGPlieZ@;hq{`ft(5+Cp{Za$mm!94p91Hpj* z8*bk8pWOU^zV*oZmYt=o_7|UH{R^Kfo3kle08PxnAk#{akYbVpk*M-NyZR=zl;g(zx`s#V1FhD68={3yzd~$OlLMRzYnfdHB8I~s1 z?Q}CgKZ(H#Il6xy6LViKa-nDjN+B!?ZiKja-6*hG3~Q{;rXI-xQRVz$Gq6u^4#!fb{J zwL`zNfCaNawe2ErhMKg)KAAuf^@-zzMuBXDSp~v~ccncE;`iVMzEa;%d>c^r#8ckG z6$rMxVfZ%a>Pa-S2i+ZreM4qAAn%FGyocFcD01g-tWr|tmRdVsbx+qLS2u5akJlqr zH+Oc=*P~RofPW9uBkDNI*(tShjMpRUIG=q_)g$RRr+1I_zIrSG+qS@j__|GfUnCR;v2HJV=ca7jF0N4jF}Ce)XXE-^^_X z{Q5DL+1VJ)eMxKt>nN-@)1I3Q*~G$)ftBWENVQ7A!? 
zTu6fpN}-XHY}CQPlx~#3NR?>Rz(|#9RKQ4;Y}Cg{$rZRH}k>#`IE+-^)(h}mD(G>ix`?9#t*(?I(Wu)`9c+nLP4r1 z39@xFGE%gYn_?LWABEz-b_;d>IwJQs)c6aI{|)si_K8x9v8Lp(qRh=X0TfAzOXgw} zIJ$Yi9-#B?pZR4+OZ?{2Q79{K4qG{9MT0I9xuAVy1wqQVz~@ietF)U zG-*vn(h1j&W2>(-K62N0UHrwQNQUq(5~pkNYzl&DvD(!bxPI9QkUtmD&QtGBLCWzlcXD9)1>-!Pou6rMHG3}FuW=OfKe&x=a`MN9EP-3bhM{Rt<<%I+GXF$9ib#Wi;hV#rSinnk)-D{XymrMY(FXC zM`fK%cZv2{sh}IXrBO0>Hpr@0Gq%wItxvy~Lp6$EoQj^KS~5|hyK*8uU8c<|C0^Oc z_GNSoSN5dVyQBL>-f(<(3V^`b%D-K+)oD}`*5)l7v~uKCCA6w|07gcv zlEkHXw7(%{$;mUYkfTvomr0HsWhu?8G&1HD25O#OHLQZPWV1w~S0oNorWtWw-zQzQv9I7WL9XOPmmdmF!4A$qc?U}#e#0V)R(mm9>c=vhXrNRe(78Q#ehxCNql)dQ$0Zw;LH z6A2Ik#6^*5PrhVMM|D~VyuZ`1jRR%I9`IX8Ca9MrccteAfx1v4fC8H6Kn8j=AZh{- zUKI-*kcROWr2PwOF^rRuOvp(9pv(akf1id9wB|o3WN=aLpwesUR!Nc8nuwP0GH($e z9Vf3kkX0oDFe*phj@fuM(8O1#rAPb)EB=Ommv+#^Z>k%?yj7t+!d<5Cqn0#BKta#r zg4|9KD;NWel(8f2PJ+hl6SEr^YUxuN3(zo=Y$zyjCsNnh9yAj#0qHt7m)VqN^SD)O z{3li)>2Q-&j1}a&KEiS)pxVrk$Jg?>ZeH@eIH8k2!vFW7La3Cxj7+2ES$RZTz~k@N!wuq= zxBbB{y2p+IZ*|`tNPqm^@A*>G7O?PhF#d<_^VeTq6KPjnSFLaRM^|kLHpKpR-?wjr zjf#Cnq=^G)5dD_du0P)W4tBMDXsc2`e$ColY^7b5TZ%V4*ZI(Hr#$P?Y9JnbCu?CT zz@l0jNYT})#!4#>EwM4l)YYlQ0Kq~&Q;-ltn7^l&7-{(du7Ne_KtnB0N>cfX>cYmr z@pamX(&~&xopC-|`QqxhhZN>Fud+ff1AnD2(L`8+jNX9z)?+i)e$&EXif9nl70a+` z<*-vUDi%4T6d#GZUam6z4rXk|Y)^729_DLa14K;OhL z-bgf|#!ywy(AW4l-bhbho0LQ;t%VP;GLKC>kkrBgXqXEomPlx!mgr!i0JzOT2}@#F z2mo?(uY^=lENB1>2=-aCM(Ce{=ygZ{#1%^_+$@*V}?2OY{IqbZS zF8%w3Aj;$kDs6?@thaBrR4a-O8Jip(K$ngEIFLcI57pBYaZ6eU;1HOZ1msGK)HyWH z^amcM%4#3#W!j{wWtsq8QoS@wH8K@}WU0IwrOKJSz9it^MrcMZhVuq&21)w2dTM%&ogi%fw~yb=T69@$J6}QK$RTQnmx^( zSARwfGxORwPyTElhL29hDq~R6vuj>8Z&<4opzw@*S~YE0GR+?Lh$h71XROuHZ)vx1 z-ufBfJovM8m@#^TZJp5raNfFV*05rlH(a;s=-l@+f0#6yoh^xB8j#c0ZtUDJpRnV{ z1ki8QF>hEkjT}~s24YPxYUxxr|H{zBs$yK$sczA+Z1`asJd7S)hBd<=wA(RUhE-#y zyV2ZWZM!gC+M@`Y9rqY56SO@GvWS>c$u{8{w3k|qX~qSzgDe`SzOxKlR8DbZpK!6; zy{z81oLDYwTLexy;##qfS=3K4WKVM$*mbUJthzOwINa6U*PmD~Oq8}wS!J_wnc0u5 z#k6G_YZIYiTnA)Wu`e8!PqAk2aZ%e@Y)m#}+Op3a=4CoI^;r~5k!25a$=ju^Gc*Al 
zng$LbwbrW}*DRX0EW)RhvJJSp?6o$k8|V??bsLv0a;LnrKXKvPbFJC6u9-G1S;SA( zt=TrsSbU!X&kp7)v!7ngY32pGjvWTLt{z6*w}@kh*CJyguh=`RZZ>y33?$~)Q1pjy zwsx4gt{i@w^2>%zT0y=bn5_rWL9j|i1`a@=?hoYqas2APf|$;EwS!$=Xj2p@UQ-B*w5 z`%DT3$-;+`PVys<9G?u30zKg=9f!y z<)zd4k-tzDB0LdX@(UzXJA3Y5ukJ<;s)t)9sXH7L3{H$aopCW?6EN}VyR@G>4ewSC za%MgdIE?kFw4XRl?6wZNW@4N3IpH3#tpj5wbk?+Sj@UMUl@lE4H%#dIRGkVgndfZ7 zH4JVzHM5|SCVA}iS|&`UE&a}}d#96=d3guvf;M^L^gnOp^oH9??CV=@9Bpraz zekO7Kuuddb+)K6rVDW@oIxf@M7Iq@?9tKyto!i=e;~Fq}LMt7eiP|8pognqM2G5Sm z>_NhWNjfi+tHEK%{dC3JZ&o-;5f8e}8^DMUXNuP+>Jo7F zI=!7d=$I&Dsu3uqX@0cYn_Di*mG+7MLF6a!;(dR2e5HxSr=W?m!#Zo0Flro6O2jT; z;XQeabILktmNnoMk4i)?DCHINh_lDKY?d>s;k9H|HQ*MHOhhQi=jHZz{Bv%pXrL%t z`pcDM@gP5MXR}uOu?y&+aRM{FifKi^wrkU=-cXPjJI?+AwF9JZcfo$j0*LlCgVQB^?wuDH~;r zKOmwP(DJUn)wyUGF{>HvloJn+LpT%U@+7$9VanZKm@ey4hRyRKdIE;f3nUCZdG6n= z?p2O*#-9;!3pjXhCfzU-UOA8M)sCXaml4efeDle9a@Z zdVd51>-GF`ZLev_vSNlU??8y&SL>xZX+ulrq6uW#G=r5_BedZ=@)C20d&S;g$KZBc z!ZK%>G-IAeE2QpM`mFGjxy!y_nRcHx;E_itWb3>9Qhf)wYU;Nvnla2vo=-<8^HqAy zx#2x?oj&TGfzE3M&wFV`FqGiCPi+z0DV<@=+YtKVdpUl;Fe_Zn;Fdv4oCW~{gM@%Xz(Qec)mI<5 z3r-Fp1v9CpgHumq?A(_WNDHnGSt<*)PXuWJGpUt9xe>?bpaFOnA|ffL1!RbNBk9ym3G5=;()4tl-TKz==w zv0qD`0X!D%rMPiaNZ3&m8>f%H& z0tlW2*AzR}ee58N(4)rBP|XO|g!5Fz_A*@w;G~e^Frmz3gjn=Q$hc^19OgP>*9kqO zuVRnk1aQ1KjS?oVV`&LBq#I(b;R=YEL~P{d7Gu>3B?wL8har=nGO^i&%|*us5+q4& z#g{`Fl9ek+@hSR9v&FFFvt7~%Pyj@YWN+gc9|(LjvXmtl^ptWHYNaVF??E*m#f2Rs zE}^NURbpD<)$qJTE@b=WV-*Ptr2P9mAnCFM1JW+>$B-O&UJMtWeVMVj1T#|lgX}h6 zo+N0E-G%}5&@zOX6i0ilS%fe2Hi;2J#mV+!65A6c68EY4krBPI^yrcYHewQa-!LPO zgPV#YgxKk^=0<@fWG}_l`ifD<ojXx zm$``cBGc=uwO2by?p6+PhMpmCabq})`%)W4VO-K|YPOd-8TCrg3TX6_wmokvY=10r z5q4I%X`d9Z$pGueKc}+Ny=%NwA*ndymb2*tL&jB7P3Vd=r7G{$PHN_*hl}dQX;R(k z&NR8o?KIYk8v(%Rajljl4Qkx>sRTz_Iz&yZ3I;7eVWS2xbKE5rmyTU?pFL*WE|r(g zRr9c7QX5dvs0K_Ox4{J2rnb=WYkE|iYpoTrX#sP_JyYRrN@x`4i!?LJ*<^u@;~1$G zbPF1#Rd(uYwTbYCv1>%5P;3GRB=#(dno)l`71gbq`|30jaKZ+8VW$`sy9E zjoQGxm%^ozW5=DP)jAiEgZy#Q)ImCFjkwB9ZLfS6L&|JKI&4j*3Vp4P0vGjzv~l~? 
zQ95Of$~%pN%yHLD2n}89=4Gp*Ijy2xDW6yZJ}bAyEKVQM#04tPq|b=Zp3IicTdfz${AYQ>l%@LPjmCxLN54L+l!!E61Tt%yKyhC$<4^ zk7LAEVK$?jRn{zV#59&t7f32f1jzwogEWs*%_?D5KEe{ai$~5OWs|}tXZCHxHx?QX zkuwd1P}?A8Rx&~t%cf;pJ*QLEptM&%f*MOn${f;UKaRSn@2r(ceoPF^i?fE92~) z#EosjJK~tO)lUFt#-YYx&LGL4@r<<&ItjuJ$_xSonFQ?yp#~)esTMXSx+n?WT* zB}1!2sY5qGyF%GRUqF4bVm>B0ra7iKW;iB0rYryKhVF*VQ5^o~)oFTuTYh|gU4CBv zcjG|gd{0!_*$B~~{vgtzf}rFe^`NGpjv%j~=b#aW&kPt0Xbe~ks0^44=nUA^pQ|x^ zLp6hzg2;o4g3N;Ug3yCpgE)g)g6bNT>J95f>s9Mz>jCwW_1g8y^+p;*X#2@Kfjjv- z2|M*W%{wv1RmR!IUB=-gAfxvlE+#h^? zZZD_z3nz8jSZEmRB4tfV*txZQn0#AqooDy6Clw2FWg*La_HOH^%?s9b3!-I3O4PY` zL5M+h&ZuKCru|sCj%#OCv~T@P>Oz(cE)z{cKUYeyUb~dTo9kS8_X%v{4+ny z8PXhXCA)%U%(PCfGvA&&)iKh_Gby>7)M?B-ZcUTEW$m;;?lYgfd)mnxBcdDDDZ@OV zqDjoMVHz!0lTY2f^yJT^ZTTg%;BIj|xtLkPu4frH4a}wIvvi+6c|-9yJ)d8zX_B_A zn-+TZu5uMQ%AY394cO_PhR#jqD|1si%bDY?c9l3PpKg#752|z3KMI+)$>rp;bzeTI zURld;QngH;=3cFClC-Rv=E}X`<9Dk))nDwO5VXHmtUb|R=_qhjKT4an&*kU);(mFu zxnd-N_Qs21uEug~qhc~UmER?b68sJ3{Syq1Z}eA~U?dp)W>zcP+3EZqQP|)(m|Uda zLDMkeNMU$L?4IR!GDi*5=(%NlGj6?8uDQy#!dZy?uhG`j5GUPf-zy} zKWX9Wv$oqfP3_dRJ2(yQl=d?QZ@_#(y2RULZ+E=z`!j#9R|HbNI;xq>PZjsb!Y0R2 z(h0GfROj(~fP}JP;(3EJ_uu_?uc>p)^~8Gt**zqPR_#IJuYVUEw7=H54kqxE#ci zyh}x{AeA-Af(v9@H;)17BnOz~fIO4osn8YUvU6B6t=T5bTR{ZLc`PgD-$1^}&{T+u zSeXo5yml@_`^SjS+abRb&8RjMS|>=j)~p*R%-cXj$qQ7L3O^2uLA1#|RIdt;*#s=S zRxV@aks!6?yv9lM4iG|eDpi$&R(3Tjz_xMPyazP1YTP(yo(!@`W~9K5Ll2!es(TRuKZjlhZR!bb49?e*93u zl&rJ*5wha0_ndYMdDGWzp^G`y70vpyHf_BO;QMG6*^Z6^P};otsr30vxxH-3MZ-0MchUm%G%elbegW|+KXFL6CDc0ZIC z;82rev!*$i#mni^O()u7C(K`^>(TP|w&8fN2&z#_Fd`Q&^Um$oeb#hsvfbANO-xPI zs6NUxoes604lSRJkh||CrvFS#H;m#B=)7N;;e&2a60KJMe1Pi6b(j8%&w}a9{rKW46i;Ih`(YvgY zzyDTFcMu;hr>m>Y!P2LuTib)9qs8jvTv&(M{hn1<2Xo{YWGFAYlXI%WRV z>cY#`pU*hT(Y}}-z=S$ee_P7;+I(KPHwsv`#@YCv1!Kede-wfZ>;F;EHmv_kZZmUN znyNG>D)BTb60_0Gfn6O-RoZoGf>mkhx#~UM{w-N+CKow5Ip@{qIW~Znszs~LV?t0A z9`BON+5VB{L`|Y5f1QWnLw6Rm=I!n6-Puxks& z9X%G(v+kaYwvtOR=q1^xEC>9*2JHn`{Qy@_AA7cSZs zE}rJ?@rd4GPabRBUiaLxrQ`#+w|=v2_2xM&?3@snZETlr2uobZ8->XCKWzJhV>rev 
z#2~GPC83^LlKEYQrSUGXS!Pb!Mx?X-Z0f%^z{h2YMUs-TIFquFma>2!EKfqk#VleW zXUmehi}__I<~teQn}B~`;1~CG@Ig^=9gWdTy#1u zt6^!BXC~PWkgYtupwkl5LINP=%Sqmwi#o;rvnJ-3wbUCMxqmLIb%3{rg&=>^w_L!? zbhV}BaufU6TIR#~_VK#?r64I9uF4PCL6NAXt0mJS7e`Uvam`5Px;%1qY+ajsfR2nUI5b?TV-K$V{? zQPS9Y`T?o)axYbU-NYXLZ9?(#Z+)Weq*0?WaXyjPIDuaJ`0pvB-=ADK8|*PuJ%#Jg z`CFSl_`CRD&sO6uimeqit;UVS@WiaMwtrJ9Zrp3rp}``PJb*Qem8TaXz%yz5gzCu; zAz6gffN+NB2d7m-amq(i0Z;u5=e!d#l!>Up@91Ds(sTe*f$>eS8 zzQqfXboMKs+py8#=6cNvp0ZHh-XE$yX5&BVflO-_@A6dj`nBlIn{K?%wxBE9RoMcQ zr;JEgv%E4qFT+n1gP|NB!4Ei5=M@(JChzo8u^Geg9?^Z)QgZ*p_g6{BZzXE%F%c;& z!efcJ(oRHZ3{^31jB<_}KBNqecP<=)Sxls1)%L=alt93KQ`#whoWn`f$FL;MH(Xs- zcFv-6*U<1@Nusk(!6+aO$C=MHd`SVE?Pgi zaFpq3a>UDJeLT|HT}opFb&u`TriUZ~bs5}xe*K_!=pm^mj^+sr<_V!B7(pc%W+gbv znsN(=?L4T|=M?PPaMG{Zs9N`~cY#NK!i`58jn%PxWo_G{`KhI`XJGcYZEFVjZA)97 zoQL*fO=)4?@8KQ)wjm~(!0zNnq)gYN@T0Orn=Pw@+&6RCulsV|bgjj*lxk7~ClR8i zs#VOHY839MY?axIm5DTqmr3WIcjCS-Fb#k^v?}59&acz5rlx}oyt+f)IIFpG6cyH_^)Mui!~ejw z`dhGJOiiDHo%G2Ax*q+ae(5iZ1@5R?vEA(P0@Vk)Ng4{R%%$!|&8I0blr1zo!(440 zp%2_e$Ak0Iv)jYe0l)87;#JePJ7>Ww8n`taC+gbThqu?!N6w1&)Z)4`HWG7}TlNF8 zB#js~mUKzXZ%k$OgeQ;kOk0Pmoh|I-0e50tf+VfHe6^PQh(m%H4!Mk_)e_a%nh79Ar zBMb({XZvrgD&hI3RV5ca&ict;(muNK?)-?v;7A4FS%+}U;8;iCaL8g&&7x#z$<&H5 zXiHMmpDW_0-b57sjlp1s=uzkgQX`w9DSn#Fu_ z=ryr(!*t%_InIf+Ke|^NhdLLRb6nD^ms;f)ec@v(%~G1eH9C5|UDM$((z$sQU!Aa9 ze;%$YS%s<0hYJxD@VATE<2?v1KFng^c@(LqOwK}iJ0W(w()4icBp-CXbXK0@x{-V* zQTfIV=L74VxT|ja=UWayxlPblbSL64?y^-?3(T#tOsU*K9;TT-{;(|G$pu=1RE>c-PVwabC4!O2`TK6l_300Gti*S3X=}zbZXFO!t zEh}eD>Js7+XB@hhIh*S|DZkJnV&2A%T0&VaAC$Q3X&wCJkY&f#ll|g(KolB)m0@rkOG(2Yxa|g- zj7paLAlQ5&)OU>or&Vevm_t@sZGYmj-COf7vPY_h#q*qe{E`i;;j=WkgK_U;ywp)t zvfL7!U8?KIqXa4ES+@7kx}mmTh6LzlOfdGWWY>0u6h?V3Z*gzAX#vS_x>P1j(3|#C zWeJ4mUyJNCbl$>AEh}G0N1PBAa0)S$M=&f$Saq__VV$!#*PCOUoxeq?bw50j*A@-0 zg9jgc4y!0YC$h(03)!jpvJc=*)3hn0T40@qcY-(#OVM*OD6(5C@i-4E?1IS^bVEKk z6*s39LfV7g96X6gvhy*pMe&eb*e9Z%U?5mZAN!`g9WRQPk4sR3KjyBh5E}QMV(Cl) zA8sFm&ce@Lti$o+Z0r;xcB}j`c28}e=mdu_$_n*h>$8E_!Fgy$ZLpM#9!c$PWqWWA 
z^6Hfu@sJmf&#%S)k)O#NjF_Mm7K)M3C#2leX|d#7BUHQI7+KB8`>O;_XZ3VJEXFk~ue{~1gN&?ze z_V&2RdGfIe=CeTkj7&@CE?%Y)6C1|68RxGrh=@7sZ|t*iQL2@kQ%+vF5g9vpDtOet zE?r+)kt|hhzwtU@gD;6f9XZ2&O=4YD@232L=cS-wc~M$jZE_w|RgAsFt->O;hbp%W zfkc#Op#3SKk%7d~!PEMy>-1D455 z8X&A;>4^zkZyj(kQ1?iEN2`2;QI+4~9uQvcnmt{SKx5NXiZAb>Z~

      NI`UOGdv-LvmlPil--z`c{X8phbf z(=G?pPF)l+012j-1(+PHP(&IY^NIV-;8LzaYm_WiUr2rMJqq@Dl;HQEUW(aWOoqYi ze9ApEQZ9p~eMMXGiYjH@)jcwDVoO>L#tzb0M4u0g z!$Vg%Wgepd?b2hHZA7S{zRb8y?ZFlw{+{zN`KqwGpEO^clE`z5>-Vt@MP3A2UB1iR zc)D_&+B994uH*$zS2mq30v`4C04U=*Lf@VBoocz;`j{bvnL|vvRj!gFy_7g; z&+X)lm3#y(O&9_UP}45`y15uQqqZ|+)5naSu}I5@L7=N zeS{GyD_N&hg%+KY!Z>-Xa|pW!$*1R|o}@aX9dWU1LR8CKz-S@o=6Dm??lA8Ig?KNT zlNVp58uxyr#am6V%SpWfQpbIUis^a9UffVnbgwVtLSOCJ^hFSrZ5PMTxv3u|DnUnp;$V{Gi;_;4f7XgN z3f!X%hn94hD5Z_(gcmbF zAT$O4tdLLCq4fb*8B$B+)D;#ND^N&i#7{d-BvLvV-yFxSrnf#5kNu82D=)I&MVeLY zTY6L~y+@;xc{#(jZ)vjHX(n>#;oHu;aXz}d!@d2Q@2m$}Lva zD;rLDn7V^pOhpxldI*$Gpoeo>(-O$hI4B#tBv*sqdbQIn%4cz!*_Kzr7_?3c5h{cv ztY2dPp`jU!BN99yWJvoE9oM6m{c{dtT}Zq>KSW4HX%5AnvqXm?RMk4I)T;Hq$$NSMR-bM9Zu5~ z;!-Wb)xa+x!wdW)-#e0ZiRRj`!w+!Y=+r3Rvp~fK&l#Pw7;O#aQz~G1LU4kk#Ty12 zqi?rNbrbSI z0q4Rzlh(+$wQQ@u2WCDBX;jC!DS{i>!>~u~`1Z>l)>1irU94%TR3x=N%28ZouP_l9 zbag=r%50O`oDHfRZ>4`pa9qHy3o#Q@Ueqh)?z9WYr>Q7MY_|_yYpjJI)U0WjB9Xhn zrl!3V9w*mbfNtG?QaZH5;8^KUE%1l;jFuG{X*@ry=2zIm;QNbR%em<6$ae6?M!`ox zNKgo`YN=qE`2%%~rWzn~R-Ac8Ep4@d9MVVL4Q;y6?<|sTs^3PFMZ#K<`o&d5e3Z^5 zm8@(x1=)^I?oY_%InSe(M4xye3Cp%rF(1qjcq<;qKa35lF8ikheBmwk=OXw5dn&{2 zY^6UgGYz+U1!WoA_~x!cj!?H1T)JyEyZY^Ma@IwebcOoGE)FZvYIXjI2O zR5jzGq_*D|W4*rZOrE(U3+<_NgiCIUmxiHW;ygdshPiO<66k_-`-Y&4ZCF+}0`BrC zVxp9B@z}}V`nz5}@l=z*k8h+ zguK%k(}P@cSIib_C{HlPzP8+_v`u5^`2}_%%oZ`Et5jJ&$_I~WJf#F7Gc>AbS~soi zgH4z;EXh=%y9@7c|=x_vV;BS*?di^vj~CazRw%f=m3d)#h&^h0Cq zZG;{9i5!`LcS!5jD@_e*M%OzBd4F98oe!7JF~7L7zP7Zl3Pd0h zr@OY?8GvQ!avy-LPsU~0O@ex*W1DzwTFr&VjDDC?ljNUZiqBu&P8}z zfgH~fCIX6Gny+kNE443}FNSNcchVD4j3MEdc${B9;H|--IAXeuzAd#lGktMV2!}s5 zYs@Jg8h1{;El$OjTeM*}HYsi)TqJv+Ksr{{zhEj)XxH)|!^rX>8nc_~3YcMKRVWz6 zLYoY62V1xpvwupG|yuM~U2L zPtZqS2T!BY&K~?Oi%`%RQ8oB!RN@fymX~9$nLuJ5Vhtu1GFqlYDB>b@LlF(+WZeM} z*_lq04~@Ekm57aro$dlHQbh_F z)#C%(!zJm!wb_y9EbkZ;D@?VfI;M;{mN!>HZrbQ)Zp+nZr&UF#qtdT_WTR}GiFMdC z&FqFUKaBoe)~eE)Va>7;(B~CBXc}rTEsd*+&9){T9SWT9prk(IFnP1Z*f7hsS7CQa z*mK5tj=qj{hjSX4Kh#NGVECI*&3<7)5X>Pq4x8wRf6|wI$pdB_HcaF&Leag!r@6>} 
zo@Y(^(e5qEa4AO2ZFff`D`H)G6|gkMDqf)mEE;FHvMrU4cb6wDSr1MZ$d+yY)!2-5 zmwd|s*HJxVYuh2qSylzl;T3;C=h`r)8-GS_yh}*VuwA~qU=fMTZtw#VtdH&t|cm36_G8vy+1>>+L$$ zN)~B>CH6Yv$z|s$?+6~Y&%V@b)(}h)#yk$|M;?+uMx4Z>e7oFI?H zRrC4Ya`rH08v01lW(?sl&hH61TMtBDPj_z)3m3v#9F|F{5mwQl!}UAir7#`uq0YPA zShgSr;CJaY=E8fYX}*cDbT4t$B%V2Is9c74+|+O7@k$U>KbABXKGun&i1z5G@XR)V zc>)9IWp;20q1Sv9f>NHtr~0r6Z(E%B&L> z7HATk&J?9BIRTd@WrMh+lG4b4k&emnZtCc5yrM12mN}p3MrrZf2r(1v145FbI_2&N z(w#AUia20QC;icEDR-c=n6Zr`;Fd{(OGM2BsoA&C!iVDMfW&SMTdgW!7X~?VPj=!^ zf4@CYuX$d_oExb)5-qF9BvcsEbsRxKt==Amj80a*pzTK$C~5Og;rGI>2&cZ|Qikiu z-bU!Ei$dI8;A(A#<}%cYKgtPq8NiLxdYOMcS4Q~rTX4}nY$0{f5z2K7NAhIeF{QB3 z8y!I}E~xSPT!SsLG5VSrB(Tb815-u>MF*>63P4;QoN6T zZI)GJvy{2c$kKV&D(XR~SUI)+X0cEvOAp|yszbrjAPSa$?3n6LKDd%F{5?9Nwl>9I zqPQeyiXPu3?DDPKu6<(7S>|A4{FL2aWTcr_Ivwt*54#?@&jD);puAy5cn~Rtzr(>Z zX5{Rj3C&CmAXMd`ERSrKi6j|npdQ0F8sMcPq%f{sZ;R+R5|mC`01!%+O;*=Zqd@ zm6UYyL5Jg(nYKp<_K44i9C;&K<{5d6MRS|gLtWi~hhDks>9PH>#LP3OgwGMF*%4s8VG zLPJ%I+b6mAiOi}cnZ<`QLOV*~_R@QFevsxKXj9IZ<^08bE-R)y;diA{w;qWzV#OtP z$5z#jxlV`(?)9}a9kJs)5|>W-%_*u-#ckjY>j2JiuHp0k&<%Hpp!8&LYCQ}TypHxw z)04bLp~f;tc$#3Twd1ru8k`jCW)I-y^5v4#*N?(Sx9wwZO3G~0KfV3?*ZvqP%{by| zsXdt28i3sCsfs9zp!r}kBA*7MJ?Zyu?7F$Aoc3^^=cG;6d{uSPq&O9gs_skV3OGLK z8jI5vf5lhWAW?-K9sM%Y{^VPKwLP3Y}Y&86Q8V0Fem445fzdTB9MWos?S6XU!wwLQ1&P#nqY`!pxlTeSx z8jmz$c4UYXBymZ)0mCzrjKKva5#h)79>8R6r!L;Sz=HK@7CkgTy!OH%{=zYx7lwb! 
zZ72Kb0}_y87i*`bqaoUZAX`Rl1PqrtMAF!L1PioBt&RyThWxlP%*b*3*SjyEEmfM% zZ$y>~PEd78bQewtD7c5TIy=neCr=tJ;DgfZh>_!pR%m*3~1!o=+OQTLey0Z(Y$`U9b)-}VJzqPgBKasLX_ zPNpe;>f%UwoVg>qw;$k(0$@)D*WCBPu#m?ke`}nH-T^B}v`+=mu5)j!3--iPrhRQl z6|4_A^Aczwws5-^IN&tBK;;N-Z^eMm7OF*VY%kHgq{-=?+FCj`6%6U_&YVkZ&{3?| zi8=#c3RQ{oi#w=W^h_>Fx8$hWIL@6^rxGMN;WebGQp`Yf(7@2CIB6%O(+!iQ?4L3- zLqGYxC-fR^4R&?Ekb~G0oWCu!;QWsMIcjyB5Cp2+fS=1ePNFp2j6qXkeCqu*4ctIU zM_Y;}+Oecn7LUv0iVycvOz+?8Xp>)vmWeD3@BQn+3!e1B@EDc=`Vqr;AfK@2tV95P zw1vPKFz4Mu4|sv1D70J?IT3Gojiu%i8Oj{@4(A>_Lx)TcX5(IRjrEYt&^SK-wQYiR zMrubOz9;vBV?)1GM%XgqhBEcaUrUII=?aXxA(N3!(6(jOdg^zN(~h?>UA!+gWYcE5 zByg%FSIElAuA&(ikV{`{rPb94v?d-N#clrsTc*vFsjery# zzW07DYL(xjlA66JY4WcCg2v;^^)&Rs?}bC$1ELK;*|ZVC}c znGeYN;zEdY)s8?B_IwsM81dlyQHSqxj2J}>3c+$s_@_C4K!}4&eGR{~uVKpgn~7Ra z*Mmq5lVcpQt0m8Z$ILSG3UL=x;X7-1bq#*SPf2Ou7KF3CndD6hMil1Gz*I~gOje|1 zX=h1qu#H^|?@Xy;LLTLo*UI+qu<7{+NX*=4V&t^2(5h(Yn}9wEFoIVz3JcXHSrp8n zU3;yZkR9X*3!72n@+{8|NK5&}xG6JCQTQ>qz*Vf(S3J0!g{kgX`A@KlW-`+X3*|^0 zQbCuoG+9YTnL4u79)UMC&Y|u7^TnOtU;Unsdf|vFxBt)K7lR3NWT_*fq)_vDr1Ro1Af73B zs(&;2gf0m1xW6+FBTlJYPD^;mDjxVVxVbVIziG#bgXkHcS7YM-px?2}WM)%o(yV%q zE1fx;$7$QRrh-|cXOOFbMMdrXtku=0aDS?DA>ftg?tJv7#!H*|dO2BqnHiW#^RY3I zrC|5!e%Ilrb^Sh+DIquxJM>`xx-X?6e3_m#oY2ksV+AoyUrfq{THjQ0mbNThujUCC zgKR6HUce<|^Wx$30TyvnOg!qo#-Dp2GO zG68=WZx2~GK?V}59qt{8mxM2GQSE2Pog{d_>xH&~6zG?o>#}Zd4!`K9(KiND-H<1; zvDS!4q_OAUooMz{_FXiZ$FT_S~I1VaxOxkA&7a#8xca zmu4;xMC;b%!u2bcSF(~E9@`XX;Gx*ZIeT)Rflp3bqk6bYU8fltJ@{KH1y}O#QhvKr zoo>*RVq2pgmtK%)20wboy-E}&l;Jv~ZPOyqxR6}!cR9Rx?d!8d_8x!)QDsPvN`$H1$oXw{keWt?|1h?*`ri!PV$$i!D-hIbA zde&cOr}~au52dC3Nt#ydr04Jv!|SQ@{@QJgyzDko6|VNb6ezgRLdpAf^ZN*kmv1iR zx+ijV7%^^(lw~rL-Z;!hgk>}YFJuU;oWHv_IGO9qd&{dCS?F>#+~As!E~K>m zkhm^*Pno7B?W3ZUq#^kXjr0p>$zieU^}^qL>Oa~3M;v={Kgom1q%NwKN_`aa0fbGyumQs$Xr|HB9`;!C-Z9NJ)-`JIrI2)}pf*C*KG z0x--kY*p(OXKfG5)~CxXe7Jrjhm@1wWugD=m>4xfX6T}z_4l%Y;w%hzdJp)TN>6XW zN|-1AnRS`7ze097bzKmnxSwcwVl!UA?5q$Y@8Y?HHA-++{-RDx)2}W|k#(H%`eQG5 
z`3{E1lbjz6MmO~;&jRY{yB|)w4-d?^Ru`-QP)pu}s^~%wKR}(^h#1c7VYVdnEToxk zhY@9YpA}nHCg2{on|?9I_}|jX9|cPacEVx6fFH7-IOFKsz;0~0LMJepbBQHpIK>+w z>1;66U|XcnGTZZ|Bmdf16iR-N{-{7F@SrNxZr}qvCZlK{iRey0Eu@*t2W+h#Na7riFZ5aZ#}@83F`ZPv{F?h2|XXKOYFqZvN7sbJx&<|lYezr7D!UY;u# zABeywAls+Qxd~upIfu5xSowm;@r6@U<-Mx-sB=!TLfiuQsB^g&Ux+aEBR6A=$sfEHFw#gFd>~y!yVGgyh^?(WuX2`|2PBp) znPIUX_ysD6W*Is`^MgVJM>Bt_HeR*)h4`pFt|d><_YC8es>I|wO8vs7z;`pgEA}gP zi73j#P|NftsslRyD%$cu{*+iF|LD*({vb&PFn8C-Bj@c0Z4S;|8MU#WSU*E_-Q%h$ z?4%uY`idg28A)>vJ-~ex|E2A|$|q$3yc%2!@Vrr}EAckSy{A=$7Z zDlsj#QczOdOwM2BDdVrmOQ9-#j!?htUXkuqg-|L!JK5ax3x20KtD=bc*TGgLo~9@Fu6f ztF*BWH=l~|UFduY2o#R{>r!Bi_eZ4AVAyksS2VSGIS}|E>nXoW)V@c;EnyC^m63`o z%|8#N`;Y=|oj|tX@v2*cYNBc%-}SwuH5*uvg|3*=o33&_573oXhJtNxx($R}k`}9Y z!awfko&4ZDFcjPz9V=3vLUC6^dh$9X-|=IQL=qohiZTijb4biWZPJ3$Ls?MYnCY$0 z2WTT6ro?V^T5Pl*(d+3PrE3s&`LJxAY9jdQeE%%OT=-KiRc|~F5ZxPFo)ja3?X9IWwJ2{(^5B?XYUew064ZS${-{DQgD zeA>!;;Ww~2fjDOj(>kU}sXjeon;nYo+xjDOk9u7QOgB)(e`2oZ%oH9aljA>dxIlJ8hb)o8-%gF52NgjUxv)rh*oUE#vWvi z_d1pz`=Ed9?R_w%=m(uZAOB5;r_$ErkU6PdOA?rz+n-&2jA z*Yfp8y;I`p2VBU_%&O3O@Wa+|%f3e;{8+}z8rl5k^o?;Ki%VTZBjbhWrCd9eBj-nK zewRUk)5r_`#K~d|Q?Fy%^#Bgt8{lNLaKtN+LOGM_6yUhLennbEm@Utu6$5jpoN@26 zUj;fM|NF)6Ded|msjNQuf?eZl=;66A){-wcd|L#q_@;S)jGAKg&i^UKGF-D}^2GPY z?S;cvD#&j@VWx4%i+a7bs5+&VMNg+5opFoQ>GSsMkNq5a$SeV_=hVH0O>X|?i-M%w zYmz3#7mk=;$m+FVZ}t%O{o&bHr1?|-7->aJhOp>Uj zjN-M!J(>J>BdN0(K!q2yBXYLl)j;v!)gIKZ5Aa3q7R)XmFixS<-Qh2S9wB4SN|nZa z4E=SYo>9D%<1&M{78IYpv^&kP3;UCo=Rm=LeuqB@mVzS18N3R&V1w0I(-~WX6@zY~ z-oW`cjFss99wDp5>K*e?Ps@Yb^)4LaizxSzFXag~YY8R>)tpZUd0i`bqUh5Ha%J24kh3{t%@`ynVyTw&sUy{BQPQ*t=M&aP5<5 zNOC%+Y@12_9e1hQ3v<~`BD7k?~w_Xm+mMRUS;PRzSFI+ z)5B#w$uPQD!J)fu+<5`~wAx2%5*Ge^O(*cH*}YkXORfmme{EmAaDDWW7jxA#Vvp0b z^uq_c4-&qF&ItJHqLI>4!5OFa2f@C6*3`iFLw+2)UPYE~T~!{xy+=5TI0C!HmB;p| z;_B>D>lbj5ZL>op3aWkNJJ1W|bX_tR-=0W^OtkCeGXqKGLytwCDVsO`vf?9D5gUi= zdpQ+(C!_3m0-7p8b>xEL*eQ3Q78IU8rjEz_HVSVl5z*3=npRQTL^nx#TX#dB39mvrFkPd3rGa2(dO=yB!XW)G z-S*yeBR4uPEASOk`iROn5G`QZD>)g84ezCAgnsk3a44xixB0>cXVXfBk6z|k7_}NA 
zFUVAjO1&yYbX;6d`xc~@6{=1(WMOP@`+ByQ%-U&d0M^Jw*;+;Mc)Z#wwilf67kZ4c#b43-*AhoS0-yQ=AZ+=Fde%)qk5@B7`GrYdclP~S@&JY1T{K+LP@Re ziQVN^$22XDHS%zEiS#{F;xwC1qb7aP(efzZOdn&7PIReh1-o{}5K#g-uRjo224upl zmIX$%hU19SX_ZN@%%gP4BAHd{)U)Z&tPU&hkrwQ-3F}j(z>%1AVaX(m%M1;0HRa^T z9$;Vugi^=PuO=~@u8t(}WMCMplofz>Q$_)>q!cV0`B(#vvK6~c+D%SXMlcM~Br*hn zri4aKvsc2z=nXXTAiHRc8z?<*vsY5JP7}%cAxk+TbMJCiX~NNsE0VKadQd!$^*+`G z9AacVPqexiZ-NVO){J$&z39*2W60d@F10j0mY~sGESyX}mV{QBELE9?j1ps7qFGmQ@nXhN+Z3UO!5sCgD6?#6RNQ zEYJ6OYt?!yCj_|oCfdeyP0Koo6zVcGh!tb@Mj=96Yad~J_(}s=TDHY!cZxBhyf}W4 z5w||b6jLsNL zU_ot{4B=sb<7p7q*^9cgsA$uK966<`Czo^fHu3hSQRnD*z}FbnAuEKa;wObAi*+8) zltD;7##|hsF7^0r{7CYUERh@CNY6p1)J7KvcLGus3p4AY^aqEkkPDAgt-*@eD*z=k z&;D$nQ_H}xO{R;JAyOJXW;<0Y7kh+gnf=u?n0oIYxMGK8)g@BSSuPcgR;EDBJoY-$ z%628~A?B=`YAT%mTuOy1Br|W?{>AGl#Yq@)^)%r9uEIu`H%aFFyS?s_zYbvf0z(8V z=_rjoaMc>+u?XFA=^m5)f{~}DV>VEXO2p|KMEeY{X`o{pojQ<@?HjWlf8N<%bb5e# z_3OaxxToOwBIerRuzAb)W3kBC86E4aCQj`|&&GpV@K9&Q2VTZNEeSvQC4Xxf#R~@a zM#sn#$NQewH%b9W2qeC-*a!8Ve}dC*ujurXGQ2DLT#QDhQ64jgd~PewY?8RPFdtj#Aj5|F>TmNa<1wE;1Au3Y(6fJy zv&hyyMFLBVlCN9!?HNQ7wCAAeOPY+l7j&^Dw2C6|^I3yCspLJ4OtS`KpYLny*D`X+ zN!mz@YF+ph%Hyq7w@?P#rOHhEeS3$)vh5IfsG)mL%p)L=QZd3z?$h-cvV>F%okje?}b9)en|e3 z?FYn4%@>%xz|%f_4VSWCjD|?{@Q}MyHP!1i{W~&W6M4-uyySo6v(5x0fI+( z-ZfP|cE}e=P^gycpMb~wT9Ux$9`VSr_}F5ZDVn;@SS;*QvEccbf1v1PE`O@bY+{it z!>hsO_`=}9Xh_-uMw=t(mJHXpI$cWKeF}=-49|=c6^DaPi^T9`=>*eD&N3;!HoZL zWgZsek)$_YAES%Z&x|t^$T;*zk8qashh)P39K{z%)Ollzz0(7bAC#$^m{~4=tozl! 
zGbJZ2Wep+EMqSMn*K;JvVV<1}gPsQCmzq10eN^6qB}{5olnBb6Y)h~-8hJhrdPIhn z`YFrNHA^N-|NY4DJ}r>&>7qJe$9RX@|L~(#PwwJ^pM0%RIafga!o5v-Cz$r4K%`L*fQFFd95Us(c7oaNt?zFRh|`a(N04 z8iQPx{le5!%ox+`gt4oVzVXpSgUf+m)EfjD-b)3qysz59BBwPbgnr^~ZszL0pqdVg zR!7nM=J-Z7MQEG{2TQC`O!M98oc@zk{mO2p>bf68Y|VGHQ^ckpe}P%4NnIRF`oiDZ zb(JS9_xp|rw^xpIvkb=JnSHVCRMb86UA9_Dx#)z0 zrh}w0O-twNr%nUMgz+44okx5lE-Zedmu9Da&fmLH`)FfZ$Hmtu+D}LY+vR5$wl+K@ zk}l}AHB?$lP=4jw8{qLoe;cY?TK|QkkV|jlkbag-^D(k+F#ge{^OJ=7qA4G5QsG2S zq_SSQ$6udMsF{HQaG`ApB_ks!0fk#q=kaib`f>W0YTiJEd4sCsxEZAPFtc=BQ+PQj zPp8#EZm88n;WgPzk%n8LLZseR`pJCyJwx15Phe^fX_HNx1Ck*@;1Z^s!;Q@R0Ovj~ zgrth8Gh#$Yb*(!@g2ms9#ASy$AF0knZFP^ai|;3^s?NTr$*lZ4QPT7W{zqj0v#`hH zbX;2=`r=VH@_@EMw02US$VdV#2)kSe>s=z9Z(c4WYV64`Y(}%bMJx40? z-tL*9GiLE*Yg1;+zeT>vbNmWT;W&A?^OpcL-&{me&i2tWVZJfxLisNH*m%2})JLjz zTga9ZvImQC|L#oq5kj{68jYONMEC(+_|@)OqYZ_^?}Ws6)F~w+RZ`m*n9-#{B_f|M z1S3-=SKet$;&A%G?v_tU@j2R@9gny6$ecXsSByJ%^2YZEt4SkxN^{6{s0Y-Z1Z|G?NvNlZ8=Vw6`J66cNLg$rZ@spP6fm24pmh zj%UU(J!*Du!PM%IzG2IFex~|zsu{i7eD&8E&ttSjRaRL`7$=v(6q@F+!Uv!E!A}bBwKJ;RPh5U%Q*WRCQ>9X6?2-?ZapJ8{ z2hQx`1IBh@>6cilGQWKDb&4=XzJg6(*<~6|-{4aA@=r|X(y1Y)xW(rz*3Rd3_ zgw=Pc>R5hHD8HZnmtgN^8e*g6!!&MAPy&V^Jmu^+`^q}z^q61sH5;8gO)I*s^QpJD zTIBZ3Tf)1LL|aW?p&E-_62ampOkJHTHvD1f>s!5LpDKS+N4!haqBqJf2Amew1b^v8 zYJAW~6xz>P(IUG}R;YiXrGMpyk_6|&ZvOezW1){(PZzEDM!Go&(M2lODt)VN7ZVd+ zuJ^+D%n{k>d%*xf#3La?`zeX*H`ebl%W~!6{Y}f;pHt8r%bnxi+=1BNuB|=G_qRmy zklSvZ9hORV*WM=qc>Q9za<&0F*%4OP1OK*<=W?eVv+2eY^S=+g-v&`oo(j9|{ z>Nm9E;}V|`a!U_2JkAx!ej`{?`*uOL^*4dcB)MDMsZ%e8T(BHnu+IQ|L8j+B0O++> zpDHbJema=3_;c|D0sp;ltNo#j?Z%=_p+m2ZgPNB`_AnEo<8-h*QVNtv)kA=cnS@)l zIzlBdAe0&Oz=`o;%_DpD7n9(~Vxs@iN3#QUZ&_FAZs_aFkLs)!?z6|Bm%$PtrSyg1 zA%b6Z`%HMb`;#1my?&+;XnRtlV*9s5Af!h2#BB$@KBq8(Nltk=>=Umu`1c9U#-JL* zsa-oA!yb>?W!}2L{L~&4doiQtU<|N>X=|7Olim9Sb*q<3FgiX$(aaF(t91v?Q5f%t zyKQiRs{7V^8dj(KN@T%>oN(_o%Ovqc=s`pLQe%ejB(2}FU3iEAV6(m%aI37hS4fS; z$t=weYP~PQeZ#a0>Cv$3m{iE_^RIhVGC>j|2&Ha|wwr~WW`lj9vqDnNQgPC=lH{iJ z;)7;ZIC7AiUwASWZf7(bX~Tch@>gasl6LeQEVNRk5u_)^dbB%{nY$IDb5CfI2uJXU 
zehl~Oly1G zM@_unMF^VwYriAd^*6>Tg|@}~p4Fv8m%T$$$1N+|F3wH-Joo}^GQ8p1y4*frp*woL z87b3*NapNMSJqKIK1joNW0)*bLQ^ClVT6lLQkKi16*YDC<#gJ;~pJbI^}Nli18BMJ9`9gle~2mmiqyIs5>xv&n>V9Rut zC%*#@ZwQ`QZ7p62c=7&{x?gOW8IH`#Ivfl%mt&X0Q`UaSn287=M1Tzz0moxId{5bJ zbY=}KlA+`+Ct*>d7920wQ3m%;-)B!!3pE?|?D-fQ?%aBSQMOfeSg^b@Ha*uX#}Q}B zeHcTcMrPtZe5Qe6#imDDKtG%4LdZyK!N>-{+as;S0jj;(IzKx>6wkl;8cOY)s_j>5 z*&)YmiK6DV3@yII9UtQ*B_rJzxf#GF-Rn?&Ik7k@t8(mh`v6YE*$tQs zN@8=&6aOc;2xSpVAaCDdGk;fu3tL zTVmXohi=I^UnDyjJJRq@LbNpC^`;15^9n3gP@ek%cgf0?NVK~S!*-QAtp5tybs>Iz z;b)S1DgRz+lwIp*O}sThkRrv{lsK7Zll>@V)SJfMZzM^a$DD1fn=*nm^2FO`S7|6Q z28svYcdq+X-$V%p-wfSwj-xCB!acLmuhs()B&(v@@+&EAI&_!lU-$Kak=B<2jT;U` zJJ<|dg`SV0g}D0$+w}bYW3Z~NY9(6VV@7;HL@d+c@2AECu&yX_lL7hj>IoZ65O!GD zGw3_2S0Z~ZVO~MCD>XmP1wGM*2f^qDqX{{grt7U8m$Hi)6*tJDKtko&V!^=7p)v#Y zv~U-y0e{-#Xz)?a?=2+e!~7@Y+7?`jAuG-Y9%&OswHk7$a2SABL?#`muUSi9?>~&#+!B!USNFIBTC}ganw!G z=CfVe8cDaMP(L*aMe%M72_fxW{?5Jb86Bgag7lkldel8RAj)^hWN5v5zz zM{q@%uSH2y$vdlQ+rJZ=-R-(4WM8asR888lsZHh;L{Kk+m$$bY-1rN|BeopNn>faC zd4qGHwN-9OOh=B9ssM=a=AKlkM{v@WAJKB~WwE2lOM+N?nm9Um=Zy^C9XYj!npZ%2 zKI|S@&mw+O3X`38`lpof1B;e>Zr#?0?y&zDgP>&lbHsCV{4gPNAUh&*R4l)aD*u3- zKJ^FEj}Ig_o>lzp9XZ^KOtZY&Gpn+%PHJjNUSZDTNWO6Yy@{17^nG0HqvsiYxXpO@ zVM%r>H;{&Sz@)iYvNhTm_f1m&HUlWBq9#x6Y;tpu7sN<;0QvOyY`s>k5*vHK-})Ey ziuPL?DbjX$<1a{eA@!&-hA@A}BS?>Ie3c7*&6-_W&K~sjMdSwZHKhJ^B-`c8t9+~9 zJWPMxaGQ54YVk5=Elv(a(-Qhzy`W9Z_4}2$&uiVlnnF-HraOLqnPzeRsa(&`pYgZX zub{E9{8!>j+6_5(E}Dngynx~bF>Fg|9QnG^p!ULeJ2v71MvE5bn2b^C)v||Z)m5r3 z#18I_-Q-KhK#47^reU_I@d^Hok({KA8pn}n2`JF%)f`xvI2T`3_FlafKZBQ7+~I}j z^3?^zWdtGF`ruUD4fTr`NegXY*r(r`d>Bs`U%Sc`DJMvr^}PvWp&@N#~B(1qWrvWm@k@W5XVn zinuka<*M^a471FSR1|n5lcm&mJLxfQ*|-lnZGY&${SF|4r4t`G{P3t=ed1r64E=GEK%apjNoJgOvDSo(ymw2pT)^T=SZIIz75PRiVTwDGOv9!5pp6tyQ1Qlo z1&(MoPY+?z3Yp__Q^f524}lf%eX1 zDG2_5+b9C&88HX{$y6f#6PXIJ!>4mvmKV z#R2}|OKI?q3{uU=ghcvnxpW34GziFLO)i`N{f)n&ULaC;^0)qX6irjRS8&@>ZzHsPSni~0TN}{E(H_Q>@04#N0^cLh5}cGVGw&kc zZX?iSSnlrJb3tN*KapDUvV*dPI@8kZtVjh#NwF!1EltXy6D3)gcE`ax;3#{o%FXl5 
zN3snnQ#2DZd(%@hjLb;CE$XVKYs9C)YmCI0@Qe^N@4kcb_f;1gGeNru47t9gJVM6& zBF6mR^_*ljoOAib(nd3X7>CQCIl;Rl%E?Yf$xX=0BGAdvr>*QI$Uel+g|GZ{22v(w zj8P*ax5mHc&ttSq%x5MVwY=xA51@6ezbC3PP%Ul8K5v6*Xr%lvebFBtb4l3$r;hwg z+l7tnz`(qc{)0MV`|kn})CJv2sMhu+XzgiGa_QshXhs=C4$Z}Du8<>TFDfPlUIYMn zh0MT0NM3&oh#?)dL*s4Iy%~SY0*VyrClW9GM_P=0A*0Feh@q?Zkzi;#EX};CQO) z`5JQf=#v88E8CWQ5E+otY5o;8{6ihO8ZKr2Wx2=OQHA67c!Y1C=_AK+rLbm~=`va9 z5o61DUZ#fo89W5M`*c1tRHzy`^y$8-bySy z54*gb54VV`yV!8CE}rD%-OB2MDzPEHn-2%xMd{e5cWs!lPT>J3ZiWfFzeb01*z5;d z4S|b-HUlKXIgT^TA+B%+&r00QLBRphf}c3=z)h(Vg7AWbqb%orV91`ksD5UmUz&MOHkHuiL# zxG$>HCbrcLoT2VSvEcACoNh*Ei1qj)yRf}u)b=nWekhQvK|L4p>$)Lf;16GM^0jsz zXGbji-MjE2Hf96FlQ0KO#O6PiJNvj}^GvAB;S`qD0_S$r9L84cr)a%+(I{LmkU|g- zz%lkgZeG|bl0J#niDUcxD(+ZwJb~6;V2;avvlaIIn^?wf)T*_Xvt47qK(yCAxort< zENCKzaMVX(+Yhbs@?3PbKQAW=1-kE8bXK_rl|aG@>&Ah3q$kQ;;zm3eOORV)h1o`L zB-4LkSFDortLj){D-I7*;8(Ju%uR6fKpAg>dFq0mK%RRY{fp@+-pb@Zk?gBuH%L}{ zIASV@F7z?3QoAu@TQi=Rb6&hIv@LmF&zHtSO61^(`nhz)|1Nt*kFURTAbEZK0E|k@ zc86U!gGI?L8vDc9p6^p_IRsa>*lx_6lvv9YKJ{JjtE!SX?%>A|W;|C%}uV!IBKwFnFjysX%% z^4u${Cr8{aYj|^}s-j#?AV8tAcZ6_1C?PEql_w1ob)SQ|g=+ zgs1q%yo?Gcqg;}$a6Qgl#COyT%+hY4ATth9ANVg)=NMi}DxIj8QApOHLLgMD(bizj zguUYi<4@JnJrMQFFs8ixj@o0zsHpN*FlIG5;vx&}$%BQ0x+YTEK->yukS-Y)dawT1 zh%;DPMlOqZhRVXlW^(&(4WDC%F!v*y5_+e@{}(|i$ozi*KtR902*(*{r85ml;2)cg z@B?kIKlv*%L=7E%0qf4D$X@n{!Vgk0qjKg5fyipAmu+NzxwFF8>vmhRk%_p`WQ*>~ z@5VSq3z>*JO_trKS&;1}SnO>L4ZX=Qk)P0pDV#IAJN~=cpCo@*-5+Ya*Ra(N$4`cf z5aA$xPq5T|+r7j$szHhlR9#b2PZ0Z){5?U~)NX#-Zl0#l=TEY{Brx1vHS?G@7cx|A zlh`w?KS3As(;#l;%QBxW2+9efmtrJd!eAAPbVvA9Vk-qN9d7VP6v6l&b4u17-Y9u# ziHY=gHHgksEVX3SasCHp25p#k-B1$!S^6K0FXVX3Yc#tXliAB3SNOiBz>n;*M}@8D zn3sPc`Qab*M)p13{ZQymP7%HQr+U>}x3@dpNr&VW$Eo}hF7T6~74+`@BHf^HQxo`Zf~`Vu zpjA%9dt|uiLRN!(q9rKakp+WGkj@&^rqkWymp5qUb`2^*`x@lqGVXpZHIfSy_kE!| zdmml$1Is<#Ft6%xp}-QY+spsj&O6%$KBeZUhU;Y;Y(ET~M*1AXYpTaXpkAr*??4WR zsg7Fj@v`U8?=yAtRql>w>OO8zTrIZ;#Sqh0e`R{*)tIVR%Z+?Ib5}x5eB1TP7nmQ- z7I@!Kr#m`T^vWN|{BO9Q(&2RHm)c&wLzf@*LHtykJ>akrs$bmEj<@5dI*!sQdF53) 
zAA=81hPJyGF?B)RD?25>_=nVpk2Rf6N&U9wm4A}mk1ccY5U33GKxx;qVAD<=>K9jY z<2}K-Mo>qH(E9uUm+>d+j&LDs)Bdx&8MfLa{tg$+eKd3r+d-7(2=2|A6W1Hz2Li^8 zZD^CA74H{=>^~rkzUfS0ZAsllj)q$G%oep=E(3Xra@c?J6$S0G2`?c%N5!hlyoFjN)P2;-945p(^IN7A9PEzqT?nEj&>e?bbkYr-e!NUEZP{5R)M3A!7ajLD%X~t0 zCo=W}UxI1Rjv_oHfXT17om$)_d99}FhX+f1CC(VDveym2+$=*( z8>JvS7qPZ%U!C;f`vQM!guutQJ8XTgG@dyt1rAoC%R2KH*c|Xq)LOtQW?MB4e09^T zRnvfA@2p=l?G)pgF<`Ix1~6;Qv_)p;jq5>TUv$1ZXU(*;-|HHKGZ%i*H3mR-HvaP@ zozm4uT=x04cV`o`ZF>iopJH|$F?S7J_s)7;@TYC>?pEF9#&!Rl*^1=fnJvhp|CQO| z3}d2I%oY`Fm)XD?OuoJh>ac>gIl`zSog?(8`5d8{mve-HBAX*vqZ9lSa_c8(NkVEV zi#Z^<$pEN$wa^#S-zT$CB?xWTti`5JrU}k3Wp*u^$ojKzBJ)9cLGhtrYt#^}1dP1` zz4`%IEZte`&*Jf{8HX5+XlKzG&ql{+=uJKp{PHoeFHKeo?wMjOdJ)&As{}rg3Fb~B z1hJN%fs26(ZSfwGb%bOrX3lJ(o@^F+L2k`3oyZ!+V&-lZw8e~Il&dn{V==R}M?|Lv zk~%{SVjOnoFR}i-^%CPdi9%4~MMtMCd{qySXd*LTVvzD${E^U~N2>+?2`p`>$e3rf zatW@BkC2)vK8WuuTBD2#i&+E1 z44F)o;oDae27ZO!Aj8K40k0WkRw3` zxLb%({i|;4P=6s#*iUbS7>=Dpbw}U@25J{YFe8;{KU)lTIMwpY4QeB~L3SpxO4C6I z!aF5%an(K1E=o}BO=j%^)fy8*NL+~fO)|gObn5ivy=9&PyW+iNTT5ZuPuBSR25j>p z)wzt-%4g8Dm+VL5O#XM>R$pOWajoI!Bpq)n_NXYi*z|CJev|}ypNJ`x^-BFkF;es6 z`68bH$%U3WQS)=YLUd+65PdUhcrwRJk4Yx=7=R0R;jV{3jlH-+lMzKr@GCI~} zzdTstKi8TsJs0l zKMV7>{7l}pjM%i3yWBRNpP*UY$&0w63^X6|*nqgBJ8C(EwcKClKW+zELR#m5c78A7 z#=jT&8?Imu)2fNQH^hF6K=aVN-l3La3Z&sA-0CPPh!3|NwIA~G4rb3%X0ED1fVI1& z{xIx?7J=P9wA<+}=8F@fFlt(^DVwtm=hu>E%3gRG)ZZ#V%7dEwkbzj@qDBPpwKBSqjf13s6W*H~U)BJEBxe-=VUfrz&P2)Ph3ih zLr{u4Y!i=}$kfdST29pXW0ue;9uw)wGh8&)B|bs}=l)SalRkktb%@s|asa(nr+*ha ziRO=F(mIJOdpO4tQ zqKm`$7|EP8%n8L>`fI2V%GK6W0^baeYtpc>QPz;$Oyvy;2KNrh z#r4D51iqfzwQ!{7Wr`Cd)SI~qanX2tf0n=Dx}VDYr+UrH>ZDH@KgF{2K9OE6J;>%4 zsJ8Y?+i$Iv-FG|8&xXKOs;};VDg1SVPj=zjen>9)dev(^+sPO8!eqVxf%@i1oFUXO zB7C-Es|GrvbqV7Vti(R;7+m!=+#*~f^C=25?$IDio$-ebm?}!L1>%=Xb#{eFlNNqX z6g6oc{b$v*U-OM;`ev<}bc(UyY4;-umlAF13E|OD?wN=yrWABaFjTo4EI3Rnk|^4=&Js-wHc^|HnJ_2wizuji_I_t z_)KEXs4QPC^v6)o%@XDYP4?&z1P9^qvwm@)2q!UfbXL;cHw)xOH(2mOXw=RZ=P)yVBJO~E!;PWSHV-blm+1`3fi3w4?dwd9(bh(e}ZkTW8(Lw 
zxE7CSl)4P9Rf~@T$e;;=Z0t<)qK=Ekg>3rXa$9nRY~Cmb*+lr!9}+};DDXRB;BxTA z=rnnW^#VZr=p`0JuoiSm0d?WSP@2G$mzX*oUh}UcGGPshZq0(e{-*2|;0sv}k{Ss< z%zFv8DL+6T7mtY04)ypY=4_LB2y+ZnvUCdOUBdkE5z*L8o`(7{eEJE@kNd<%e5gb= zC^}8-FIp>L2X2%4!ej#Rq`E~0<8?#jT@0LrJ9;q_)HS$t4K(1blAy03hftfZ_UD)v zL9_~Z@&dE8ZmBn(CBW?r!snS)OWwz4CF2=CLK6nk`~!i`@QSlkSV(lmB*jq=#qk!| zk0n^{)q-6Ip7v&F0Bi>F8KR|uK#8*9KLr>*n+T~FgWoK}AzCA?Fj>1@g73ty1L&q0 zb+w^Qh$BdbpFeE|Ay{{ZEIeCn#!*v+R0zBdCfy?AoB}F)vf`p&x}(b!obZ-GN;oKS zO)Z76!~-h79_SmLXu+u!luzVl9G?XYFyuyhw&+Y(x|1U%v>CdeU!g!f!B81G8kT|b zj*x6ZkxMn_1 z=&i0WaMBXjFB7$qt|s41-O!1q?<-@&)@s3cTftmcf6p<-dpIs^?UlH5k3BX99k^$U z=6I6=G~w9*mGV)`PYs#(N=|n)pBhw$_t*l5P^f?H_DZ1o&vc;TY(kgr^lflnd8HCW zyBUIUx$EqVS-48*%|4UK;7seV)fY^h0qRJPkoY}aj{YIRD*uecbvFYjiQ}W%_*H=N zCwT<~Z-e;`wfT2-!MMgC9@w`V;_-pNr^AZuNRXo4qrpXafw`g_6yq2$a2(vp=yTnN zE;jb+b{`UJ;jx-WE#WIJA5ft$E`q|j-*P7_jrd|Bd+kGET*{{H)$ONSWCM_W5mRPC zh*C)VMpG~*D$Z_#dM&lwFZ$Upq@eseB=GN-a<a#;(I!;%Eq#I4EJ>Se-|@b*{Rb@XAJSnB?j-( zY<0ZirHc*zK1ez|O?5v2NxuwPgA(MMF|C#laUK%2TYwy}`Z&qlQ6!fsSuMNkhw}YA z(l55QJ2dBH%=T_+5a;&unsNRsjUNhwkR2w$;=n0_CE>N

      ZDXp1}keVu9t&zj*Me z1r_!wy-wzrfs%IaB**ZRZZAz-{u2)zIfbOVUcyi<9O}nMO8jxr)+ThgUmL=wDBARPd^McIA3FRHPjLU_z>{}6Tkjcwt+}yje{|+b*skB`~1roS9JtP>zlo5+WhJ+(+ItA z-C_!7t(kPGvF)aHD<)OB#}(7g{?oR1)&sV7`LD2*h5rs)!^+~{VT<_bztUFJ`X_D0 zMZ%^7i&8L958!DW$ZVD{I?ZPROjd$(RtkLyS<02d=oref7uuh+>_C)L04s$>>BYaQ zm{tZ=Zq=bedtspbauVeYsU=x0j4Rm;IH7s51P1pqW{t`tGQoHt92ep|;-Obr4JHGI zc{AQ8pA8Yjb6;iJSh2s1D9n0?XgxuZ4_kvO#1(!BxPpM%wd6^hldPfyBm8ar!! zblN41A^|1YuO29?!=(vHtF@Ep111I%3NI)}P!q6#tZxI~589$&STku+*kdsxdW7J1 z!>$gtWskz`!Ury9-SMRi7blZ&@t(jiI1Tj_hziS%{%REcBK%!NlW|?5`Z3@%Dqn$i zh6M*>HbpXGL7G4rXRARMO))5%c=H=&%%xh|6#L_734<#l@F(ExCrem=9HSp+LWk0$ z0J;=(_!RK*sqD@d^*I!T;BPB}F<(SKYEOt>JX2|guxgF`K)s3#zRY@I#QEJ)71HfW zW+)Sqn4l083uD5^Gl2qbyC{A(9;A%^`v9y-Q++?3B1J(u9w?VlENLYLp)hE0omUE@ z;&KA>D6koRf)`R{;>JLmkVN4N%ulw<6#cZ+F@Tg!in|AJZUw}!C_WIJTP3)UK%q^F ztyp?9iO@jy9;0UKpqX1G_Yf(_PN(_2PW7YL755RbQ5+%x-2|1$=rG)2m0+As)Llys zlbWq^sls1FN4!kojvD^xh3BgLR@m*|0zxLU>aa; z4tg7(XzuP*vDPKH3@uXuUC)^qkiF#U*w-LkPl#5XypVHMKmUu~Ks&h!kn>(BMdHjx zezpj2#H4`U3>f#ZRmcan%uCJ`f#@_IH80sk;h*d1U`bYD z%wUnItES)qx1jR|)F530Q}k=CI!-QOL2|JROY~cd8eE|AJ(i;`?Pw;+iwxFQFvqzr z=+CyjWVwO2Lr-p|ds1XHeOv`+eI(O4kBXJCnk#b=U-4Q8D3gMWb7jA{%0PcWjL~cO z^Anu{#d0lerr~9np?FPGYR4>7uXd`2>~M(>vj8rW6fcuyu%+>$LTT{m1(?enMZY*y zG8IY^)#5W%YT*a9K^HBdkR4pAs^xT%amTi5DS1At+kHqrU9(b9H`d{wZr6DZ@4k52 zw6*I&EMIs0e44_4=)eI`WiNw!mvpo>Fz+2ju7wnuIL{=X($sW!604JE2?Mx5h1y!6 zLTI!D+vqC3OAReUyGSLyp?LX04Zl223$mjnXpARBcb4PcYX$jlHM+sFID;uVi)~cu9^y(bze~|_2-RdQrsx>we8IK zy@*q^MRl+ExSjvJA()9u$hoHs5%i%yg51$<^-**Mj-ee4K;R>y_IT&l^ODUxt=BzF z;(N5a-2m@q+A2-JHn8vlm7IrrwPqTUJ8-yUUeQ5n7TVQMrFQ-+z)%PnQr{mE;8<|K zC7UZ9d~r7v`Pei(UxmkE#;QZmt;*FkKyLGqUh3l+id*;mBvSd=_~LZ61ABt$*KITN z9}RS^`ib96%BNMX1G~%HZCh(^cyxSrufTuTWow%{yt3i(O&tP%6-MF}==ma=#Q6Tu z2Foa@UkmSs5-;XME-jhA4(}=3)Zy+w!jJzf8$gxE#acAp^oyAaT$CW&3w(OG?FPg7 zf?pBlJsyG-s(k@zsv;lz(y67U8s8Sk^FEc%Um&%>;idA z<)3Q&+uh_KaQNEFHw<*_+sdFHGTwyPsdI*mO?q<2kTIlO;7bFm za^}82(vOl(v%LXv(0;3Vt+@_2z{ijZIKYCmUIlU`MYcfxSA44~#sU}Ji2p3Af>#wt 
z%~~~S;d`BwNTe{=x+=T)&iYm3z*;xXSw)esKKc)V)S8T9UvsKc0B&g9SVgpq?MKWe zRNMIO9J+Pq5&uJ-SNiWc!@mUC{~q5ei+>VVQdIFRuL>7MvxTn@hPP_vm(i`5{P}bh ztXlMCXsa!MK`=Mt>|!vkJzJQN#vmm)4A8+KG&JszL_|L^-2k(Zl7^!SSik_p=yqn`m@$6P-?S;ff&PMRr$DHeAnJ`qufj(#u}`6e-;5aH>xy^aO(@;;tZ84 zvl>kxF0JMJ%3kz1JjnaBDn6;T>-`ZR)h~xi##tg1A7$G5qkJM1CT2e=!@+Q&bFD$i zSHZxTKs~;Mlgcy2W?9}LHKLs=ya@rvacctei{FcUPnm*+&6vEo4+V1q<(;X@us=*e zv?b`Zd|PqwO69k}D%)j0`>r0Oly=RRDbxTveTHbhZ;+{?Fv@nBCT%0UPlGwDl_zN) zj2sLRwB!*KWCQ}maL^v3^07IBO{jRW>65t35*(E>r#Q^}#YQqjf>f#fFg(}GZkrzD zD-2uzMDA@pUU~U1n2vUW=0{M`M@fQ##(q(CbuWUl0~DFF4C}C2$*fw^M_V!hyTcqzTS{VLRkDJZrjB zltywXqcQ_bL9hI#%*}Rz&+M=@Kw|k-y!f#iUkMN@kI~?=0l=0h=Fyt-GSix{QMDo@CCecvo>v1xWUMxCD~+Q8ZVFuV;u+I4b}ySA;b5S*aP zkESYoB6GTnZE`aU55+y;uNA8Wr!J{f@loz9@*_d)*;v)m_R>8Z`=KNU_p3biSfHIDbr&$KZ76jR42c+LH&Z)`L*Ul9)s|3 z$x_BKG+Zn2hqz!2sBX!C`F;kUU~k>@H1{@%N(Kvb+uk}eT$|9uvBze~)22I)@nr^{ zFdkmY6osGGD+D?GgzkK^;Ae}6`pN4`Bkhz(SHXG8QrzU;9)bU?;dEzEq}ioIgLpN> zYy_N#BE+nP3hprbP0SPNazQSpItK4$Kdc^G#uryw(2nH)y&5@)T-LifF%6})Ai3X zC6jG!XE=+{13w0DWn(M~Lt*OhTN(myK$m?@fk&whbo$vX4L`ok=Af%ud8g#w(%@%| z63kYoOaA?93=MuhR^&k!oXORK7p-*wiw%eYymxZfqEz+cJsfjSAFrWJWbV8kzG{d- zvqnCu4P2E)@d9O#tG>+YftI%SQM?HZG>ht{y85013K>=2aQb&LRR@K_Q?OLLbpal}&_bt`K%jLbl6_!9*j~)!UG7#&Ac%QulV zBbeew@}c0q%)oRMFoP{ls~{TS%%no^{+KfGHQ4ao1inHrJwlgo1>S2Xs1qnsI)(-t z2qW0%TK1UOALGzZ8d6`5cY!2;&nB^6+||clgYsqOsJ|DRG1!LeEd-vy&XpO~ z@rsX$b-Yw>K?%Zb(0wmckqoFWx{8N;2=3>`^%trsU8?lr#k5 zIVwfQFIRl@ojFyZNs>IJ?wZGJCN$}Z%+c~}D+MWq>5-0#n2@=t6aoiR;jPKY??52Ir@XlnZixgibzDYru^Bv*Hz}JYWeX zz7m~>5Aeug`5UZWai3<_;=1G&oi@J)UU<7su4)3tEI>oMQTZ3#LddWBg6WmN7wL6^ z@@&yknj1mqOuFpcv6=JqN-pHmKfy~<^W8H*__s2S}ZRS?`H3+7pr zH`F85xmos$$t*~QN*IKplDk25ZHFa_V1-jl*H`qhXr4 zuoKLzQZd`N+x`4Wv5}vkRmE1vNE7lny}L><1?iKbrCg=>`9AG_vX?{IWvgC5x6f4E zJstB(g)Gj_B5r#(*affmj0yw%jM^WU*E_&#JzeB0Yfg7`9iALs=kWV9W<15+Ekd12MK-AX zpl-q3Q+Ar7`t=T+@`>z+*URP&L#XIrrQrNr_mg2-fP2|m!}cglb7un@_(Q{1gEl{x zYJM5C@$2CSrjSVd-H)51`ImOeHRQ4CiZx1laPUM8qv6Fh+fOKlduEjfJGu>Hfu;-{ 
z@FNEf=>$eKj^{#+oXg4J%eQqmbGV3p{)WOYGNDG+L22SRe2E3=-gs;Xw<-F%;jVQ8 zxNnwvnuQ5>27KI?StIU|z|iW)`1sZx(qMh=w{-{B-ucfe5l-P2Tm8Hp&wxvN!w`tj zBxY$ZW3Xo^K9pLJUu0V9ZT8qanb(CrF;|jbUrRtKtt(s#+Vgo`m2=iQua}=PTrj?8 z!knllXdW~-Up7=QPqj#x_r){P>t#N)%^`gaKf+6Qgafxu<-TUCJNMwvcenY;fNDHK zu7&nYr}|7zRn30-*Jx?Vr)6NCH`blb6BGtauEplGUs@v7qPD5{4* z=lryrU%Ug*Salu|Yvlo;>%ZCjX;UyC>>)$c$Ei-3*VzkrEYJ z%^j@FTDPXETA#BjtBzLDxoA8a+cVi!PBAofvQ-uo?J}j*M9PeY*z7=dIc=2sCK#${+<#%RYiG8 zQ~yO4WVoQg$#r-hhs|E|UjH4v-W%mX(L94`L z#UT0=`Vhn4UzU5x{0(RkxIW#{U4YCM`Op%vsncDpl}H{J`OYGA?g6^34kZlTNA}}3 zo&Q32y3?jOp$KzR_R~})d4cY^Kt)q=Df6RQg3%+PN4r5#xb9`x5M4F~>(TQMK(S~E zf2%d}PpLw_J9=7e7SWLsRfq;r18Prz!ZBGeo;P{gym<2AS^I)h!*c z3>%ljyWA!_+Ua6d{`(WXDpX#~)IFRDMclew^2?XtWE?(NQ@0ZY&C@7L7J$pF8kD6+ z^>mQ3ApJtOv=>=3jyWPM{57DClsw{U%iUf74O3M?W?0DCdcA(rNT<}6&Hhd8G0Q+{Sf8SE@NH` zLR93Y`2?>Lp$&;z_)YD2HL}y?x6y5_quVh0xb183PL?ZP>js%B;P_KiNc5E2pF|L! zJxNO7id><1Nd}fh2S7_ur_6DhMRF$RPj_NwZGu@C$NH1B$`hG#yK}9e^=kb|wp4Mq z$~+i+1nFg=S?EuQs(1K*;S>|psZCfbKi zgTD3&=#wFmM{5t-ZFqxQsud_XedBzy$T=TCsec20BDk*l)j;Ow2))5 zi;q4T*6k;{13%*xMp2v!NGaUL{c>NKe{Pih;fIb_?4t0C>q2FJ zJ&n?pE}YBsPli>?{$zMK=5KZg<~0u4wr|6>Jy-YhGek44iYnDkJ17osYi}eUk>;&s zM@s!=Yl4fJ&c9~Z+HQh#n8e>PXoay&!B)S#&~(3VG}3QU#zzATp@IKd7oeOR?K`^r z!&(46Dezde)Tu+8McHlpQ0M&PcBdADv#+U9R0^}V7K1m+e$FSm%s@ZRQ0lNd06_Ov z8~o>}L6=_yT|o+-RUQ|cWeev5)`4w6{zDpQfU9~%M}_7SnDP=s%U^XqaVXoG+AyyQs31x1?eg(?K~i9@|5&Fm(bzJqQ!xtvT!-9ae3OZczg@$C zZ0fMIQ1y~M@KC6i-5rQtcLw!cY^?J=+XX(O!`Am~?@xh!{D21Ii-(%3J7PkShA_5Q zbeIR)1XO^Rbazb0%aj1>z|yw1`{^uNC2MYNCsCWzk)z6n=iAD#QYC9kF5U%*9_MN335% zq}4Zf@In7WJy!JZ^;mxymZ$$aR)yg|v8u>3He(JchXDdCt4@$aYw)s^DpdzT3-FXu z<#@4KXp3K<^$SBsC()dWbI1h=pq}hewA`?tD5sg`AHn$NlbJOtY7y!rlVJ?ERQGcx zv(Y73%S#6{dwdBD3kYl1EMqhKtNMw7IN4nB&quVf7Akl$qs2(te1J|-g~kZgz+Z5L zy7Q*EZyAPc`bUCS&Z3I^S@06G4}=M2NV`ttL!?IZp+LDk7~3kT0>3dRV9!QJ*-m0_ zys`?T4MZVDy5c(}svz>{H+Zn6T`2{Yv^)rdW}zJ78B~XyRVW)-)YjcnR=(38Sc^t=$3DcvMAr zjbgIm0*uQoLVrpNU6&bxd0<;*O2?{R+AHzt)DeJyCq?a3-Aj+gjX%>H$!k<2SSyEW z{b@R1EdDqx;y+5FH9{|T 
z_~o_jezp>?6VdAOS~iKf7rKK>S8k&9=agnFj@5h!1K%6OU6=ZcnAXD5!q7Jivknig zH$hLY#bYmUEo~2AJ>kRUMvxsWS>4(1c(W{S>GH!HbSlE4&i0CPOuyJFH`10$%gSLf z3Yd^^0&7r`AHa34N-cb}NcGmXwnXvDuQ+}L2D!T3)*gf>^iI29JSfuQk6KFG8GNeq z54$NaZfkvFe;Mzj`r(Z*sV^{00Sq%0iNhM4sXBtJUQ%n8B`opo*dNAS8E zOsiHLZ+Y3_8il;gx&}+rDlx2y8G{Bp(CZjjw@D6|H;)Y10Ty_~86TdMiZhI6D; zgQw=DKezo7@3*ApXlnsE>H4ZlfzLD?_3yTiLw5-?d!?$Qu>3{Mkg{|ab*BvUE_fiog+p}GsD>z?O{0P%2 zs=LCg3=Qtr6h0bm<#j?^cCJWgywkx0yrHM7K7m;Ru3z}16tV4#0`-bV(Wi!;=sjMx0 z&XQqCw&SbDS!NFv+v%4x=eBl!;V{9N%xG_{b`bcMz}60s-4%6ygA4s*xRaT-MsY9c@$*r75MQX- z+IMAlaSvbIEAVGEn_V_cnhgEfwB5^9zMtIVN0#fRZTx1}W+${S)vGNJT2Zz@uP|%v z%tbpTW7e;kIiKKZ&CEq6j@DB36v$}WrPclmc<0Mh1Et04To>x5;*=x~Xt2+30Ol$2Aw4cN>(aP%m(Uh)v%{>)ZN5BBg0;3~d@g zRqT8k6M6Rw!!0*1f|)E zUSTfGW3j(%k4a(toa3ZsRN^($fW);e!=ztBt$YpzMz#vJj`u$fgf}1t@l)WLcq>GF z9j5#=wNd;|a<&Tfa4V%~L(!w}&3_@|fx!HQ~ocSa*fEy_o zuTy@Hxw3>QlRphRIY((E*9i_K1E#S4Jh~5Ga2|Rj6s;Zt!wppxJwH!`!Z<*dw_q4> zkNYHAn+zq|Q(|D!k19eA(y^MwW+^g--&7wx-Y8#Z&T1gdrK%SmC|4CRp6=A~1TS35 zpxCuWev6DQ&<`|xoZ)3BNC7^~($|Z0g`b>+C*3Q#!zI04_QETeyP#UR20f5Oc(q^3 z?tyY6dQ-({*W>AGe^^Y14aEDEmos;b(38fZb1QIn4X|Pw!;|PbFG5qgQEVh5C7h)1 zAbG7Tn4iAM0-R*&x5~S=ayD(N$=BeG&lhd&e9f7v@&hQ*_7S-T+H1@s$Vr#bqH9}k zXP^iXF3bQk1@I@ zN_1KJ2eCgd7isQ7xDO+7)yy`pct)fZpMo(@CuRJA<7IEFe)k%8|IqkaLD2umoPCv`+>Rm2F+}_GAlz3G+|wm< zyWsqhY5SQI!X*5j$a`dX^E<+VmpNw3j*#%_%=}M<#O}LY%`mxJ=1v#H_ii}+Lu54c zVQ8Kg%_%ks$5yyRxMYZIuG8H_)V{4$XYkjk$mu!E&Cmv@q2WXWps+}Ew5^s`j_BlX z)qpv#X(0X$x>t^t_!~C5k2$q`d3!T#P1gK!k!U6z?sD|rYZ{uyNPG|A+|LxJo@G!c z-)i%dM==35Y0ibB{;1@)W~;8P@Mr6e_Wkzi0zXB+h5w+B(f&aB%T1cQMK$i|!cb}_ zVgzXRPSKb#TEa}!(mHLA+biLUsme_vYE17>YT~De-&}OY-uEKBa5*pD|Qd9ceHm{tk z_{k<_ibJU3!n#);!ufS=0#7<@{gEE8JWc1nHkdg>bw)_FVx!n>m2WYQ$$+VA8yLGe zieF4)jj~O4r!hUzdee~-e?St{eOeO~=u1 zlbvk}&w&%a`P(DQMh)MgSVlyws>J zxD;TwVa={+q!i^MGk*8!!QHlq=m2PQ0BFFq4FqppN6h&j;$X$U$H9gt`TwylvG^CF zg=xfQ%mwAl1r-gV*c>ny)vm|YHV#$JZ;oJ&$pA_OO>ObZg1J=~phThBQ>>;;;`A+5 z75e~fykL%@8u{LADI+W1057;pL186%5L>_)`5IgcHDit`k~vixv6<8I@F*0W*KuM> 
z0k$cO_N_6t3|7C-7W%W+6xO8WiH+o35tKfMETQ-XZUbIv#4z%kn6^OdFX#=@)dJvvDjc=p zP~nk?TlSUF@X9P^4@muGcpZ9fgk(-51yj$T!R+5d@v~>dzPJSpn$GI54REBv7R3c3 z=@0rG5JN61_y|56RwSNGN!J7Apx6X`cA#w4i>->EZ=l*Ea}skeV|oQn{61P^Gnvt9 z(0e5hC-^aVE^h}BVD$Qn)>P(audx8%Yt`|={RO(!j}DRtisD?+tcNE`{Y6BaKq1a` zBFt<(0Rm*81T>p#1p0{bX|ZYnRtM#E&`@1)Js;HFF2jte1GILzKTGa|-w*4(33StG z^k?ZNz{c z1*wC71LBxVTTo47{5neZX^O2|eAgmo(~fpsRWZ6{sA=s}Wo*Y0Qdn%=?S~n{=ZK=)cGTM%i9jKHj5uHmkKYA1#{uZ;f zbLa(e*`pGkzeseKv0D1H(~KgZ2i~v|UQ;34fp}oFFD%MiaC4fvt?j0w;V79O#0B$0 z(OC}3T@DKPLx&$eDt$SxVCKE}WsN^UC2k8b*D(*h_;8u;0P{Sfop@zD`U5Sr+9m?_ zZ-TR5ldE$^!gYVBn%MDRi=TX5ZDchW{U~VrDqj3Zr=On+g8YVRYoE0>qxdu6)C|Ti zs=LZwoxKHAUCXkti@OE40Kwhe-Q9z`yIU3x!3iGR-Q5Z9?(Xh^h0Eid|K9tbz3&@) z@AqKzVloEQSKT$cdUkcsE?cx*PJxZ9QTExFh@JNSdJ5nh_RK*K`#LgDV}iQ)_m=@X zpZj1}3(;@+stvUL`2q#A$6uN(^R*LI^zFdGPl&d3^Ag6}2Vq9+=pic741N}^9)%Ku zH8A)!U9-(+bl4h`(EYtgQI4d zh>(g)&q2f=OyG_v1c>^u&hcq3C^#KAPZ7;yNLdLS-;va>A@zM1mAMSi^OD>-y!|!6 zTeZCMackD+9tb-YP)J><3;D|5jJ&fxnuCmglatsSchZ>mtMxdI**-($1T53FU6vkr z4Spp(8N}A}f=dThl{m-=>aX0SCbJ{76Yo~7HX>osWtd*qf(MWYtrJjtAJeTVtTuc4 zOL+ZWElk$Uzb13N-D{H5ZqstD!W(g~n~LE%+3TN?HjZdI(bQFnTC*Ttyj^*j3^#viX0D)DCq7H=&u#dT8$LSzc zBoeBe^hxnuG8+@XTDZu)JGDw@RX}qtueJdI2Q{yH5em-Mb-kY8ovCZNsgt^c8|=MAIn?^& zmFXA|D^$ZJ{FQSGSF8s*7(#{>*` z;Le%Wwj+V>1}pw-?SeCreJ*-UfWxY#s6)Au$I`Cui`y~oL3nH>yR2FsYHe{%Qs@N1 z@hQf6)6Ccj|DOEz$ zMlYD^6BWpv@(NpJ=)$*yct#=i?*pO-nWXb0_iF86ON-wy@|kK@?ufWj*o&Y1PXb{A z5k|C5MMZvtW3NIX z-&!U=df`LwjF&f*bjRpdMNW8*uU9_!X4sm+SxDfxEOWtB9NwbzB1|s!shc|+Ql&*H zU38&_qT5_$G};S91NqJbYhK39UnrG^l~EmnlR8!FsC7Q+moJj*{ou(|Oi~?`#nQKk z5{cOm1#LS1IfDW1$lHDw*F{E?*+x*G&FU!A`4z5}yBw%^ucGN8`n<>_b~#|rGg()5 z2NZhina?=9Jv&vMwiwvSV~de*3P0kOL9G8e)k58`?z-%)MMcqTl<>@9XCs&`%kR8jSXN8MqC}wkvkB{S5~)ZWz40 z7~&rJeb1kVfU6)u3h3N-ql8pZ5V+Tfh4!KPek_&NXp-~WxE^PEg(>;Fm{tky*{o(s zuW<{?kjpHo6>q5cbZHK-*`mpe=5^T$bx(CW9$Qb!^Bx^qi|@u8k#!&oJ044?T;V)n zmL%M1j{INu_3e8{Wec5v-em6f1VQNEOyF#;8B7>9yRWy!S#y)b5sETEVvGnU+_M|v z2sNllMGg;2k3$tA5VLXWgU=&li?ITZNQY#h|8 
zZ;OC~CrkOYH?7QaRqu6wIds9BmAZ|Y7HuaXt0q<9(4to zyQo$9+ud5N8KUE;LH@_Xy&+G02|{34$zp39Sd(P^X(q=nU1waT-z#O(uiUibPq1Z<+jVKf&i3X<@7 z$g?~MZ*1_&&2*3p$|dM*kSv=yUW*gM7UIC}Lu z1>@?tk4_lk4Y?zq>bI8nV%%>%9$n_YMSV?PNH2&SIW?e*cKW7?@z#DmH@jb|vGvU@ zaQQtOPd=}|W5+XXbCDzkvj|_XUg%c&A@OmuEdZu6J-Texn0uY@BQs2b>g?pv)r`o; zMnn1SHkxQau~G&>Ox+{nQNW&foq(;b&CRBF0F$hfAF;c;*RkIaGPF!TE^v4fSC1PD zDU~{fq0!iBK3+2Lx8m9}=?!uRx|snkrxd_weY z%C0c1pGEN>+1|Kxfe~sV4;(9J)L;EKY?8;9g&SC=9RzUnZ)A!+DPbv4tsj|PjZi~I zhwitrETY}MK@nzLzNAoFrOrXKt9KD5!MiBjN2P zlDNKQh-ViyYS=;J3*`M=MaRbJ7`+<79q1B4nlZPn2FSQ1fB27`#E0@vk&1nh2f!cI z=*{aGvd>!DEU+=>q|3;08}iyXi%1vJJtqD@(|l`wu{uhC*mAF9MAwl|@>N+)hyD7> zPx!84MyqYCYdL2q-+pt(br{w>_~>+=loZm79$yROxi1-)mD=!n&j+9CG}OFhwiY4; zHu*z_qEX}DMe=E$?oA_lBi>175{uq!|t<8UJX)h%X z575jT@o5xnQ|OMBC=2pOP&7idmirFBGW!|R+|O_HETlyOj~4xGOhTB@mghuoU_cRd zI^x=waYIn&$LC*V$vt5E`!;?LleqDI+Fmy_*!vrJpI)wQ>qTaQNJF}9LWtr)MwSQ? z!J4MNr^O;#2B{GhVp0&}c8A&?zr}GW;C|1E@Mre(Hc*PzhI$l<<0GeQ+-@-_N4?DK2h*NXn{_ftvLcMo)da*PzBpJiMg=Z8nN!8>5clR)isC~Fxz*5c(oqs2EgdKff5H%WOHQ8~z}0F!<9_jL6H^=WVm0_TqYYx7syZ*#9JqEl?0fOGT!qW-h)?VqkM-;i z;nQAgO+JB-%d!nW)+qcTB|?`w;*#HBDlZ4Y1k~Jb38`MGTBCgxdS7_dn(DVvL9pJ8 zf}}oAC{kI&wrzJe-9<UaBnR{2TTof*Qs9nt7NLlT(gJ4KAdD7ZxBz0Dph zfM{N)so>B24Swi$l6+IWKms55tj+@3^YA#U1#TP@;eaQAn*)xAdo2{iwlWKn^r@9l zKl{3jPTM$~P}iOtub!x--!B!A0hH)(A=+^@SlBORX`!~j!=~jKwItDS(e*`NFKR2p zh*r9Y`WD8ZX85XC!q_JPP3zCAYY16GqP;?Cv3lEt8nZ1L$Vo}rIQKg~Yow9_vfqTV z7BmVe!&#I%DW^ZfH?iXgYhbC`HG0UG z-qB>A7AWIHSaEH#EwlPl9>S^~Ghf~7qmvhbeQ~bf>CS+z>5dnQ5bv;&n1wGyoBkJ{ zp|u`i^dIsyZIcc$>bRPsx(%+qr8{0il-XY8%m&=4+BIV^dyU?7?zS1R>9m|Qp%os) z)-Ez7%EZK|7t)a+efZDms(vPyRKo*}$`l(&DWnFZazzAoQ!*qm!*pv|aru3eAD7eMjiZ~YMb8C~Ci5(Dl=AY7#~+GeSrJ%rRlK-5kpfF!$j-lWoM*>vj8$7!#MHgNyD>B04F?+ELbu zd>r+HvhrJ{t93jOT*71UGD+7~p^iJBITvdsgP<8_q8}II7;<@0tt?4Eg5$`sOw$RF zzb$Xhcf4MEL!O?_ABOzn^SSBPMp_~k^!+54bY*f3x1OhZDB){SPZ6Vx^+K9dOWDY! 
zu&-~%iJ%nCIUL$c3u-YG1j6MXyNDf_)yX+&hBEABZqc0Rv&pyfd3Fjc;g#+o(REIQcBnn zEjlo)q13bi#NLeBu+^)9n|;`{f1MJ_p^nH(_0x6!u1JM*Dz%|xTR8CdFkANv|1k;> z5^>Wt4n3c^@C=hOkc_R$rz(oa;FEH|;|0ij8_y%^iW*w0bf9)hV+bnAWvSk0b) zXI3Guz!cUPDD+5eQh@R78QwZSJxq@Ry399~j99bdA5iaY!(;Df#6{0U7A_AD!?X1= zv0|ub9sR{&0!Oe1lWkj7CuNolkZCgE2J~f}$RKh`rTbPSykhQVu-u&#n+Pyq@dT>< z?;i(5;RpA=;niW>D~rnnL?3S_m#G$PThu=u^Jc`H_h;h^u7+C;dH|lRc%*oJ6`=zr zl}bOth}}arT2?>Clu|9<;FpnoM?6{Arpb{iqIEUw`YO*lC+exrEx45KbJPNMnqq_L zY%Ms~x5KF*u;tn^MTRkY%RpEQzSQ5;n`zM!oV>|17nM$ss|RO?NcntGgRTUNu2>|` z7xPhH&roD>FSEEiwY{J@=RY@rb=_&~^8)-lp|5)|i(GB6je28zsYWcPNl~~n(wd)* zp9X4FyWI${{+y_{bGO0W5Vmt7SqYr;+^35P(FT&g{4CkyoU2V;mF-j!?uopZ0Rmn<=xfB?DhkkgXtuNN`v{02@v9+1O zqS>aF=XP7-?r90k ziwJaVc<{k(w{oM&Y$O35B8}6*b@=+x%@hyi*5({%x}5rcIqgfQffT_U=&y* z2ykSdo@ZUSZP}SJ*eF$)GooNe+2-|hE?9`gNRnkO>5mL_Q4zi>Me zDx|k6dPJ(rQaCDs5lSt;ULKjyIh>G^ zbXWFVQR{%th8L3dFgWcJr;4v_tK2%z#V2IkSDo> zQ9+;N>0G}c%+&Fsarmd5J)3Dhq|Rmgg16><8KLO+#p?h)gv)Y^c)TeIRpLaQB2~F9 zCcX2(YFXe8hg#_-OE>te!2pQrwbd3};mI3O!;9!}gyo5Z5y^5EU_p3lI-&+U-(Dy$ zo%AbES+SiI^V^k1-jZay#w1p_o-#*(1%2TGVRp-DNEUC|b(3bWHqhbInJsk>99vu) z!XdH|A3=q;MGiL+OF^uQ{*u;2Iv+-7I@~5xd5SxA`mb{OwxW|LNtLxGkTiR60Y~F- zOw)+IZeA;z4kcJd`G9M4NW0obIJx?!bG1@~vAo1E;F;obUKS0EzPaLcV`Yk7nao{0 zB+B>CF3sSi+DKZtljuZ0^E&dpw&juDhB-~GdkcOY7*Rc_DMbdFI&I$2T$79Fpv)jj zgUfEmj%`w3rZz5(O}EB?77`)_IMW>6=gC}#s;Qt;=Q&gcW>tDS#%!BU>3ZPyFEYa6 ziG>$HZhxB_v}dfc_!YlNjBU^8PJNm)-(#Lt24=Oq+rnCXS0k0wh@LmO~gC|xE<&TuD>!mdDanP5U|96>9pQ^r!1OUgy( z$jNm}u6NO?xk8u_Kcr%+tm&)cLyZ)QDS?8CIztnzUbFZ%-qE5Z;Oo!8`xSB%L!YcZ zIiwaHJ45Q?*>W>X`uY+TTnCYo&PN|axkV};2;1$pM8Kvj%Y&wslf;qG3TQLvco2`O zzx>9R)dnL%evjeNbBvO?85%m?(lrQ&O%Wz~^8^%{3AFXiJ4LNdCG};7bLe%nT6OE7 zKHsURPhN_(*2|grazTo_jzzje9XwM>2BT_#T6zO+phty#pl?w`cyPL9iU`ZxoBEE+ zb*~PwM(gpgOSe>9R?OTZu-V4)rsnG-sDjwoXi&YcWsT1kSA`6jycd zTW0ZB-ErE^^0zXYBgzHi-O9~7A-imZw9Ft|*`_`^Wy5g2ge6U^v}V_-~nZi?@g8?)ACa2Kc7 zfRcbDE3Ot<6OCrptwwc!sl?H{qY1neQW}s`6VK+<$4@h5QN0%$o|7$Q!27~>j0zyZ zf{HMvL`ajIy!=UuNjq`@+tVfaq!vSAu@>_qXk^e{1Z@o&Wei0;F 
zC_$$*gzt9x8Nb*3xFx(h56n4DRJqGjrsODgp*~X0>E!$t&_Y6VY8ykwMVP!kmYu4s zZAX{OLQ1NiAGu_GvDt&!U{l^Qtf~+&@14>!z4mN1=p#O8>{PnhnLF5I#VKN*vz)@h zZi**`NvMzu@YfOE23(;OzRRU|@uJ&RT6J=deMV_;4tZW46?tsk4p{yw1zCue;qV#ntY5}4MgqTEhd+FR5P44p{oXC`vNKR*JLJ}E!o3uJ3Ze3) z8tH1}_X61As0Y@(Q-!wLytOSh?J=$_RrGST89wL96)@1S>g3+@C_ySn3y*Rt5uYiv zG3pm&8Re-*-xFIrOr@CyRevWU`fF|*$h6Tf{LHDxi+7aE@Q8QROX<9Kz73##|NU12 z^+es+?oKdGmQFZ#wKdUa<^(1sOv<2zotY}-DsjB;!ESza#qxzuVf-HjU^2(! zci=XqFD3MVZTi%2(yaDY4MAklmq)tRu7#!yKs(7C3Abj39QB8OHD!%@^iT%@*WE|> zOa!fHL?oy3WZTbK@cO%`#w?wWMp+ECumcc!mh4S)L0t2RLz3r2A{TV2m?r8hAe>-! zHvf&8Lc8Ci(%uu+$dS!qOna9Cz=&-(@xjw&_S#`o+={ddhGE+&mrBwcBiE@K40hW3XG2zC+MR z0_w&4l(%dRLB6VZ%fR`k0M;IX0mgC9fB`2Oz+JS$=zKTHHB2>qC7x}=BCykIv| zQY$S&!C$#xYwi%?&0qWX2Ai?_32Bn~{~f1-yhumvJ)5216$Wg3Qy{d!VkaJAdJAQer9bRIw}#g%o)%8GkS z^2HfFn z*lXZ`J>%!ka7fW5t6z49W_+o<9k7mq$6P@#|8-Uk2U^EffJZR|mrdbr%M5OXBsy6)M>#0P9_&~N z=hxe98PWl$Zi3Hx*XXE^AV;QpwU$r!qp!_j?5jq+B(5f!$dI_|hn7nV)%71A3S+t9 z$RsR%(TZx`>3|k>kTmgn09~%19}Xn$+ZazbdR}QWDjf!{%0d0=%N(^;w`u=;>;4<# zx>o<~n22e1f*aTQaI~+y5TQ*@ty4N`6H)HfRiqVDv0DLm&UXJhaCB^LRd}3P5}@naAxS@FJ`iowV}#!LIqjh#Z)c7UhyD8VoxfgU*oK@4+JpQNoJ7 zEL>qltpVYlWuj$)j_xhYv2FszX?KWSmx-aP-96g|KA??Trt#7CE3NqZqf_@zWORB$ zt=NoopVAEP{P)RIpgp2m%jb#4J%&-??hRKwW#4PKQjqmV9+2rIUc*<$Ms;pRZ~UB| z9i=l8QmQ>4fBdq?ixipfh0K*m`;<)4EniOVs~3z!J);2fk)|>HeW6>L;9S;=?CFwt zwQ>+VH~GD>ZY0ngWXa2|eGg-?u{+Li?*bA8VBnd(PSmP_N-2*X?vSQhm68*u3i-%i zSaDwI0ZAcV;6oF9e$<4;&+&+c6dbf=p26S+8J&hibPlq7GHe)nnG8aEz&|f6_r$~d z_FOUoH<_*cQZhDb@nv|t@YumAGHIdRCgCF4zBuZIH`h4>vnZSF)HSN=Yae;PYJ-(j zmcxU}=V(xJ+ya$#CSS#X)%r*!#hby8lnu_?OSXOcAG0if@|3E^U4|#sgCV#aNQIMV zc5uSSRe6C`e$9p^X+`6TuL;oLRr=-!p0}$}Pv7Ds5QB?TD5PI_m8O!)0_lz-&<|qk z2fMFQcRaZ-6MnwYp>5;>68HBNRlhRKo3{qG$ZJ*2nx-J8!agZ*W^5%J#7dnX30%1X z(#DFI2dex8%dmUPR8Qz{X1`ppTV!37Zp#W{yau zfvMI>^3V7$kv9q$a6fr~J4_t{)){MG)f&WnYu8L=eb=t*4n?urpPMg^c)0W)DH3Cj zyEivg3`1{cz1nxG3JEhx(i|oBt9>9h^{nPoFRJF!akJaL&TuDpgsM`ht*Ls~eU?L9 zF51VtZ=MhDsBNg~S@Z-GhvzQG>;S8nq9lNH>@QmvNQyy+T{``VrkD| 
z-lV>L)5nH#@&v;Z928%z?;w-5dFrA%S|G7W@ObAK&OGV!0Z$>^jk5C7 z>(_25ukBjuIUdJUjBpObnb_Nu0(pIgKSSUj@@yPTcar^SwD>u(O;Xz$M+oh*e_?z< zJAW0#R}(9!B#cz5D7lM#sKo@l3a~^4x_55|y&F?hGK7S_OX#XUzR&k22hyN)%b|}w zYUrvb?w2zA;HcSAz*Y#!_AsFNkbM?jx5X%(afnGu&R4a7oY$&qw3RHXtQ-Pm@PW@&i*hnASE=M9pe(RDQts0@+fkeKK8WE>g zqacXF^o?ryrxF)mCPe!ScIowKVz6o^AY)41CRt7dXDH%$tW+XQG73rU@J*rG1#);M*qmxGGt{EL757Erj1BjBJ+iN|jV#^Te*_=WDUU{% zk|9qgdX?*j49mkAMiW2wE$12KEaiWmqE2(?QruCMc2@7lP`pX*;ez<>dHjhPL-$Ac zs`Qt3CC-xaQ9ucekgz;;MJta^p_bey2Y?x=C9QYt^fLJd-uL#!lOElTUoh4@s{iK(?*$vQ>ACKsX0{^kBGYdqv)iJ{~HF{Tpcz!Z4guwjAnjl**wQ%2d%vX{+K zMyjk`TX7k7odN;QSJU4-c`IN+Mh$zyvQzE99f*B81NUO{g)%eeAa7pX5K=nb@m>>S z<{_gk3)VrMb=r%b9x+mey=iVoPkzbV6Oe#Ftc)nLvs-yW1v@=R4j)elI~vX9CMIhi zg~n%M(gDj(OzPJLR)^i)6O0kLw;6x-RK4^g73p0pA7JRKyxx*k{ICl5_AkCfxb)y> zy}=*#2_trY0xi_w-$i-%&mOj30Kl9+V(xO(!yYVW$T#W4FlLh3#RtkU2{+z>r%%H- z`U%{8)9POO%ugbZAQPEHn5dTkW zL(r{?!GT%!{a!;2kp3lwgKA#qR6Q53C0kG zK!HDK6n=zB)Py8K+yfp^TF0`$20tP{b<^NUKvp2aYfaxBl8b3XNWn=xhA> zMLD?&eERp#I{v;G^2%NE+HF??XfL}hd?-eTJ4jE?;5%W5sO|<=Gk^?VjPhU}j|v)_ zw?ev<%My?4OeecUgm+B?p9GT<0gIxIpT^g9Ulfa)qn}!sm$r(Eq}NyklTwK3WYywY z>Dbz6<`cO{uGOXR+M4W_+j^4U(;lAob>ETym^yG-NXnq=BW+;pKgj^e@>iMb{!i+_ zg!ruV&&mm@xN%wu+GjtL}?WE@UV*wh&T1u8!6m7J|>WVye626+lRLV z-oVrb848kE!^{Zxy{ZZ2!D66cA?UH-6a0-a018SRN|kKvt}UhdJGXrg@kq~$J0^<= zi3%DT__H|6LEC_=>`jsLSLt3(+ol#;(j5E&_nVfn?ewykma(n$j?os`?R2uL7TK-z zOrXv{T;!R|cuEx>OQP%nYaFe=Q|`MnuJl`|Q@Xx0uHepwn?vSc$CM4)17?5c%3EigQD(QgDmL>@`<}jfd-miX z#KvV`0-3d$wfVK-@U(`iS^10xPz`9^{Xq{7CboV}4h_`a60#)oj!GkmC%MipBhLMF z!(*z&=be}!opiduV~ODg7hu8LRkIdPDyv!s?^DIc7Dd3;;Sh^-i0MbxRP&EQ0D=$VWa?ngU}98Jub-wKbj?ra_5bn^Mp!EIl2lZGV5)lYaoF{89jlYZhqYkMP6Djl4f*ntNE>F{D7Bw3?qAx|1 zoNmFox>6+MtB{iAOR{_ts^TG&<0o_<&JlF_897s{9j{skNw?fU4o!fx>lvZLu5z6CnM38%>GeHv-ocG z(OBNq+F4DS_C``Fv!RN(y6UwyXl|FOV#94AiL2Z`_Sz=v({WsyisNa3nnnoRx~x;C z>4{E#pGH&4dLZR}c+1R$=PA^)6&IBVv}t*Z|Q$?6Pl{!h9nqkIegchj@=fyVKuSuG)~X^zgwlfqy+ zzgaij_SSQD*iQxah3Rl_6uBf`wu>UQ_y}mq_-YPCblhQP{l8uPE{yF1YKX`^B*CgB1vjBt)jvyzjEmVBlt&o3xVPmH 
z9a2#-TvNf@7%t0e3Uyi1wsiyW7`U34Ba)N4CP;Q>{K&`8b?=A_sPGOHKVMx@irtqa% z3mIjT%F*jAP5^w}W+13ckaNK(O*cLRBJr5CE8aQ9;u>A0Lq%zjXkYMTrX-DyiZxA( znIB%cTkS+?|M|Fs0Y(qFt_1NC5h|vPvv9dx1pe`&UP8DjGSgns}ZZaiKakviLG@if-kue!HD%ZG*-U6cUYtJwk z1{w?g>#L!5l)ABM=344k7F}}VwBFP0neM0=>Y>+~VA;e^h*uTtVlpYD8iVw%P=X{K z)rNU~H9p6+T0hc2ugfDIZMoQ5FC~bloQ}~}8|iCi7~FXY;Q7oYCU9#?BYuXbHhYa( z(KD`bGLtBDtdUr(T_)x3izfphV`zbK=%vSK@)KT~pRJClbEmgi!&<+7n#Gbm(J#X;PBYF}NswbSlf0X3^brSS+6-0x*@yj0egrshDj?^xQ)Sfq{%@@={p1PO%D` z9llb)mR7vGe0g?X-dd5iXu!Ain+=@%eL%}d2g!aI*%V6$LF3xnrK=`V%@-2F~A4`(? z?7Q6za?FtfLB$dN$DwoT{xw86q1Qo~$?}f7kZjNEI29XGYer&5y&!|R7uOgKGcXD} zv)P2%#Er^xBfKq9h)tEMR+hyaW?|Ja1iGP|1BZ7(9ad!PHk1~_rpT1jo%n+1d$OcF zC*Q(&5tp}f6{EK=_C0IFwuLon<$=SmX90mkiNXt9 zpI*3zyr~J?0%l-;mX*l4;B#j!@!xmW{?bcJEDdX4SK__`QoTK`BYd8k;P&0UU#Qee zg!H0~5n1T-F}X1~J%%PR>|TkwLQk&~Oj0Ii4`R#abGi)iWO?nc1_%x)VSsIl-XctlWe*N z(obEQg2EV#PWFE(DB836ZdZ7juy;63ak6FN>8?F z^yi@AK03|10aJJhZgtbAa#0>AS9o^QsqNY}HaAjR5?j?QWXI~xu4-%7*`Fykd^d$m zoBBKh4W2;-JTl$a*rN9`G&bKUOLa^@25leCmFscQQ|QjrCr0?y58)kU4HvYkGcE~g z3%vE`q~+gH)H(0Xrp|2;unEM{Bn|h=mTV*%twXfwIkFU7L7rVIR+mC@N=YN=Mo7so zk;e2`&lSG41*O1b6=jqO@+lvaa34BxUsl$sED&%#JzZ1)ZX!D)wI-R7@`ia%C?X96 z%E77-+YSS)Wk&f$cW{t*A?q~i>t=_=nL3LgEG{G(QzIX@c;eI$<$m{~`ND(EyRmZ} z>Z|8G7TaI}9p%QZVN-A{Weq_|h;#tcV$cz)blXH~>uOsN+iGRn~rIOiY;K`gb?;&&W7(moX- zkeE3ydBEI?H)dV-VA8BFCs-HaaDFY?t$TL4{#PgR7OQX3MNf zfQz?!@QTyIF{u53PgWKa7d0RrX_w58Ie*~1$-wXD@U2TTbJ}!T4T?xN8*A-}JHRSd zaI&k@RD!Hk0KN^yUtmOLWIg(t)pRWZ{;1Py73S=%W%*UFq6dWptW-v%M0}3+m$iG9C&)A)0XWkK z-+iiKpeFm9L%#QcdeIW^d}6%7w~l#aRd8t-I<8f&;PD1V6=ItWgsVC;);O?-&DaI{ zA1Kn-`6-_9)0ADC_ytX!<~2C-9_~_^@`ZH2^=dQRlLj%OT=XyZ(vC{2Hb0hpau!y* zkp<&ElRS1o(uZp7i(i84$X;;#p6GUF<7An&3W*mgRVq7#JysfO8(>nCM0#FP z{RJy!$rMiouC+R&sU~e+%x|9;F&ckV@G?MMS|>L|iNH7fjSAD6K3pm`#=@x1!rE30 zseh|pG*BA*hBW0QqK3t@9zGC#f^i76u{G+4)9An@FvkNnYFuV;?q|el6o^UudL4cN zUN$)cz3}uh#L^z`ZXy;3`^gsF=K9vE4G}!Vn24}jgID_*f(2MnS(tnsriH~No2d0V zgG(HT0lc}0PXA`~W-1G>yMX?$v};L0E`+v2X}LGAiUYR|!@laYv>q)bA#-aO!ur?W@0x|t1F2d)mlgGoPh 
z>(MO`fHoV!-yT7LQdJDGq&lY<){ECh*(681t};~1ZH^+baCE7yzYhNT&3et|=E^fD zuQz2gXz5#X#__|=y$5Qkl*QH5!E$+4Ndrz^<=U#CNZPCTn|-{LW|a%?Iv#H8-d!YV z&ujQZk2bgRn&0O~&f_2Z-#rDmhGqJ9)-gFE3%#UODwtJlPU^Sy9IM;;vxNF%G?(*6 zkcOi%9nw2ALt%@pTkwA|Z82H@}kKI?6!t7A8i z@O{O%(YEUrjgZZ!ADMgu?{zcF`*7l|vyTwWXJM63cR=arzaj@`$@(Ahq&UsEArJ_Eg}3wVc4HM28y9U^nmZT5 zUeKsv2JCEuI@V3yKg9ydAMk^Je8ji7KH^)He-+5EHFYthb1`*x(PyGJF?9JW#Y`3X z?9rJ}bm2%vg(4DAag`lbR@Xjr_IH8oG=^|g;QV>&7pNtD}C^q)IW#`N2Q_qBr$R6V8>w<$`I%H;%0_a6J zP|i1?xbE*FYXj*$&-cZI6Z`V;=FsAkb|KrON2jrTiU`xPr2MMSwby{bvEyH$-YTqP2hq(%qhb`vYIvyJF#a<@=MT z(RW1M>&qdxo06Gmf3TJ(Kc5)A&8mE;i&1SDXI8n$r&ex|_(ymp=qFWn?bWhV<1+Yo z;>1fhRvPZ{Uj^_`%F3%>{m~6xD|p}Dwq~pBT8>s%wvO%p7}ESrn#n}{Ve*iVg8tto z|NmfK-55UkUO|*#Isc$$5Izi|{%{g%Twx+PVp2i4sjq04LFKh?(-*k?!m3hdlBBs? z=&b2qptBF*@IlS+Y)0);jNER=C(<~I^o{HXB@hu(LyMDN@%Xcj+L}v`|yA~>16Yx?#6$4h8BjJq#QF5TUHEJDG zIdE1IYvHvnmlNY^He=5cbK-93^(AxyC(zIY?Itogq8hZx5*% z$I3Q*HfAQwEq1sOan2AuQamYTOd?PY2StFa{boF74;K(PqEVSC-Eb%lJ48FOSQ3Fr zC$6(6F-@z$pP^PbUW)HrrqYIX;k|28fs)VhCLxED7T9E87QXCV^$M*h0}4g}4)@X1 zK|rjv%_aYR@n0X1AlUzPg#z*_^smc*#}L(2KN6OPYpkogdccE#LL7mEfc*R8&!Vig zB0~Qq;9mvqzeIi56Qtf>;J-!vI}G{P^3Xs27Utiowcmu;6@L8I^HI?MN40;+OZ#tm z&US_lfA9eROw*5l|DP|!|B?K!O2>RC{D;8lUn~3n-i|nb5V`PYk^dQs{r@8#?+@as zKD^`~H2zQVj0}Gb_`i+4+4`%0=fkoLFd!hr{{#3}V*~yg@PBY0qrzV`{r}~ZDHXh2 ze{je?IMjdOv_t)6MgBjT`v3CsPNjuKK8!8*Q3(FP`vm`&Jbiuhe_AK@IWXE1NDvSS zls_}d|BhkyPZ=Tuqy{e^b*P8&-LI6{2K=AKV|%J0r|hj{D1lSo9n+}d@LvbGe7^w)yV&3z}@~0 z<6~{|pE3TpX!xIu!M|gCEFu0g#vkWK|H%-A0R5|hS6}}hkBNVr@cbuZ_OBTK>-qSf p)##7U+W$!#_;8s2ew6sfk|96FY!DES&zSo+$vS-honJbg6p@M*j_Uf-oR0F8^_vAmt>sREj+^t<*ICXVUUm?I( zE$Jcr8D2i|i$AT9@=ev-usLQK9Tx?I zMbY6WFE~kqI=-ldg=)?8)?W40?R0?(JbcFx@{xt^_iH*j zXT|PW(MW+x`;7V=CN-fMxowytI3;7lKj=EB*Y}#^7s#dNEzwQV(QBsk@jY|jaxKyg zJ~pyKoFOqZ|C~h5L~TA{ z^2A4Z5_1V|J1~}dW!lVZ4{mzbIK1<LRth7cP@f* zWMTEuqH>a_s!v`I2{9{XR)|Lz(mUj%hY){cb@>QycF&h#WcXZD(w0U#PUQa7BI-D2ngF)w{9mLE@?By!U(a4w~}BvGylSDo=dzL zn;tCrsK*!le7gNicE~02>-t(cls#wkU|aJyb2@8RbTIyv4vs={o=k8@_PMN(5;C`F 
z23b9)Bjse)MiAR|x9@dmu(R1RO28(~+DV0}<5uXGAL}$<6PHS_Eo_g}n?*87&&>$D zTOEMU!0XHj^Uj5p-#>pb2JgwTNJWQNoz(hHI`#@B++Zz zaHnCnXCrCLKF$6xg-r|%tf;|e=)Yc$)I-%pQH#SQZAO3``hkyD&6@>t1ml>Ii5m&N47vOcueMSLMsB11zOpUDETa9J*RN-IdzLNy~Y|ZxBtP zw{syd13%4K2YB77GCB2wZ+w12&vZ4Kxv7a@o8a}AquTmU1}Vf~8{edqB}lT&&VYKq z*VN=SqI{z^yl_NPqfp5-*|t_=Dq;9f`ha+%Sv69ync8P_lmVIYUW?xu%=bPg8ZO2& zOPXuf6xcKIW&O(9D=WGmzU!qfRZ$LBGDd>G_@#W}Pc7djDmMFR%QeSKW2GG%6SIKc zc76rg=}2Q|nke3XuegQfjlf??tq}G6LB3J(dgIscTptQ~Ub&r-l+lNc{JDUc~5> zr#E8@$5JXsb_K{MBvJ2#;u*+WS3V9N0iQkTHgXx;f4#gld`pMN@cxm`DkVD?vgWz!4f7ew|$b50b&R5ETk_a6-8Urn@sNE?7X^;bh@ z#7C=&$Z{PVbN7jNQ6`nU3`2_-TUS?Cjuq!GKeqSH|F!(Q5#X1Q{ga$j{v;=se@;&S z4@GHD&(wijm-zcP&krfz24-YWyd?xY zKn$IOcd!=QVjs(D8r)C%G5T_{sWcB{#WSJ>AMU=RJ09!>7W63EbAq5Uc*tU9=5+5OKi(_pYpJB$ zzDCg)nIsRpsq3&utioGuWjkyNNEuy!ddFMf{TkUjCz8}2xjM(`%})u>1&~Y01jOLC zT@y|(=^kl(HJ8mJrXin$KDLo41`ZRQ6+Z&RUTLxN|(qVl@|T|@pB!&v-MpIT}a z@n!B3;;RD8!2HZtS;3%Qb2`kdUToW$kJiozEslo|n8U7i7ePnC+5%JorD8o1ANs#- zoF_NyK-L*;6Tcrx4-KxMkDzu83ZTG|Nh^HlZf_OOH@v)QIPo%h^H4lJ? zR;I^-jy-_|f5#<2pK6F}>kjkK(^=$7jI*Eaguc!Nfj(C2D^Dwonr&+IM0{D1>6{OA zU&a)}1Yxp%0e##ZZcCvVk6~-oNFy}bC4Xb5?McRi>1`9inZ}CC2<|co9xqZ8J+-z*A?1v?H)t5VW(T=l9<~;9y)d}eSVSB$CQi@x` zOg^V`(3+T^UdgTQLK&BGzrR{V!F!LUC~p@dQW^6V@TKc7dD@>%`5s zi1j6SB0W6m%BmW?_KN-#o%pkmT$okQTSvV_IsRY^xQflMli)b_Jf3>P72nT0*`}nzttE|U)1lwDd+m3c}Io8wM0tEV? 
zltk`wX)zbyWRV_4!4I}{N>erwReX0-ukEF;4J|)%A#1?vFv?Q-vc(%sXFA3nEXT6R z|31qb-leK$>!U!O(GEfBB~D+nPXP@|u*1<3@WwI9O^1pW{!vVOr;%|klzQYDs%&k> zD9JD~hgc5MasI#%S#>&0q`F4cUf8YZCsevbTjG-7dnKtpJ@G{8TDRA-ps1L%$rzT& z=TS$As2FTfQeNJ-UoyAvS7M?I;oyu(C!4(OhqVP?cVCA_wCe33lT!>`kh<%yR(#ym zfiA3m8d+7tMI4CF??Vt^xS;A=Igb@Gz8ZW!dRblBcRFm|l$!};t=qA818r>(n>AT* zKUN{c3F$-}#BT+E;!8;|7x-}%)4$$EI~mtG^^h!)saQo#brQ@cdm|d(vZH*n9`aZ( zZ9;zWRlG(xo%8Gh3kf4mv{r?j>4SH|1T8K$7T)zTrIDe&1;b}i56bVyM=}#s>TmLX zJluQ_?fsiG{$CPhv#Raw@Q3I*{TY=1oTtpq?f;2D{Y98LPP8+#Gt=$uu}zZAQ!}?A z8JQ_srw{{^?TqwfqrN{Z+Q(&(VLq01UfgVnaGEvi-Wm-dMMwsLmR4izI1O8LrzVCC zS@z9(Z^jY4|31l@Ud=P{qVH1VyX((L|wF+t`X$1Bo85 zCbsL#o+>NMyScYKxkN?hp93${b-VBVZ0>S4B!jQ3-}L<2-JhKdeWD6-d)nP9Qg@N` z`*k@wxnt8ykxLQ$>)|?dI_PQZxwof1@Nsdzw>Nml=Wh7E);#!rt$*wIazp*rHQ?g- zZ7w+kUEQq9zeC*G@bqRNUEgU#rA0jWy18FG*rYZqihjLE|1{3fDK7nY`E>N}G5P@P z=$t~gx%JQ+0vK&z%t|LC_WLDfnS)8t!MvzFa=lF&caBI@ee437W#k8{gIk+Urz^6d z{$aBn9y0BGTh{lL7)+Db!bxgQI`lJKjm)U^=4#Xs!f3_CS4YIhoWF_$3@wRbDF~J>!-r+OYa=8Qgcm5bL>%xV5B5EY(zT^*G$>a!UIk+BL6EU z;@2E>a#fR?&8n1ll3zorc3aN53%`1Jbqo6IDEatmE)wP2K5kNfXOi!G-0d~*-wh$@ z9+ucbXL!ZI@|#|#o4K&VRxHv6%EtithwsRKA*e$tKh}(D$RQ zhDGF}JP}g%En>v2_nT5oXEwuE*rG8lz~UAJk1!D)eb{o%babQtMs_c~GXqVQ$sivZxKunTn?t@3_a;=m zuakR08A3Q%FDwpWzU3{?UdLe)4SKep%3x=-ZM&6Tl0q%v5EVVM_HLInY#aADNzD>; z7@&M?$vw1%O$MgeqkUFyz|-L31u3!#5!bVVWQMvnqdE}__q6s|@O>QNT*Q3$n*xH2 z=G|uRXk%kE_W458C&h}vGg#;n)9k!gF??gvMltJz zBfEY@Za)~FPyA!YwIVl+Z8w@s8tyYji=`dXsX*PP9-k!_Oq%Wcw^d6J#$d&Wp<%Tp zw3`1UCFp#FT?%;%)fwBn0ymKZUp4st2o(>Qg|JiETo%1hb6o(}M(!FzvT z9OTWB7T2gbl%DwT+2=jwjhNyjeDhza(d61IL?}lIZx@7BOu2e# zgJz83!I<5S`1v!0yCdZZ&wB7~qFHU^3$JXY8=tI~p@JU%4UO6u-8R3Xqip8Oog*>y zE;KF_TXNr**NT;AHlgj}bPycxq-@>ex&J^PB?Xkqx`OP89*&DdqJHDVdU8mU(4Sg1 z8?VG_XGN|n_9ppIBF}TUSay9AdArZ_tA*FWxz?!P1;Z78tP%Q<_#W_* znX^)!n=@L7Tg_2bL|DMYT4a}h6v~sWfM}23SSr2gy>0Xb9_PkDg8(1MSPL(F+}2er zkL2B~)m?w5RQWhI7$}0gAu{$!nnG3T?3E*)#e=jZ|CdaPkOG#u#AhXnD)i`5Sbi=k zjpr7NYrzg`eU!b_XPh)H2kfS`kUlH=+v6N_ogrN7SS&t?J(SY#Yjm^LA(USN_~OQJI= 
zp!!=BP%U?^)?E?2{f~>KRTd@LfZtql`MMplJ&qWhdnpAm<^}@n4Lsm zT;3DhtUnP0iz-*Om2QpZ|xu)|cmY7jDdIT#ZE4_s2v`QHR97pt=qdZkkhj zF=YIy75I#`0)l)dNw_VWK_C!4vBk)XI9B?OyGi`Gn=r@lUK{z}H=R3QTYI0bAAX&f ze}6in`qy7Yr{u$QBrP?}4RUO7fK7{Os{hjU;m#7eI219QVGn-y+e%`*^9C`oRyIAN< zieVOLrliCM^rNrZL>F51YGIym`xkwQGY(@Rv&T#-$t{h6uco09N&*y^nHnf$xY?xR9PD^#itQN)+s6y-;OV&zG&r8m% zn))>_LW;7v(_p2DI%s7oT^UzEG$E$h7LD!kRXv3dbxgJLa_JLS(qkNte@rlEUcBaT zzXUGD5Gn?o1qzY+YPV?Fhk56`KytAcrXwyF#+~4Q4Wl7-N`lZI!YKN`A&oZw6vn?v zuDTk0%}cq+|D*9aX0D?L_iqd^>I%QE z@XuQ0{ybX$BZ&q5PwV;9_07!9%GB4I_`_Gd|DCTQrG!(nqSGP}*PU6vf8X$$2l`QZ z;>xZh*0!R|^&yp^Y`-Kk^H(H%V#|x|D@Q^Nf0B;7aymOsg|8BEK4$oY-*PCg;wVfYVaRyL*w)}K>T_{ZVR`q4MA!c&7PB0`&C-0nI$ zQOI$4$x1zdlWjbH&(FfwyJ`lVwZXE)%;2Y%)F_$QRHxV&x>#SOec*Sx5?NKIA9OJ+GvtNvHD0Xzjl=*mMb+DLe!^N#Zsn4 z(FrKkeLa#*{6D=}V(z*Wx}ef~(4_dD^ZD}Bva8qVJRmH=WjqJxVp{iM(pi^zu7QAn z;81QwIR@>v(6K~L&QJ5~C$O8z*iI8{mRjBPS%-5>&q$|~$eNX%eiJ<37pH=t$u3R&y1S~r`%m@%7TJlzcX)c zF^{fd+yhjUn6(;mzaB6S=&<%@pCokMUA+#Uv+;51@I1r$4Fbc|^R@+5b(Bdjzaxd0 zX`4~yUj~?xa_BOBo_^BCKYj>U(w)qG+D(*xR`3b#mUasaYS*OIuD}qzcPTa#bFEnx9gs zRypLGu2FtENLK~@cCO0aF3BuYmz{k{rLm%f-p@|2xk-n!y1N&^IavXLoRk$Ur-N9!*}ick_Vg4i5a{L-q#c8mnZw5#Q=>_1W8mqy-n>AGP%I)2*M` zSQp0s>JZ?2eUA*ObpAuXY-{kG?fLJ)c?lc;^ ztB6Zf;~dFi_f<<#d|xwL+lV-GFnlyjFiPVYeO>nfYl-C{i9FXgHSd;Siu7{TC%glP z3S6lxHe4GI1|?kIsbwJez7IItp>if%M>NWSZbT~+7%fFc!*|o1*vJ)_CEYd_D%OrX z8fsSQTRm4X&9XKG>7^bM7)r8^-9%UA5$yTNElo$Gbr-g%m6#=!>y(eGH#fYiov6?w zJWexQ9HZ&xsWdXI(g*3Xs@#lK*;|*JW$0G3&9gREr0w}x>oqq>|D~*VoR*<%*t(&U zi?84>-C>%SAp7xPa8dbzTkg1V&dV+=pqIujiTSGV^N;AZ_d~POKFX=ea(e;3r5qPA zbAuVQXvWWJ;ul!8T4oN*je2=+th3FlhMyel6`nW9KdVAGkt)RQZL7ttu>{n&(rkPk z`tqLGc*L)XsmLpuYs}ee;!;}&gktgv3dA&ilT$^#Ar$Xlg8@So9H<_S=DS#0#hC5r z!}xu|#wv^uepTg+@p}0vj06-=Vs|7lD zyI((;OX5Yl$fft`I7{-l_hmn;_DnM1y8oirnXWbPsr%=xV6daf)7qa-$a8B0dQ`>r z%SEr(@SpVGH`e>RBx9AT=ciWn)Yq2h9h-}&e%9Ca9|H&~Ud!1_)f;IP5~fzXwzijQ zH_|8|)L{v4&`+HFc#3YN^0pIh1!aWt-L`qMxwJWRvPqg^8eiJmG>o)|)ZNsX)UMRe zRkgncCC!)kemewB4vKN?xd|tv_-2pocv9N@XiFYm6m#CcqHg^m1RX9A!#Z%IODOUs 
zAK&qzWc+dHa*2b0fkfKGXKOvPnja@+9O{iEgV90k!mgJJ9JL4ADcPdhj9P=Olq^w! zqwm4yC9(|)Km}X{n7q3@jr^S4CpjLuZCU0t{G?Y&r-{pn1BrEs*@;n!o{8^$9yvK~ zVls!Q<`?s`q&|dFB7OMLmNnchc7Aa6Hh}>tZ)PDsL&_tR5a|q4gjk2flkd=KW@S^` zkMxG;&}n8kzg9{z)IL-SDHOAXxYvmViG81~hYiB!%qGu9$#%y&#ahZ5RGAtx8QmY< z9$gz<5}g&D6df5I812DA03_Qc1`=-L0dcmmfEe4TK;&&i`0YVLt z8Zq@qsss!yFG(CQ1xOqP3_x}3@5B3)L%onrFa?NzMZ_V={Q1iULy*{lYwmq&pKT~N zl4acb$F~U^JX7zx`Vd3GNGTW$WB~4E`?|^ZoqZei{qM{AI6}W6`D5IXp7JicxPI9T z>$?!La~<1k>LUqlL_)^IA|V$L0=n|qynXPGCm|PXvvHl;?CN_R8iSOMQA4)Q_3&XG zUjXBrbe*To$#rEjx=%OM0f`^!<;%MiDTYwh2q*?Wi4D(pt52g`DO>yfD18U9TqJfp zW4k?l2%!uNs;Ze8e1disKQ8R-=EjPfmB>G_+`#wCWtiN@-`f2JC0Cas9L*%w*%q0P-waWsZ8LkTCG7 zf-)uqJitxR0)$TNtA|znP`NZYg%~x$6!uaBKr^v3Hw;)j7;KX|XdA;#h`^3y zh|xj}N3q^M$g2%p(1*dL8C>WCez5tsr-8i~aWJ@;P{K$yEMsyUa>sZh@}rPDEkc%VONB$*9R>x1sxii}MwnBKDB*`eR`zrBWdSG0)zzj(JC}Kt zf>6~oRfQ}yfgGofWzXg#5bt-;ryN$H8$Duc-c?Yn>W3^!0r9=bwVReB%X$4EmYc?M z2x=XPJ?{jlZ4xCbNdV7@VN@K=(rIWEaT%kD#EN$r6gA16wIjfI8i|Aavnp_AKXK9` ziq6_b2fM*_T# zpw)?n3@6^<(q@=Rmao9I6X7yjlcANz94KWHl*R6SzWFw^x^5AaHR+v2At3tNCF`4j zzvJENX-d8RvWv$%hT($Ne&(!s0UO7u)$B%LP7NlLx^`QS(fx)=f~-n`uf^zCPUNd1 z?Vn`)!@aT2C`5!jJr2(`EIq#MM@*__84FM#2J{USl=V|&bqd@&p2u9R@VR|X<_wR= z${`mK^>jNto>?f!knxBl#A45h#EI-ft0rF(s`4^8uA8uE??;PF!@8y*6vEInIIJ77 zsPD&(l)wt4cofzVz4<`A$)#hF-~TqU&%LqBlX#zN-6E`CF;eq~mPJ!Pab!K#4*8HM zz!iE}H)WC5Z_oI_HL>+`-FJ)7e!9p}EN${q5r7BuoU7lWl zd9QBGBB9^RFc&5f`Hc0Ifa0}Y+7KtYu+Rrp9)wisD@mU?eh@s!3)CpT`kf7JWJ) z2fL|Jdz1Ik;hfS`r>ENyizGouJBBOCA0P^Lil?jaF3-F*WZW@bn8U+#zkT9^;ahaPCs)JWhGlE;xK_F~ z$GB-kXRIs9(LNfMxK-mW&#HCoxIwxb$BrpO8^AMTm&Y(jAG*!6Xq`FknNGsNYNFRs z7)CCn&uKD)9LKW>`t&v9=`*|x4}yEQvkl&0X2z(A=gp6SbXFLdE91ybHB z9H&no=g>E)=`isq$9WW6^=rCzM@CK%M>`W2@zL@1ylFdkmoeIOAKz=401un}L_Fpm zvuqkBPN=8c5gYQyxFz1_UboDHp(G?x;)yZ%dtMzL&rTL~D!fU^HLKaIGcQP2a7mD$ z3>1I#)$zZ%C*I_qu}mC>3-6cHVPsyDC~l4yz@YQg(_j`BdE+9Mpfe3>}tF=ts*E6YwX$5;}I9*l7dA zO@uRD)SFMKfRtUtutp~-ZNy4_0JqS6?r)Y6!`9&$l#OB_|A{N;3HP*R&oF#KIDAr` zsL+FnrtR7MP+7IQXx^O!MZmew`fb~R+r~}>48h`iS-CoI-o6BTfYGDQPtTj9T@TfQ 
zMs=dRAc;kZ=Rl+f&)cK@`EO;Bd8|SUOgA?qhrA;;b<@~+W)j_jj}JGu!mL2nY&?bQ zwoSL$on(|FH#w0_x5*-Td8w_AVa$Xq%2qMw@tFt5OT*2^8Jnb+Ki-;H)KBB)l}L;S zK%S~D4UZbfY&xbf^6+>jBpL%iPZL)eC%n@(J<}R_783n|YS9#ru{R`pysI{`(*}8N z5<7tm4}crt60T93hH3n~a*6o>fyb&_!-K|E8&#|=Y*(YLx@nxeB8kob@2AtNm6OJ4 zo8D-g-}WM>}6 zr?7*XY4N-#i8le6hkjtoKIwIkLtZXN%7sV|N{ZL#Z^-n)HC4k>{f zhle0np%{?VvAmCeMc_)eY5S!m2TKw-pHq@%Vc%xW@E!ln&QRkiW`Y7)MwJGk)9~!Xh*~n zu|jMbZFsJNn`U30Ly+NAr86UL(A&vf>E9Q?7DF81_~E`HI3uoOv=O)(W4tZ)9ijPo ziap0utgCpEC_w#9#xLv!~pL*o>maxu)4yaMUxvuIB=p3_L`k z;^5IT$N}P^xZM1H#BR$pQ3`B?=CKB(^=v@jfte@^oPJt)Sw~%B5^fft=KwUS9*31i zPyRGP2-}Sdm<20{>c>%_iI*pjX(MrC-S!^HiR%0WhKZ9vq8f1yXr>fQqS}bv#I|Jz zMwvdiDlSti5xR+N0|uI-GNVp_m<;Cedx=ZuHys1IJOO zXbv<>@>MYgRQ1%rqF4xq`T*e(Xlm0{#UhK?9MB29GxX(OeUJ3 zN8B>Nh#`y^63RcDV>R&^%8OQH$uJ0r&%otn069*yL9e*+fDA)#Xt8VvC}sJ;cBJ!c zrM`d|D1^HjXf)ge7A~9NFEs|-Ln*n%E&T>>;*aonX+fX~-}($_5BD>$!t!w#BZ`EW zmkH!L@g0he_5}1EI*nc-ZDjwGqT`{@+{i%h;r39OF}ZMYZZe?2kYn@;c_Tf@Ya$cc zoJ_%;Zk2%3NC&c;Xn^i0_3K;hNCgPx3+^vXVP|;5jW5SyHH!lOy7Q)44p#j?r<6x1&jbOk~ASjQ=#4W5~pTlNREpdk!VpWLqW<(0Il(WKGP|t8z89fz0D<3kQ z0S?)2Q`Y;a^UhgJ_b3=W)enqX{Tdb?c7iI1yU#G80?n&?Yf(^T zWUUF?4x>j^!GqAVsT(BK@mT1>?2o`>nPJVSC%8+Dj*83BI%W$G7$mHEv_DG5Jk1Gp z%{nWwj@LpTb{-~V}C8T-KUn+o|+N{ZaiGX(i0#T9C3TMZ=5+7HhAu%+yfkC#}cI zQ+46Q1_5i+vBy+QW-@Jo5=XTanz2ep)fGsCxV6FO>ahrVQ`vAY) zdEPv;pX6%7$?kl624h#Lc*0>a6iDUS0O2%ukv+iJN~#C*fi}m81j}A>Y^(BG!0O(- zL>|^poxOSPym4lIt(sDklr8;orS@ER+C9)%b?O@P2d(qcb(QwKo=fxJFSZ)WX*?w+ zyi_KbywNyd<1{kks%< zTNjTprOn#MH@%E!4)tzw8&x!E*|C$M*)=JPrrm98u%zfPR4!OYkLjctHY8~a*SbsX z6^_xTov;Y%P#%D|bw?l3M_HOTF?$i~gkf{PnW zts}?O>WR}BSE?HnT1Z%)bks%wW0Y!pO^69&hG~8#dhIk!74DuT^1n5{|bRki!;W%^Y#@9vb2_kAY>>kVc#ZX+J;2p=!pUyk5$(Xvm{#&Y-$p-m-c~B<_|} zg%4uSHoshaC8o_^Y6tiVE#TI+gbX>x36py8@7ukhMU=GY8G?@sCyn7FbZ}e}DtA-f zX&NGlBND1~Q``9{kC#&_BFOJ-x4zU?RFcIWdDciavsBj1#R$;<;Y z5kH5W$x>Tky_{v`kU-oSsVJZKJ~kp6XT6GL>kwsJH|Z1KorB3@Td8J|o9Pa}Gs=+2 zq?gg*`eIv|o4LGz{1Sq6D@-|gdLOE9= zc%BT$fVqq+UOAhJDKfx(MkTMhP0tj3&YsA;7wB+eA*1A)q0wvquxY`rWZtMOPR%&S zTjb13Uu-R%c+ 
z@|?(EPHNrW-#$O<6a0GA#n)9-8D-d>vt9ev*5o9(JXg&-DI^k^u@f8876Q-m_H9h` zHhk3Qw!((CI&eq`8V!pxTQut?l3Le2%UQ;}s>kC^hDY7!HWE&AD_M}NJl#u<$VC>^ z4hR9SFziDOK)`k|t{&9DZd(}7C3D$sn;p+3%V&7N4DSiBU|9rKWiaEeUl}08Ggj@g z2d04+8J3{K>bO1{Lq>QffG#UwH#l9@k*j`upp)S{?^O>NDd7D!KVEItG6>iS76hDf zT_u4NR8IwGqtpQbw!l`fxCtkUC5k@Y5N-{%iOR4FuII4VpRD}>SAzd413ax7V6m-& zhYTR+n+=mOIR*qcZIj_`xOZd;Ic`URFH~b`vSUfY)=+p+KVa`;J2S@8#xkzIYfB1> zRhmJ-L%~B$K@r3O;eha*8P=tt0I0O>kPfUm%r5LGY%h!;EG>*aOaV0k$LLbXf@6yV z_ASge%pc_*_ndZJ9jbl^e*LF}Afv>fyul^Jqoff7_=MGE#;4G2s-GdLeeno8K;^_X zqpPA3lk!Qv#J6DHvXkZ%lktJn#a^;(@xxBSoWjn+1W`nNTG6|dmX&;BE(NwIV5Bf3 zm~>gz(6z%nJROw`g$y+dr50NNuLZjWS7NK{Z7lAx>axtT$^*16uP)&d$AW0f z6y~434u63s@isv5A@)*u3mujoh7cASCL`G)M%?eqWwgl(`x@qgvVq$}OU^(}9Webn zk(iwu-;%BdLd?pow9Np{qIXUr=5z}kh{NsYk^&YE@PUtTQ|L<-ClcFKffWPQxI|l| zUUkRrQzX0d(WCbO*uZpZzgw9d}0zD)`Cr-!IiCAJZ> z(&wi5m~FoX6VdA_RY44B>-B)41Fnv#K8o8GUCSdQ}2?h?6+ ztzK=L9?VFen{2>x)il71JFU!?@<4l@(q<0q8Gr|ea9NplA1&#;N~d@Fx0DTn}YA@zNbID+m-=;!+nVr$MvVb zQ*uo9kpngk5Q3|5Yn8hgt~h;6fms9I-~>7e$TH^@cpwn%o^?Y10S`+lfc~m^KoECB zIe^j4U^{By9=B-WDg`kNhq#(ce#<_515(#L6zaVCZO^ERI?}au$s+yXC^m4wyjr8QL2rHZ30wi+Y$0+8fsF zcm;e2lUT(i6t;t=&LmcTnF`Yi7bKYaq89FgkHtc+^`NyC0+R?&z?aZluAN%nyShXw zdz?$n_;h^2sA|oYkTFsYs~FdWcB#AF z)N7rZJif58Jxv?U_N_+k^7Xb#$+S+FbJe}l1LM!@JF`eEf*aa45M!(Mk^!A*9gzKd zd#mHAg@J|8@{IC9+qPC)zA06*Z#KiRrAKf@!@E=R*AL1?d!#E{FMiw~Hn)2YBY&d+ zdyqF!$q9vMF1x=ly^;6l8VaoxCqCg8@riu4v$fBAHNuJ%)i?=0V{EAva=pSZQse#X zR;l^1B@a!Tx1+K!2gJfZFU3p2Kd;1_!#^*_tD9NA6syh~G>I7W-e?V)e~x;bn|O|T zlv{U>dXgJ;4nEBNd=7584bz(aESb@uHG5p4xJ-59)ezuT$&z}Sh{pNc3KSrkK8OkT zWP>Wx^Io}9>oLh!ch&xMD55F#8f?02I|rZShMg0F;`3)`RsSSZlI}Tw`vZrYDrvd9I?yDhK&9#95MnF zCT+I@lz9Dgg6cp)#1e&Z%(mjMtIP6l@tbl#EosmAp&6-5oiN*r%TQOU6c2XrS+!!gF1&pPc-Bvjj`zd(k)r`O#*Y07Lv9vBPoB zHgV4(|8!zaXNfDBy73OcjPyKh%dEXwKNGdp?!>c{-PA?-tJ0ilr)N5yDV~!mABM&CgBYZ%^H)Xp9SYZxE0aGZB!o90eduRHF#YQ$?DrvHbj z<+QEfc$MSDs-sIsoI^)o$(j)F2zhn!@w%rwXYjSBTk3YE{rKolGtr|lhp_GfjPI66 zWX-L)femBM~HwX|ie5-&3E@%@U}TJMXmBOiBta9l1Rdwd8CIJMoOE0q8QW(d{B 
z{RSXF)j-=Q#ARHVK#?@C$2XyzBaG5KZa3Rl-i#^WqcGkQc^A~PBRB0H@w%)>-*5}HIZP7_*-pccq>39UB7XtZd>ST?qNmMLVK zO&!F56vI1cxP7qgc!hqYa>dFmo1NlHxUgWb(ftx)R?@@1w@)cJzi{7m`*`%z9U}ld;*L6=lE;~FZ1Zfc5N4`e2NL@+=6^~iv0PVcFiXZ zyLvp3LIw4{!LocUSQH~o`J%Pf(Mv_z;i=N-Y^TaSN0S@sk-k-dV_*02xsV3e^F^1k zjg6OM?(U>bA0>e+j9F2!V0p!CR;`IBZ97jE`3lcFka5<~bL*83dfsKsJFXP<0r)^_DX^;=Z$eDQ9SAv?v#EVI<};=;Y(qtSZ{t%^o29M$2&r))Jw zY%G`Vz*a4tukY}r&SjEqymD0SYq=pZ-Zm(}918fTp-V^D`l&~e&k)c!X>Z=r!9qxR z4^|w;9W`LcnPjvq2h^BE8D!XY{prxY^lyB}-8ON{6*B*2Wb!tMmIJ?+pchYOuuh{% zg=SBEXlIb)bkKEzC_%PUN54#F)?jxNE(w(gJxQ9wINCtZiHB(1x?#(w7&_wnr2FFQ z!p0Z0jONt$0iNUAS8!{VuR*VPK*%#Y%@i&1Q;f{-kHcKUeh=JlV}*VJ2x)$C^NdyJ zamT&#pgte}p-ZsQ+G|5M6o8?0KF!0hweu9zGfxs4AN!;0K8h*qWScpd`MZ@hZN_`m zCda*BCAKOZ*i6PhHB5_zjqz0V7Y9B1@|IF9n`b)=0(q$HCj=PAmB#gBt$rQlKM(9v z1<-g@DNqOg5<3fPd`DH`>ub?-$Bjf%1W@Wp2S)$+CS(~xl)dH9$v8D$JrtsZCngGs z{*;%VWm|VF&1~KwVx&tp^}yS+>dFDV@`~FJ*t1%(2P_(U&02X0o8e8q&TLf8@T;_1 zls(z0p9=J?f0>@Pe$)}2zq2lkzEzHK(97Z7VuM1!X#>m5k1L->Ewb? zNta-!-8FEc!hXyulz_ltQ2jgq#1FV4H@VJI)SS%LpRG5BGd7TEZ2AK-v@Wyh!les6 zu%X0#+_O{d_X91#(!3mMSWsC{Q0?z+#*ri1rxkgnmF2lP8u6j27P7k(tkoTELRCl} z*4D}0OQm9#iPy>Z-;v!#zncao*YK;xX4(_tkkSLN{isMPplqSyL&%JJ8z>L z${fY+ga7Shr+_O*(3;_^SD8uwi<6yJ|8%nRtUK5s^&mr_>%dEL8l#^J>o0+^BlPDT zzatd8BSeEsp7fR)5fxV&g<27XI}BSIm3skK8kIWrEvYhU!GH!3+&pA-V&qWWT=Vy9 zsX1uLtNPZ<^lL}Y(&Wj9xY6^T4atowisAd|r%1ZUF)p#`K=XUA9LWeOo2etBNr-}x z!--i1I=fF*`QBng`myntxt&k*c^ZLgb8x>)@WXE7%=HV`ugkP}$eZ3DTjm0TF9(g4 zL^Ev2!t{nE@$&-?c%HmB;OIW6m9jvYfmXF$ONMv1KQH-fBb6Mjl6gCo@gH|vLOBd7 zD|S{SDqh0!DUh8C?gkIP7%rKC*yf@3Y78F)>_5hnw zpdqDsms#VAqzJ6?gsX+!j_E@CkBgXUs9@aXJtCFOQk(rJ_P(Q=?ve&t%`QRp;@@S#H3}UWY)5I^r67&FsPMs{Cs8i1yv4 z2yC%vS>2yIDrO}4=ckhR4rjj8{2`jorOI8T(=ts{Zq@q%P*q-j7iBh0j+<=NT5U~L zSA@NOAIdiv(-J|GSw5>tfmYVkssiS*T6UisqV!|OX|*fnJL4r}%YHxoAjn6tzU*tC zq>5Y3bdgFU+7q;OBBrl%G3%k1`zLqWM0SFoJGmqD${b>Zq(gVbkvDvq(vPB|T4XzR z=MSZ4C|Y##KK>>(-Cfgz%2NF*QAoOIcr(ide{8Zr0g$0+I3ZU)d^<)yF!HA@=1*Gw@w`U>T+?UT0dkpX&fRSW8+cdoC`5I8*CxfRyXDK(%TfF z2u#+a1|6~mFXR<1GnC}CaV7TT 
zar;OfUQb-A8u^1(A21kgcONDG4=d-?9tp#A>zT>Kwv&l%t243fbZpyvW81c^j&0kT zSRLEnbFhEHKB=qD>tt2cTDPORH{7MJ@VY}fsNkHk+4jo;)X>sUJOKEidx-(n#^$rk zsqa^e?mGv~%Wi5KGeUpF-dtEQuhl4l5CQA)6yba$f48A?l%{w__;0YLHSB%PN84(* z=sZcfc{uE4r9(-l&&Jn^Y`kh0QK6_BQzhK2Ca+Yv)sAhof1V%a z6~*VN8aQ@;eKc?`<2#JhvRg899RfHUC*EHvM}9I&pW0+$xW)iV`p4M`bJ2^7BD<78 zJ7Dm6`~fBUsFsW`o~~$t`f<xQYUKB(yAR5B4aU{x95a4L%S^&{BZ1ag zEXUY}%Ul;Ryg%1Oo?+DOV?mrhT~piPrBC5Ne?Ghwkzw;h)a?q#=ZPY@L6JJZ1o4$q z`TbxAyRcRv^B!q@JpZ3?Cm6rq3RvE_in>aS;#`E7!Qj51>!`ScKO*kKbN8THxtzxjA>$I*y>M( zibMe;OHrgH1t#G@&iXWySIT5kTT?Y;>184@Rh+7->er+~i|Ejbc^Xm;&n|elIFi#T zf$9l31+q@~qZw=d_3hKr8lA0EzlR&)&b=rnTy_vF2C^aaI$hxL2Uhp+_z4Q%6CccW ziA!bo&r3*ZaqDRog1@=GiO7npHh9B&m8>ZhxIyB>D1fw)KdrkXdS{_l*_Ng{Zb*jl zz-+$ylv|BUg8%g(Yo91(2byL%U}cRMz^Q{pdhTHe@C$R=LNf2j5Te5B3kCoP7u@G= zWCMH+m^**fm(;k#&16I?mP0_wf|yTH@zo%8#PB&HoxwKbgxu31~{Jf;z@l|x&(tQ88Aid&EA9;A;kOvQ^Ry>zC755v0TM~l0Ug^t@ z;Y`XR0=~KH#t$ZGCkX?!b8=M57r8oDmowb*5~d)a_SZM7H+%2={RYr)Z5jq z#n9}X6?xt3u65wvo?o%PG|NU^k`o6wq7_U`x&*=wYN5=f+KczQEwG3GZ$5LIM1z?_ zpG0F>&8FVfP;o22T7+4{TYFpRk)bD9Q!p~ggkRXb9eZ2jF#X@!Xk45y(p3l$EV{?p zPEtO@Ls$F9#vMWFkr9llg&zrzTxf=(m*9#6h@rfoZgrrxA4F0;%BB>nyF7dr6I$mG z&4w(!M9``U%U_@H-Kr-pxY&A|hiau%#_fUF{c})o8_8b1Go1p|U}bszqs=8;Q-- zUZfSKeTkA1B1$w;WQ7`^{S{!%DCiJyjg!0Yn`7Q(g?yL+^_ngn{h!lA+?vJ&t7fbz z*lJ4+`cLd#iOwZN_)F})GqR$P-~*-WBVseB{gL-|qVJyL>o3Z3Y7a3TY`%Cl^48Sn zL{PQBzCHGn;e2&VMpM~12zj_sDGQrufG<#D+{$+57GpumS`WaDQ;D|)-mCTXDZA8M{caf z(=dlo!PezXduoTIpD;R6FBexKz=&Z^uEEa{nUs^t&TK1xq->t+w5zsjvhWMzLwAWg zvb=R%HFr7L9Sc8GR^${tWI5$Mv?!8S-f!mx)H?JIEK&qg;mAriT@MJd@TyPn?LBZ@aWr&;p&uB>Q8v@Fv&$2V}g>8K@FwYm|j`)>ZmT9vNGG;KXd#L zTEz~b?re>PVfm(kS_;`ovr?OLqwhT;30tVAb&D7S=rDdr#wz6o$SGbT?E~fuF8%vk zG1b%MJRu;EPK}meFl$b4hmjz#qKlw~hUyvzTunReB_a2o*z}{6(76LdVN!CQ+ceV zN|!WN7j7lymr57X*fhf&X*P~!UkttBdz0e!5|p)9<1Q;rpNO2lcBHr4_a$uGWC<}S zJZiCTya6@i}^%*yoll! 
zHmN6^BtXh4T4#(NNUW#K$|-I(bJXgs@S;gE8AZllRw;4Voq2Et%KQf zDytiRKA8-sp)SQFm?m#99IMbrX>T$%#cS$2Vf(4qKp0^hot&YagM@XpE-joxx)Co5 z5kC=@Q~hYl$2$fN6jSx-p@x)3wq(W-C8{@~&f#8hthf#m*J!ol%45sft)3~`!#w5H z>71fQ)V;FYoS|inMVJ)O4-Dr`4-xm{aj;5y2!KkSVnIOP0P_|GP49RyC90Xi0~9sv4ga&{dDc>0k2rsmPo4mq&PKAC5EkAr0*H%3T|qi#y1V;KfC)2`WTPl0eIY6A zCIGU_=8q6$m9Q50izVr$vNqp4%=vhK>=Ijm{5SKxx%u!L)o1DzpMBY=Nifvae85}Hs78ELzxGQHS z;a1Do*?+-1&cBf9jmGdkp5z{TS*o7|T!o05wzXV`GNuAG-4lnnMyXmiTX>_Gb(w~? z&!`XRIs}81A4u7Il#-wRI&!pC;Ui7T`B~B3U>%8VDKKCg z5V8d<gbDgDg1I$V_O!hw@X14VuPbS_R-FPJ~9i`R5Id%!j|ue{A5W|9tTNFhWqvf6KjK5b6Q5tMFN;~@;Y z=kzSOl~R7)9vfm3-#uKb1N>}kIRXA^4Ueg4E%mU|lB9*-%gG*KS9A=pkavc!AZ>f) zISr$?MNoHMOH71kp#X|A^N1f@x^tBM9^A?+nH+wm3wAjLlg9O})~~Sq9x2!9kNK~%m`x_y*iQ-d>f;D!?bPP`an*oe#2YrtJ+Y)MCJ zoioW07E&jktOs&aV#^n@4|&P2$`qPW+1Td9)8Sdi{VR2dSSUn&0wT(Z+d6_Q%s=6 zX#MSi3Z97^V4vG;?#Z#G@-L2e?b9zF`Sq(K=!$ zb5+p}Bd8QRxT-YiQ5c9VA9?C}NHFBrA>MfA?0_4q%;j(Q(%h`y5#30rxDu85hA0Mt zdqy13y;g-UnL*t)Y4R53bV~}uUe+Z1QlsenM+g|uBc9zHbQ-CjY!sJw~ay8JnEK^tg&N@9!Q-|%Axv}j1J_oFw57f zmsY&x@VEDuoB!GfH74z>_`tFJ4jEJ9?IYB0Q-jNVRu(G`TF&SQrt^j8D))`7Qsns` zmTR3AGO&ZrVH4UxPaB&^=+hLcZ{e~~RzuMdQSJRj)kO>9=Bg|++pjq&4)Rx0fcgM z#tfxhv35|p3;*IjRHjr}5S30ku*c^A-E<3dcf*>bHldAScVjykLQ(f7+-$#g8$a*L z&T7ylGz;CGbHci81oe~WmHDNT0Q1QT_6y=Y5_Z;sGiCfvvAH_>ThV&T!r-zY){&1H>GQ*Hi;6jP>PCwOh7a93F$c_ zS*tM}#}$R6HWBKxrfU{gIWp_7_w*3^e~oj<174VjoOv855f$35xjV_a)w>fjc2v02 zNjkjtZTan6*0od1ZX))anbQKQ6-XI5Jks?omiX4pSP|H2UDJ9P8?AGMPDCSy^QsH8 zZVc= z?_&AqfiPC?fBv;W7V+`f2TH9wp{-0iWcqZX$QR|?31&Vkw%94_6yO_03S2m@Y;rKD zz-W67S+Z}od@eCvy!@H=yja3-V~XqNof@d>Z>zOHp>X0%yQS1_eA`nhOwBeB)ibYU z=UrHH>eHo`3a+FP(zqZ@wZ6t4U~)Qg z{j!cw?Ww1#-2berdq)Vh^*oTw$Hoe^c0&?fbE;QDaR%b%-FW|;h3?^D534Bh3s~SC za}osE;KhwsUyd!fl#`wGZ<#Hm^GV${;mxZdlPcP9Hbv3`iRUabo3T+Xw+Cw6U>YTM zXI1KuE(5y4t2szk+TMp5RnKjn{>eez2Kn@%SJI?<&v)3?kIBVsjxR%Owkd`!5soUh zrOl9xVUItaACip;{1t(MM`-C?5O%})0}29DpH>mI|Mf_--TMznty9l zFzq+c5}IV=gqEg#OkoXf0qhztRjmVKe2uZOf@ogXp&U$EdX_E837zyZyT7 
zm3(67X_UPP&hwcH&&YjKRqQG^V(%Qrx(i-^FHL>V)GuGg=Kl2~22e3*%sRS zAGzUwYBkNY>W5#>5eQj?`9!4DT(O?7OAKa~P0r&QGckhf>Zy+d!z!k8uBV&QF*gM; z>X;s%iyBR-q#xEzk#Yni?C9Wg0++EPfAcm(hD!e48vQ2##%TiYrbQD zyq{ATODUUoOr<)e-4-3wa`?ezNyU}#9A&}?vX(dUMV4L@xh~Y0Nn5+!@DipeT0gzPN8`&E)w4m>kwk^%qyc7?_0kVmmo4#g@P-XpfYT#F zr=d8C@`Z6Nfgmq7+1h&}9KK;o-u{=)taBURIk5*U{ZSIyv$E7orN(vda^w`9QpJKo zYlbC0XvhC0cUAMtrI2l!!4is9En;y~XdZ3nFGj0;D(0oY%QI)jis)a~sFRMg{xw%b z;ZX>g$KQJ0SY(g+Ij*wdrywBfxpB&Q2671i!4+*2*l>r1ja5%a;#|ZQE^PIEH9{u| z6WW+@oL6(i{Bb9)BK~w_mu2nBluI!#hsP9iEqkrzcfBxG@W`P$Ms@9MFGDiWVfI!F z4)x#&8gLESSbE%31kHg>MO z$G9D&wnDki(P;po^YSAkyZN^}Y2TQ8ES+fgS9Rm8BBRWd8AlAA{aGW36M-1xfl8iX zy6VMjGyx;{ar*f5TRKLi@(q1=)$`Pk<2$k`z6){7?hGkWOICa`3JRx5+)({8Qyg?s zG=KztN5-yOA6o2f!1gS^YqG)zci^5OjRUrl{}2v$Kw*^+?^RB?o>>C(pW&XVgBxq( zIvncI43=~%XKH3|7XO%2zekCbD-C8Ppc@sgSX&>eHt|4=)q zb%GSPQaV;xy=!bV-m@Z?OTm>$pKlI^`N}!h3^*yE#-9~yfd~6Y*&RezMV1>K@(1+?(>Yjh4f zmWR1QQ(2Rqx-R&7MH|I+i|LX03@MlH@z$(I=fD}*SB#Egl9dPz(LGbtMo)zaiq$is zqv)}pliaj5j}4_Y7PW|128N3w7nij`#Z@NaS)%P>9PM0aT?6l1Gf?RyiLlzhVF%3f z@RdB_9h@9<)CJz5r*+Jt`<$h#T=9zenUCU#?s%=`$=FgyHn4`K4;EP1wc*0Kq#S#=h9$PLmdop1G`B2Gsf>1llCxTHifcO*%3Lf;FePLkP$B6LDPMNc;iZa~ za8whn6Hp}4F$(pRTDr}^Rf3M5+oXfBHq(c9De;HNA)2@&)F^2hbBytf^7`$7-gACQg&`TDHN#)wQZ zn*K#@$ykw!U6Q}xng^WQj;L#^Q$^Y|g>H5(Wma-Zt& z&VqWn>7I;;c0Ym*5UlN$z1nZZ9sX(#h8c43j;>>#WV9A!#U?$H;t$xIz(~{Lbz~q9 zZc+gK>J_Z&swx+uf%fqi?ApurAc!kikT?CnjhWCU}mt&khe)r6)gSYiz++Uxy~ zP1($;T;}0hT+f0E@G}APGjt(5PJgtP^Lsb;z*EksuE$3WY9iQpm^$D}A&RNpcWT+<=p3oED9zmmx9R)Q4}J{MA| zFf>N7XtjE)m{a~QuqkG<7^+xF6slt1MJ}dLuSUx^T4}|VWgzFe_4J6G9 zx>8c^plE-IU6M#ZM8AnEyds}EkGEsH6~`06>!hlpKG)MfTYbm}bQ-nZaHkkj8s(Rk4YtHjYL%51w}tX^yE^vb+!-ecNWvEhuCg zyCj=eAgc90;F%Zc&u6ZLq;u6RXlcV<$*F(ABg6|`+85V9gprkBKd!kv5?SqV|69iq zSETe(gih_)ZhzpcDlVFu5wf~?ctlby0N)F)-kPGPI04GWg5MNdUGRBYfA8+_{&AOD z-i|)TeAil2VC9k>&~VWNA8Dt#G-8jl=$pM*bx5~R{Xvc*X#{CLyz zg~H}#F48~m|6_=7oWF@9vVFUmV7pO^Q38Kn0G|rqNj7~!@ME)y+ht-ArQqB(*_IT6 zrGIe81o@E{iaH4ve#paKwp?-HzWDzM7?uSuFU!c-K{srHa`h%8qyDB^;#hc#W{kDk 
zMsL&tkW%`Zb)L`*7IAD#^A@nL-eNDF6i#CObNxL~VQEv}Nx)a~U2{mY> zzVfq92VEYssm1nE=+$qIQ@$1S?D z2K=UN7gSTMJEe*kX|^U1EdW2a0lEG$r8^*KBQCmw0~@I-fk?d^HmM_tAwR=*l-;PU ztPgsn-mqDo-n;U*%ybjSDtkUe{R5OEuk0N9i|D<_+J(S{H&{4LQ2v~E;!2juGuC0V zR%TUb!%S&S9i35@H`b`+$ys9NsM2v;)S@D>=-5tBAlP;Y@$gIMPE9-~Nmh}pa=}%r zY<=NfA<>RYZRaqt1x%rIXbV@p*(bs(otbsGgXEuZ6B-?hUtNsJsgj9mM$|OykzSYJ zn%Hq_->&Jw*yb@bpE$90X3?=q@=M3zL_B9 zA0s0q8=f#c#(l*y=I%^Wq|TFT1C$Gfm#?OqE-eN*ETsE)e6dBlTRWo8?ra9tw@xbtdh%3G7xG_ zSWZNQW^GP}y`byNR#b~mW_< z(}G*Y3utU(u)GZ&IE=cKYC-0e%b_r&znFTx3Oppp>xs&mysWRj-uHu%Phi+Gb1|!k zT1Fhkd_#)^Q*XnY|NGH3oyuso_h`JOv=VMkAY_gWXU8n478m}(dyQEu#uW`j-5z+l z?%*Y;B{G<;(NT_>2vgM9ozq#U1s{0wPMc>=&ou1+k)MYR#w$X^n?!_3BTc`P*)nb* zas(S-F9%h!to)Le$+G*^@>NY^g9TZp2`O5uI4{7nkDt)E4$3(|WvG}Q2S_Uk`lIPm z^6Z=RQ~I2YAGdf)a}^m$CTDJ?!170}wlh-U$Ou3-ZY)UffDm7QGLYG=0f$CxMB8Q? zmDb`t>NtVJk!++3)r5nD=7`1oFPMxNmTuhZk$f*aJhm?D42*{RXx%g8KXa4<~Ul{*#3 z6%mlagdBOXXwM>z3+DG6(tk;Xy7)~QPBG&^NHYo!S}Kx@v2efs3i#jrzcF?fUJNlB zOLZO7$4In));tI!4=!O!TH~|uIO=8iE0i^g3(l{`NEF-U^jzUQd4$t!U5f^l9xtCK zcYq6g9j*F(E=W%)-#99Wbr-d?$};ZssX5Kkxa`tfD$lOL0V!ac!Tm z<4sB1`E?|I*>s?(5=7~o=|C|S8Zj4V8FgwPSQ9k3BOnC@T`aZpE9{R#K$+JXCZKgH zUkyCSj)I3n$-@>6!7HCTS*yO1*FV(M9%sJCD4oBBM4XdsJBep@8|)mHdBtOOzzj95*z>@C)=EX8~H z3?zwCG(MFg6|Gz%4l}$I^O%_9LsZd#TK{FSp7IMJYGF+OHQlgyn zNPc4021wJGUi0* z*+*Tg^TCb|0}s~6!0pOi8yEU60Oz@hyn@s2s9J7D)Ng0@xZ3CRa-$8C!k%Y8S82zI zoH6gw&UVYG=kM0U8Zk0e&n2oeXN~1fbH~2}HP=u%;%`%j{?RH>36>ii!~i7EL0ulI4BSkqr6L#u2hbQSPLP)A=cH(>Gw*u&sgmGJXYyFMl@kp8=4HliBGvz_kQ*;cmXJYL1rc1HKFc~C`NO!23GpD+lEUhap zW=IqMIlynV#c?@1@7W9C`cLd8=SpvMTr{m=PT$y0yQPWWfBzBv@{i#; z&}4G{{^I+!Omslyfu0()HifkZ)Ks!r3{wqbzFsbavyvCWxVWzkny%t_ZgDUvVLTe9 zNV{R%gUuaW&G*IDa<6dHBF^sZ2ixB?VbMrh`!WtbW{YLCh^(uoIWoWf){Ih6UjXns=f2k|g?I*uJGQD7rQJLJLO7)&O zkjk?P&COZ8;K07krM?UzbC&xRf{i$clH*7~sz&c!>-u8^PGV9;-~sjyefq8_j%fk` z^S@gp>tobwYH2E!^092eOkU~GmR^V3bXIy`t(G3Qil@m;SIA`NIIk>QcB-9rCXb+s zt=1hG#;hDzlgdi@aXvcZTI3rLEm&ZZC!)3eQhO6@bCH_WDvaqJLoe1lBWTZdWvKS# 
zEg9#BO*>tO!pr2-Q}=?1d%gcys5mkymSu3qmvX&7oNQVB&Gtm<{pdV;$_}~-c&s3Y zBO2g%4Jm;}No#_%K5m`VGP{sP2af0OWVU)_ruzNqu#5$N!CVWeJ>1cb+i8AI>?ZZ_ zKP|=tDPBu-o>OaZRo?NdJc~|K&K1WmP^eKGPKVWV>J1|Kz#O7tcy2$ zp0hs_q4$CZ)xTdiDDni>1ue0Qr^>t_rIa4}B4-MLhk)EjG_wnw{(SaLUXT9qG<)9U zJu&iKAb4lNefXK|5F5Qq*bjKFJ-@e~k9`ku4}4SY0bR(a`VHEi``?8_%@W@Kqa2O$ z^y)bw;@+Ok>;|&eWn=5Kg-hPV{b$oPGhT9n#odYxn!xJc$*zG^I z`vs_?b-acpye*k~gaII=?lXfZa+-_b9?+kz3Z`Bn86+bn{cRBZgALVJt_v_qCh%XA zs#2HAfYNvVq9GXzzllB@EGA?>NHnH(9|bZ;$nr=B-&k_TcOvYh0bvtKlIP-zn=Hn! z=o#;A>IrLcu=Co}=c>E%{MWwdBfTiIn~Mq<2_+NZaI5x~1!8M;dummH2JA1YZ&VOL z>&0HH)I9_*t@y2^#f({_PJY%4BQ3E!&AYkJAF$b@I6ueV-G1qBINkj~iC>K(YC4uQvD0A=QHgvzHnsx6$B}y z$W*d>Kj)WD7ac`}-)UB^iv7bXhzZYIZrRex%=}>_Owt^xUw~bVs+ZBkT>t#>UHM*> z^He?}1)Y(*yYMqGCwdc%$3I$2?EhTq>g}8H-?`k(kNU%#a*_HRi zMt@-W3muM24V9cz8Lo^nNm_Zn=}PLOgWAK|H@6tDmbv2R;!0pQu!*!2m+x>JPh3t9 z@r{TCur(j;U!dt1IerF6gTUIg>+G;Y7t@)*b7hL6m zX9M4~_wC;P@@0PZzmVI^g6||p{2xSLk?q$Do~QUj2U&*R_;LQ>WH6vl2vzxoUig*^ z6%&r|3m6HFuOD?5HXWiw-!%_JSW{99<~`6dE@wKvE!VG*-_hZ=^U!nSQXRNilK4C# zyTfHK6I}mxB;vqW{&DxrDOT7_3bdZYCpXq3?H_w^#;+a@y& zrWYRdX?B?Gp^LrX1q2{h>)pk-Z0&Ncs&&9Q1Et9JmR$2w*BJAxG`Ik>DtIw_X%@U3 z(=2EVPXQ2(SP1OsTCXhYm5Dm3Izjl?K6^u~*~9I3CG31qgvo~UsfSE{E!>ZArr$tq zyUzHbJf&6Z|0$uVt$jEd7v4Gq?QyJD1n}qkgyLPiHv9nOa)sqhsde2(VP9cV6$k{H z&+a$8wB?aG9+{r8;&nwpUw@;xscn`0kM(4@EY+@%RPL}KZAYYbi*xGYT_D26+q>2& zVUw0>p)L6^{CRw13aC<(3EOBP{{I+_7m>CK^ni8-!-hh zwa(!Tx;-FeBMN3#>(%8P`i96bqP`0d|IM<1P5<@7Z`u9^;zJ?1HlvUOp0p%2I7qWq zlgXd3_mA2@`3nZ*waBM!=Ax3ev~A6IfPLW=e)_vZ3d^N=`nRS>%v+f1p=s9ughyu~x!5dQ0cB_<{1virW*)F+%aP)2;%F6BtY=ebaqP@5<)sx||48T>do$Ve{ z?d4svMPF$De`B>$iror=rDJ6#OQY^aI$aG8Pb#pxVRpBnd4J8WG!UpSP#abJ@B4AhuCZjpur;#+dof1zNP}nhgx0=4MNBq z%8zWL^Bo>~R!ouKNTI#?kLXjkBi#|JM9WnRVrJtXuy677IVPWYtrXzA{c2`<`B$ti ze__2>K@qBM#Wb2vTEOX=9nxwv9JHIfq*!HbZ+0hgLB z#rcCbYlCE(R`mB450-MuJsf^tK%BlpwKt9bW<>RwtD*I|gZ249S2((;CkHP4L?H7| zNoCZ2@vUa%c#QXI@34XsW-Hst$W~8&CIUMOgx7gbHG_{{GrBhZu4oTNFP(dW_%8+L z_Enn{xSk(5EWJz8LhF+SnRYSL4Pd$5Gu}5fLG*j`VtE5f^C29LQgnlCM~xV}`+|R} 
zX#@g{*+qH}M|E2Aj2b!s_j`4NpF}pdO~kKcvVsL@o4?NzFC?XzL#_Pbx^2*mxqn0< ztV)q{`r|`51(m*fT4#Jvt@#JYc-+AeN$WHd?v6xCO!`IbvK92ok$Gqry)pJoB=^dH zK7i%wErVU5zmV|h@s8&i$cc}mKz2s3mGT|%o+GU{6Xu66zb8+6nk!kaZyQalZwkKq z*}Z-^OIou$kzL7=KF87MJ6SL6_XMbYLu3YjM^sCcQePC9uBT)wu)GgzZ5xowYq$B# z_vXXfFL=G{D^<)Bmdf8T&E{zMqT)CBx!!@JbY4Gr0Ck@kNZvJP6;e}H(W`C|V*qKd zN0m9Y<0TS$AerD}*XL&XDwesVGk0e^P_;d<#nN$v>km~o=DS}BE}>GdhuM$15OvC) zF6#dBI@Tkq53H3@el0d^-yG$YDLsiCYCPfWivrzfnMjc=C&CEp$}UiA{aRudZomtKlUgc7$iEp4p_e!_qJ4e)@fadcMC%H+Flk!XR z_fUD>jj!i$^^@G+n+nph>%KXO3mM9}U&uo)~_**Apow(65j^ffT_Q?eP zRT|V6S}fLp_CS%{yz!NODJ|Dk<&?>BOGLnTj3k}3aa^qD%VnKm@}nrD_jF^nzdE>U ziu;1cwB5gw;X|0-A61V1Lqg(d=5<~<Hfa5 zwqW-5n2S!)-xtE6Z@jqPm_nY|laZUBK|v2DBXyp+laqWTC*enNj7kVpFS5I#XC;TF zZ8*nec=nFzUUo^xz!yaD8BHOW~naEw$}N$Bg3`K&0u4@0`_U8Rxs5VEg9Zz zsuWF{4cB}PAaAU*YkyWSEeoh@L_ECDuwYzA>Z5+YbZVCH>l(^mi})CMg(#yt|IaTh zk6o(bzldueIVdWM6a-%N0skJf!F9cx^M4bXhUlXItCJ&Um?tu!U_}J|SiP6qhX26*12IA&2^GGH_ z68uotItCX!f~x(qD|A@6&c-Dq&x*ePqY|9*Lk zIt>p5dvMyrI$DB8A%-W5EE{%Np;xU`3X`qWnxe9(lxiSmnmv|3Ah!{&^xJr*##0~b z%xwl1JngJYX?IG2<;us*yA4g=*tMLflJaKvrCOyiubmIYLn-z?XM4vCrs!0uG!@c5 zMn)3@Ao%j>>PTQ^jqxC-`)hTHjP?3FCblKG0(qDqgVt>O7u(6u6i{d?Lo7rptx&!~ z3gJ(K+e+hu25U)mIP^SI^%nWQKU77j=%AUcpYpj33mIrhk0pv>lP%zZgD!cal!Fbi zwz!y}g=DW_DKuZheK)lmrO6)t4^td&Tku0V#O4&5c&ARz9<6fOh>vEVdJZ^;P2*j0 z%SsV=3)a{jnOe0)ZnDiPk$%Qmh_Tdn-Nf2F6b8&N4Kd zabTXcxaHi;_8{p#>6M}zZW5=UzGfETS;>vIfEL%mXb4KiilMHA83t}mHd=WINfl3O z+F;zq@o{)1a+Dgaxh7{?v6@>E$K;(!+cB$Cn$w}Ei^;L=@9E;#y%B0rZsu$5V=;g$tJQquuhAVibGo38s$(p(2G)qb?4WHGV5Bquh zF#pSHSgE(5dCz=2bF^zxDI5}mW${K&OU}C5!edjq1@QkVvFu z<@-rWFI%Fa?_1fj9eTvfQBqcbm$7DTq~y~xia0G@Xk_FPrJ0?lWVN$XQ!;HhCXd4q z0z9a+w#~vuETpEE$fxzX$aS3sx%eS}i@ zB=<+V^e%oaV2*3ql1Rh38Dd)xog0W@=8+?zVSm~$~K$8?FfQc1gQ@*@eQa3ji8 z@3_(cM0t(|MNEbbd&Uw%<|_j#xc*Bx=9IENOOHZ3UBn18UgcF6IVBP3Elz2z)i-g@ zJ6KJ!_TO*qyS;4Js7GZ;%T1xbTYa{c=u^v=95-agak!WOQblr-@uCCSEpv?Wv1#U3 zN9!w=W2Ew|WP3-WgS7ATo~e4}i|VJ!Xr}Abbn1{^QJiAw(}_LQH1d~Gp94M;q^ymE 
zZ0L8yYA<(_Nn~sY8=^J7{RpgMHm22g6DCC(a(igrX^*#Xy?hOOhhP(`f4M$^CBqoOon4A9J|r@@>R+@gZn~U zp}S}k+b**P!-(WUdFPq5#QdBZx$v?>51d6zDbBM02=9Z9WR`AS>3L=hQxW>vS*bS6 zfF3JcO9WUimGfwJtqGRt`2A*BDM#3!d?hu_>pB%y_eSGalG9^Rb?N^}hlSKr+sY7U z-|yWT@$;Yb%9a9g`~o4CQ1~$t98`WI(Lts;=G zhyqLpbQuF6sTl{RheKaRCFIKG3nAoivFF zl8n20L%sD+B4DT%e+Z>G$w;6ordr8nB3Y9>DVhHau=;G@#hGhi@UwVLxk;6B&BLM6%EW}fKwr7jU|vDf_gj{r580yN!<*iR zM2Pzf@_S-Hf)G3H!3$Z2;NBh2k4VOybP;1JB;%3wFeQ z!8k9m7CF;A)!j(YsMm{!Z1dmv4Z)#nhM&w+wXa8;om+y%2a;J2&NFnELhlH*UmOHA zo&=^d^nc#Jy! z`^&nq=7j49VS{+tbE%<}+FgXW+lII^*H5CZjhI6Uv!wr!$H=LezH>cyBhmTyA@21N zf)rTW^DOUt9xIK^k70@gbjK9m$WkCVgO#q}bzI{Y2GSbjhM(Hsw&dmy26q2Zz)2+L z{R-A+bPvB6r1hMM^Q*Bb&rot#_#RUHs+uwEtDD4pzk`$5ujR0A9GScj^~uazsBVb# zBzYAs1Vv1f7!u>bz#GD@pprOG=L2PA$1wMw;}N1SI+f9#d zr%U372--3fAKMHP=ikozD?4HnD5)g^92cXiubP(<%n`^ebhf{kXzJ*!&z(;;#F-a! z+dUF8E{7EUDa5`jj(I(+@w@+CUcbj?_~Oxn{X#7Xk%8N1+c7rvOU^Q=pfN)Hz}+Y_ zKdkVJd5VoOT4p>|#r=+qKKLtrwgR9%H#(UtFJ%y^pKroPcyKMH045lc03M1hWih< zAmwjmZ};0!Z=Uh@l~!sM$+jq!hNIcJVL{=mbJm5={80MNHC8#+4?B}tgTHB8|9)?U zfOAXRjw4}wgM;sJXzFTB%_nqV?~|_ob%{<7tcaaxqrY3&<3PN?kN#QZ7oa>i-+4Vr8YdAufnH|hLkh1|T>3aTX- zY7;_zxm%LJZsJL21-yNsYY5y|~AI|3TdZ*PTxxG{{ zh+Xw%K*EsEd#~Mn@YZ;vj+~k|=!}#M5-QR}%r>bE+aco2N6dQ{N7deHsQM?V56tFw zUjYtvnd4GB-HRh2oL;ppke5k!b?~Y3tQKFhZRDnpe&9F3vaebAJM!h4+BNBGb*j=0 z!I%Fk>vu4Y@38r8`++2u|54t6uBEy&n0y59pI}kVeH*&;xQ!Q)wKwE9f!Hk(JIaf) z$O*~?T+1sp(|`Hkp24v9T0(#?z9f3zO)+^j%oE+=cv|9~VvbcI0o^TPa^qFVXr&lBx6oC1@WEe-k0P@a6KQD+ z(l1dPw5o8!YW2!c#v;)7ajB?Rygl2lnx5P;V??DCRSnh+suOdoSLhp>Up-;q4idTi zA}|PG)44!{^$XEJVXmmM1>2f)yCG<&`am#wpd9&T@Mse9g?)lU?23)pixQZS;ovkF zV)YxyS1?)?&nT^dOb%rQwAqTJw0r>EPie~h!h^A^8e>Z|KBLM5mIx+j;!`RbrPj8M zgmo<)@~>F*LKti&ojR$)=CQYn!0*4&S#u<%y!D*Wp$)XQ!M)hmUms@f+K#i`F=3ax zLernZE~R3Z?-~>mGzWAImLZF@yQIvn^Fz%)!UDwZDOxce56N52u7x~3#iV<4v;ZA= z)_dsDB9EE%r%6%5nqSaygeJP4n6nIYS!pyF1lKLKodG$v;eP#pp zPq+|TS#F5$6#6YZk9ZttPPZJ=#_g7Qw_c4vJgZW)mrv zSq)Hih}0H>%rQFx>_kSjC7zt@J!6SYmyf#xGcClEXD!pKqu+t+_>YphqEicexwO7r 
z3S;6cFmLpQ;?6BUc}JYs1oc=r52>Zv59Bh13?Xq)4X8&w-P@U_2m(BP6NtNJ(TioCwG&xlqjM(Iz*mEs4yByQ<-cK^~K!` zIlycWY{VD2-Exls8q=G0a*XspR669!ruo@J3ecMGO#Xhc*M{evHygv8Mq*ZLm4PLw z-0uU3c!JUlOjtY!a*UTEuN^33ga#^xkzSF_2dC8}JS{FIMlMB~Oh9_y7Pp?`T;aY+ zI+?#)&0-Q&qL#mjvx@+Xb=`iq!R$+d1>SN_i`T&pYEt19kTE%mqgcvt&3}C`oKmFA z!nIbfh1kQQXf3V>_rMF>JFQ&@lF>?^eKWS0AW-FvIN^6vWKk4O_LEb`e~#d0&rIVk znBBd=W8=3I>JoO62BNd{=Be?Eqp(H39V`GKtzj>vQy$=VN4SARV^xrQX_#+mZ!|bn!Xg(s;Z)vRH$%0K!#F1OPGRH3vSabRt2-FYa<_Qfs z5?$;y2kf7@x>+~>c8i)~!YDkTWWKN~m(W%-lIond>h1u21qXUue;UrCbYqMshp9Xu zrxFq4ScN|4w3R)T?(ExqXZ!cAkrek52$wqpdzfgn+is0HUBhikeW^!>U^F5iH1tA? z7T%z<>BcQ1i@n8kOi(u>F-m&I`5vHS$)uyFtD`x2bRu&U#{-#o_22)VarWEIbp#u| zWM`en<`Bw$;^?2}-;^#yE;`wn-0D0meh`vc!VE>HzEcG2bACW{=gALD%C8135;AB0 zbqG6pj_|LBrKL}6H{=?=qv_fcI}g%4VD|wN-vlf*eV~L$xb?IqlwAg6xJh$n)}utE zFwS{Zc4{_=o4ko@C$(~W><+N| z>h(i)3WZ?$25tz0kHXnk{P%qFtL;B9e)mm z;2TMXr7PsO;vj#TdeVpZCDsLOQA3ErPZ>7h&fn4xe*ISrUDoYE@fR4skPhlGx6|3C-wN$Y|dol+R8qY}%N9BHMw2Pv>l;<8zco z>p{iWq=ypjo;HNO()z~+VAl+Od>CLYdvSvqY3xDU?uT7}Z#Hcqcdrx<+QZ=jkVy}^;kbkSaIS18eu3t};Y&_3>w>dJe9iVtX8 zny893AfxNMnQ;fF7y^q_(Te>jaWj*;zi~sy(jTeadI)O|-diH~U(n6EjNMIbvx~Xe z-s7YaGpv2pE_fl2Do=t&U;RF+XFY3rzPn);u7eANep}jX{_#1ho8QPe9(72xEJNfh zgkGIp3LfKC<&rM3N9gTf%`Ym70a6hG4|Ib`ZgFj-Tk!6#hYLk08uR zC0DQ$;T_P*sE~%T8|hF|mLYc+W)oh^dFa^+1mz236Y4q(_!xo8 zDiAlpI4>v$zKCuoYg?$*eT9y*<0bP*Z`6ouR9mCOo6PWFX?K97Pm%P4i-}OFReWK! 
zw$6-;A+SzZqW-GLH-XBAy|D}Rma_)Xm-7lQuKApxe1Z5cZp| z$s$yKG3f~l`c$pT^@|+X?&4<+h*|PpK~}iUS$h19>U2(-J*NAi<|0}t*%iNYpMe?- zltPkVB43ugzn;MHc-YiBS1l!Q#mYMfqWYjbS>pL`uT%D|_e6ZqA;4}EC3r0nJA`cW zijILhHeAQM8Z~=kHV>$YH`^l`dmNRxlOL+6E<`{1s&Pi|j`Q6?tD0WtX`;N6mE$nn zsXZwZqFd71DTz6)V-oLeK|hQL-y3Ddi;Ml>;kOL=ShSm!IQ1y>>Q57I;A+sqiu|t4 z{C|NM#|$?r!tTqmfygEybl-6)Wp`&vBn9M^n^X}WCzYx=i|_oId_WvZil(k`7d=6|JH;O+2aiPQah zr*WADL)Z)QmRRAC(aB}4Jl_j!lPwuwIEC=K>3kvCX;YZ97WI%=Y}p96MTTw6#`sRM z=Ur7ifhA}$jo*PRs z!gH>u67&eqin?%~0BwqI%3Ha?NpsvEYNk4=Y$5BU}SzWw#@J_7GSPbmU6yOiQw@WN&Xd z`HoZc4!k->H9+QWGYRln#B3e-*-?zPtknb!#0vz%JCy}p5>GfpU-1eaw#)k3sm z?uj>w^pylh^!8(eLH(vw*62fd#L76GQ*C#`EPju>oG?1sgqBe;O7KtY|NhS8*pB^Z zAFgbQ?wWYWLh@0z6pW(h9bqPaR30}QdDU}H=#o5WG)274bDIO6rALq&9t1PqZRe)^ zRUNibRDNM%rN>z%a)p+)Aj96KhbA+9S-j`VL)LhS4nQ^{tF!GQ)Ww(S<}Zi|-r7Pj zE230GH58(8z*9p4M1pZpcF{{p5uEuRB=t%W4?GJFRPt zogB$p=6nK*{sT>Xdc4+qtzs|IeIfsw z{^Fr(MYJ||z8!60$aw8-u~LmQyyVco0iIG)kUFb4pL&#xuy7xZM`j7v7_Qxd@%vQK zZM4>KcD#ks0BQZ882(%#qGRJm>``o?tT}?dkgMciX;u^N#KZb1U^7?K{*|G@EGKd3 zB`$*m<5i0Ve_5eTaTn?7Ccp=>r8&eH`qw@jax#P*=V8NL!e=TQigg@@Q{a?PoBL$CW1rM{H-gKd@@v4tBXSBmRE zS?cI;;qTZ9`*W_gW46>!UcDxx@eIjPh)Xom4BBvi%)=$)L1gbm>gl?M>5_Ckt$dTON9DJM;nSi@CG=38JoAQfQ_nKzV@sT0_p;&dt zI=tv~vp_R(%cbEm??b@%4)fMs4)8Lz);Xu&Mpb2`M~QQ}=&k+~IYStbIm}RD0rsqN z386UYs?koGX5a(R%yy7JERksYjV`8Uy zL{`2;@K8BnLG6m_{s78TD3}>Yra~}^;OpT>RO!gro^K>Ppnpor) zUzRnnwc;SN<^^YU=Y6NuZVeuV=fVWzu<_q~{=CMpR@J1*h8$BM_9$*<%SNiM`4dy* zc2*-*XC`_-Nwihr)~3pwlA4qY9i!Q%ptTHyaE7kzXQ#$YHwpLQJZ5HwoB{Bwigjm{ zUer6et~1|WstLM^2=EY_P+P$@t@?W9NmS0l$%pMcgY~jaFNN|dW4DU=JCY7{dl3tl(4V*!w|f=L z>lt75g;Ru?~h&4DFk>Wb0bic_?y_7@V3iZ=qj;ei)SEv=?^B`rPW1W&|%*?T{L zX(MO0uuFBh`XzygD_y)RCfjl9g^9yO9zRJPrUV!eS>A6_@wvPl*DI@xQ} zx@!rw<`ufdqw#(kGsrQ9Uhlv@x}audI$s(L+C~|)Cal=7i}Q`T`8n)1`EP6z7rn$+ zgj*8%F$pD%@c^C?E9-77VLC`F&H1fDJfy%mqn4?J)|_YCjGgd*{KQVr1Nj+GN^~u! 
zZ1PYBUm>Um*0qn!C3t2X78~S*#S!1~`fK9{n(j#=Vw_m-5)SE0*QTS~L37MpkC3O` zA_>;SoAk3Mvv{KYZ)5j-H^C}Y*Cl~-Q1DV_XHS_CJW4f$&-2j`t1geXlCQ1_1v%e~ zrI+Rsi(J1duhzXo!>F+H(<4_Te7PZha$Y7R$4(n@CJw(K4>5d|t7%x|G?3H&(nJ_D zv7o`C@i1sKLY#Gnp|nNx{obMA4tOUP$szTtR&`KbtEO4WOFt4_qtJ^JALSxzjJjXZ z6{kA^;KKYiW)@63i+M}_r=-38@1mgpW;A0EUupi}R-q1wA7qp+sT6A zoyGVV4|kdw#(Z$j1>)dhD3iBVGfOJYC8^R>pA*%NP6j~vduM(QN?i4M-rnl%)E8q4 z{fz{wggQB}UXLJXxJn?#L^hRjbe}Jl`V>s7sU|1FY)!Z1s9e5|-L1@ond6z&a!o6# z-y1u_j8n(JJ5Fq#QOCH6_P5YrQ2$@glt-Z)Z`o{;S0uXiD%*KNg!1JvoSLVvzY&YuCsz{UE)IT2 z&SO?*7Ul{q#LS)s?z4UGl{DX+9$eCc84t|%kMiVPBsI;ro?^uOBvEB~(thRe31t)1 zCXBm389k{XXMrn$mfiiCu=t@V zwaHdnK$QX}f)p1`GmjM4A`R-|P%CAp&9SCLQ?~`*USX&LLV>2On~n!!sZg9EKoLryy_U9`^*E+K%3ZjFEK+Z`^}hAIHPzw4^BBomcU);3 z+2)+@J(SUX;iBg?21DO7be2_3vGx8s&q-(iuU_?!uN_>T`O-((YY4V4#*5i~`8sF; zprA#;U1pQa>@P(8NM8JS8J7_#`8JSzxlU(&IJ(NgLTg-;PtekyKD^*tAp)$WRj z`E=pqaggLweI>}m%@=7H)6iss56=A!;gl6a zxu-C*soU`P27`}1Ect!{%ORjqx&f@Zq)DiZdG-gBEaW2GZwJNll{~4y$*<3I6@(^E{i=@3S`|*JW1)mX!5Q+gK8kq1C0{`r z=>X^T6UAYH<0dh&zziX$J9o!@GU{0^vs0NlS#dxqV~yzoy!Z-CZ1Z3&!)CNpqbZ@; zS@d%tY}=(P0QhM*4krIEQNx$-tP{(|n3HlQ)3^r6SBHbms8?qnGFXyL#+I{^dK0UX zLvZPOnvw1UyT8GUK6*-oygr}(mi4{beB+U&;Rj*~onF1K&iM3Qy4s12$y6(c9#G&r zmTd@by_2jQFw*{en8ppf@*R;(d8L`uW0LHtRq{ehGWq1@Wc*P|a0Ab#b_cLsfAwzc zUA{-E{9sJp&QE78k!U77f6f|7f^a`fUrWZO?np{95eQW0JsskvHorQZ z*l;V*7uU!;3dPQF2z?VhW$p#@T7j%$xtg?wTTyyhw$AwjXO5F+!RN`STT>e0x+vY3 zlyA51T;OT;_o?z5h3>V`o6A+%Reuc58Qa!D?FInP&TV>f?b$$RjScH_&-$&EyV&QO z@V_Mln$foZcN!4T_3Zy4A;$kpLV`No{P7lR`8T-Tw&hssNp0L{e}mVroo>*pW19vj z5n=z>MxuHQ>qd#lfGMEJ1LPFRxCIh=x&HS03j@o_lR(2!fk2qN^DdHpWN&-r`1$!a zT|0L)+b(jextz;;f30XwFDvk0gk2NHu|4x2z9htOd9Te%;Ca1_&LeQYCh8$_A0ox< zzbxCf(x-i1EVg(1O^uHG)>yxzZ)9?J^h_di33qbve|)aTdY-*G0J^)sjyh9H0DfJU zrP#}{yifP*+Znaq8`Rg;e%EP-s8hZUIvx+N9C*}|4-bvh@~s|k zW6<}4+f5D-Qv-rt`-D~`+@~kkgux+3mI4=< z9IrR~h`0pTlaJ)1&Qpj_M-#|e4V;_|yCwSlkB5U>x^}v<$3t0;b7jl+dbi814%FT2 z%dK5ZFFeZ`#|Fol&n?If99ie1Tl*bEyI!DpE6estH~dqR&qo^%+Ede2ht{V!RV6DU 
zk44CvtgRtV9FCAB;9J5TA=|VTcPcDf*;9*78a14bNk_}A*@mQ>T?7J?yvz1#9fk^A z_=p}k=k#;W-0#{|={5(R`BB%f`_HXB^7d87YkMvo{yJs(k(@?;M|f9U!aa?nIk#)C zaG9K{HQy6u=daml8s0sxvtk`T#7j}Ms(ouw(`5%;eqQ04>ixAoXcWD4JM^q`t|qYt zaR_2xW-&$riU*2Wra%1SPI{+sRmdFG`yG3Z;d(t8q=BY(9beEO>lyni!5w;|Hnqqh ztXzz=?t0s0k%=da=md`dSF(N_5T`*5fcIuG$toB}(g zItNaN;%Mw1L_SvFzSHF{9Y|48NhF-VCSG^v>$Ji}_=;apuQwPM_G!r|k7+%K+L=Y0 zk6&EjG{`%%olF#r-Yc+Xs->n__*(RwZ*vGU8J(;l0MzX_&Jmb;6S(WDnR(V4oh_mR zu(j)O?I>5|ty~X%8ES%O=>Mc|Yi7dcf0H5lVU2UOizfO}bL?HDxX;gmcma#B$6Wt` zswITd^&@bLD0U1!+B2X?j;FpT2A{)cz&%5L{&2pQDW`BL-V2)!#FdMx?ORB4?RVs9(c;D4v z`NHb%kEy}hkyvQhzh->94oqMDwNrBNY6GfutlJy@NtrIYmFec%$Bs=5Qx-pS4F#vl zD{q;95{iV$BW}DOTbcOd`i_53DDO?fz}Y5|{$wjN9*wz<-RFe!tzl^Ed#Y>j=a8;@ zIelp9eS}}ywuL-L*o8gTsPfg(vPG2U@eV`%Q?8}?L)$XQA3$g~fO}mcLKCapL1N?7 zhMs(faSiP~);`!7KwAE}u~kgXD1Qvwr2XR6o4gGlv!jr^;uoRc+8ov@eLM%_ zs7L2~58uJ^J^tC2TD#0`z`1O4)9Qym#ni#YuxV3Xf=TR|x(;&;CUd>P(nl1;p8cQ| ztOD{$VNSTUDKF1Aaj!M(7ty@d$rh{2l>2BM=1qro5k=UMdp7G<@&)!x)Hk*A`V^1O z@?hb6dba*=6yX;i#_DEOlq-OB1j{~;*XAXWxFrhOdA z+}`#^CwMZR>(ddoKEiIrS-<%NJ!Rxkk!BD|qW2TVw-Pf(nlsQ`U;G)+UFtYwF!n;Vhpf@m`ou8>w-dknpD9g0*S`E$Y|NT#)wiS{4%XM+`Yr{vLD{zV9->iA+e1XFilxgHX{eW#UB21H=DM|f&6-Z!VWYM1K}WNWH@ z1we9KQrN)5+wtKv`zm5OeVS|>Zflzd$#{^L;W0SF1Nax0b&A)m)UUYG zG!g=fHMmVZRp0U9CCTXQ_9pO4F36p>JJcQ0@BAj<^uz`AKAv{^5BwXC>80kN`tpeW zOD<5+As`Sh;XRFNA}*_!L8rv&t%N4KvTAh#w(!Ra)>pG-fK?&n<+PZ{b zJqvj9I@kRhHRj2>*Z4V)-Il{S`+DVf_{`l)4pL>pdtc-izle;nh{Pe~z(>QWt^u$U zFS7*)!0~8=YVbqo`Z0J=wu&>TcN3fl-u&;5cAtXQP;d5AiNE2{1l+Td;0{DUjR*ZQ zCgrl@CvZ(HbNm+`=3RRp`%l$CJ>lKbNK0!};9^GjD!j_GXtr8Gnh+|XlZ~${vt3NY zACn>Uj|I6*T>4`yS#Gy-BHgiPaycS@8KV}4T{+uABI7x*_M44u@B+{?4{U(iyxxmA z3Il28&N10#3A7LNzt}+307UMB%9koCUnt32z96D*tmeMN%3!}EUGJR8*V&w1D^T+l zxo-qkuZj~)LI(`DR4VVF?RFq_fdKqYIcI@Haq(AD5z@(FDE$X!v_n}wLERos<&Q&f z0F-Y{g5hn-XrFm%rCNMFHVzC4yMC4Vc&j)@$C%fxV{W<}+36VA)?}0P1>GN3{p76i z*Ek1z?)0<#dVfxeaYcp22I7`lI*(f=V}3sopq)~4!UD?v+l~}H2y~|GVLT;+6tDa& z@HQ2Gf?tM(IJS^Tvt*0|`w{nOULXdv$xa;F0bXLCQFrpA<9s#HU8KB1`wwP#mez;5 
zMaV_zM|c%)gDMea-cWEUoddiIG%i9OR3lcd&`cY%G-xb#nZzq8H;oq9_FPrNJtymI z5RK(a1KCI99faO+u=lBkYWFfs(x>wgPK7IkW^#jyu}DS}9%mGUfp+RYR=bQ2Vy|0E z5q@VistYTUyi=AfnW}~JS&%K?v}zIW4M~wu>!dR*8vwV!BYKy$2xh$xxlXGJnED1b z=}*j@F8MSHEGwt)M>{L{-n*=5U25Zui8`DcRV6UzxnE-+#M5&lhwncvf8blCeySZ0 z0_df&^MJ^wDG~M#I8uBezyK@L(>Rw(XHc_WrRR@eIW%5f1T+Ww15)lTBzWYx)WIQI zXHH-<1ZO)w#KZXkD|gn&!_UHHf`)YywKTVkbZmSKTy`VA?wI}>Pb{Sp_YAcid&5m? z{g}LN;Zs0B+dk#=-F>k|xLmlVf~#sxR=Z$ZKR!%Hv2qDcDObIJhrF;BMjHG0`P*g5 zj=225B?G9ho;-1({8i>iojX6T9UZnWQBt|C35d@#4E*$J0E~KNOSUzGrO`u*S~TO} ztEr0{N^0o-2Se(>CwbG9zXKL27pL@?Dz;nCO>j=Gj9p^pxO=z&z58hIi|By@rgWPy zrn=FD0mKo3)!JAc3eCr_{gpuks>xa!hS!!&nQOG@Zmp-`Oz)_(V1Tn{uK}AYw4GYL zPoX&02@bDbkMlBnMTk)|{~~rp4J&V!^j1u0$o1(3diJs#My4Q?bOU z>}mjR2+TPuSd$*h;&z$sie+sioV=fM4KCJkeUNynPad~R+|Yn3f2IS!HDBQ|#ArFj zdQV(agO?Gf>Y!FY(NxIViAJVVl4n#G;ZBkulx+}YHt|g=T$5viWT-w*Hs=-ct#p!U z&nawykP+%kuYpC=&!7_pVB%!fW(I=M0CpgZc{4VEMV*Tc%X#;)KG`tLAA#ppW7RfC zO5H7uUs4B>+h|*muAZX$u(1YGJj{$F&cAXN^N@Xujo1zVXq;8y@@fzFz=oGNVUui| zwEFf@-3vxxN#S)W*LQbn%KR;flq~JP7bl+vW#6lQz&)4Jo^JIa8hfhG;QL-%?< zQ&e8Aql{!vlflX7xRrID70QRfB5(3wD#!(MD?;qqFO zr)xUb=6^G5_Ivnk1@a^%h(w%X@@O%d3o7a{uRb>Sy3Y2LLjZGj8i?WS&0stD=}WF z5I4AmVPhQLx?Z!|9Jj{U<>QVHjt+1U0=uToy@+O`VXD*;h}~{*$1yo8w}~|5n?#_Q zmr$^NnFJydoW^z;&rEx-8=I?WyB>V5Tf}=?$3bk?VnVUGI znmGXnAKf0eNO(2W6CW>*;#u8vbEif?i4ATED4)-ahfH`Lk6pIIXHfn2b`$dV1Y~|X zFr3(n)nSmzmVv@bq;E!-;}|y&aD~Iqu;i#B1}wilFa0C)W>3en$Vr*G#4EVx@6%}i zr0t^V`wpq@dZ>HY2K^TWlq?NIOE*LBgKaJ1mkp}F2qL1v+FHxK=CecSq#9Sb<2w4H z#~BC%lbN?cdRBC6EH`Bi{Z5~1QcJP* z1;zWVM5dOML4X>V?n$Ik+6tB?RcUrM-V5=NMmHFUBobH-T$c3){Tk!#A{G&Sr?6u| zY|9p1FSU>uP*ly%3-IR2@NcvVoVeD|E@NvpMd^=*y<#SXS9s-4ciA4i*(yf(1(}(7 zxj_oJMNlrN-y1o-ROxpDsbkcoJNP8SH2GzEkU%#i*Qx`aOv0|}l4k<(8OCoWif2M# zPemOp+V-r@GCZ1MjTcMw4DS4gXb=vZU*=ntyk#g&Y|GE(wCp>_gJpv{OTGE$OS zrWvgk9{ADadQU+zsGQ!`dGwocBH~jBOS$XGT={z}YwGv*X2xxo;4G$7Jmm4GpL1na zXK5gDt(NZe+}~aKimTpwf32!k?_YfW7#IC{<^KX-EzAdrBe0x*2;D?VfYoaQAs_4I zJ2B6c5Unmdt-*F|lGjP~-~!Dh-?hw9rL)o0m2{LBcrh90w*Ny-c8=OJmjGh+A$xr_ 
zbH|}f8W!#^l_&r&Q31=2yIl8Y8xq3N-vfpnhKi;G0hI(u`^QHp_4dE#A6bHUX+db8 zqEAS>(l&P3)F&OVps~ZnI+C16f{0)PpzylZ$u=(Xdjs+B@1g6|9sbuVbh~>c=#1KA^VUjwi-W|x4Uwz@6l7JpW?slq?Ij|Dm!w8Z z42iw%y_2U7%@$1owDu<*H1*GG^k(S-NWr%Rm-@9*ck_t7z#zaQRz9H`6Y*!a%;i3* zrJ4Q?aMPt8!qGR0l^;_BwPwr&bN1Bs6KN5+T_p1#BsfMDCSdOly`XcqffY=qLjP=? zljJ3n@X7bw|E`4vP@+G@Ow|t5B>6c ztu1$Aa~4)VDN4a@S!gVtjLd_&L)`H|4^1SK+k9&>{&*Z!-(Rrj_KoM!I*+dEJwUBy+az+eaSV$6aMCG&wIa!xqd zP&8Vn$z)cj%|S|}UaVE}h6tRgWlgPQS&rTwqOw3SY%1soyY6j9=YfE}E*G%!yv0Iw z$3a_E7i`*m(M$6IQw)I)x#H?~-go&apxAo3C3<(!&oXQ8RZ~N>9z<-Xm(i>80`59s zFUc05N3u#iX|cHJt1xr5!+mICXccZh0tZsnaNT*aHjHtdRI6dhcJS7krI6smYH z%H_IM9dnsv0@GJsTpB$wCJEEJcSz6q|w04u;7y|Cc(_pYFe3^L*7s!7q3kps|LleMl44t10dTQ-ucXS60#8&|1 z$H2s6_ho{O?tN{Ew>Y}WhEP-HHZ(?kQ0STNyS}C4nR~N7=5Tys_HmPWX92@qW1q7B z0C4DSae{D(&hB!=3&y%#|6t_V&iLxiQq}s}&QkS#Vilq`r7!qE0H z{W`q4Drux63V4L6mC6B&NRu`Vzc*0d%#SCYc66A?Fea;{rPp@`f*0B)s@tbc5nS8~ zOjzupZn1`J7#Us3786&v1$Np~6>f`AXI4QHg#D5T!+l5?qXt3`U}1+Lc?)Ki~}nNceA9<*4wZ{n?-_RR=t5P{(Vj{>CBQ7 zzWeRp?vWE(wL2)pEBtEtK;Xb1DoRq2wQ5oB9D3EgL6$WlPwRhur!7~c^1dQMDj>g= zI>4gea2J&Y=HK!^bO*-`*I`A5U+f!XFzmp#y4+SdSvf>87u3ax?V`Z}V z;8K{O5@h04?`5|7gJrbDGa8V$L(BVf|D&u%DJcbFejXeJn~BOGzo%JUB7gKPouzfa zEN$3{1=+oOum++HQ^U0H1KEmq!?2$unuUc7AF78iEk_|-*|O@RBR+)f(!rxjlDjx^ z&6Z0rs^f5YqL0VypP*`G0~mw|U}v#jXGz_sMU3v&SFpXy;$_Y#SJ=H1A+4yZHvqO7hdHlpR^u8@X+#}lY=-0?h zi7=(|HP3PkV}ZqT(4xG=sGvj;HF)%&5aFpNlyPfTSf$&_5O1c&nXI0HBn5Ko5M3PG zkx_~1buJ*HS$c4{twJ$WzN=dQhOoH5#KN=^N4lgUmS1jY8*xg{zye@wYXx7%6&tht zQf6x2?p>Q-j7l_eMfG>-FA2u!5oTB1fSVA3Gdr}-WYR+ueE{B_{i|?kn$@|dtl^i| zj*n2A&p6c+<@u+w2|oYsFJw`p0N4OO8s?CUjsR}InvfjkARr~*SxU_@=S`WIQg>>I zQ($y9VhDhx;ZvzAJ{KsArr5Th6)$D!1xvqr&YA0d^7W>>1cZBn!Jn(?{&4nVm^6Od z^fSu%3oH;Q zo`hJTQBN)Nt|$|ILfyNfmT(aqpk=*Yx++49fVVwQMPz(!I35=q=Tm80uBgAP( z4LJHCdBaAFYhS3C$v`??cn>a#crQbwWlf(jG`-Pqov4N`61fe!2uWQ>ZURh;zPm0<&!5Ys;kL;+gwa z3SUh&J+#Y0QENYxmD=XqVt0#S7B*EAl0#ssu zlWJhD*9V%!m}~4i3H`LXFdBfZ7+@890)Rcg1u?Ng?<3Oy75gg)lQl}T+@w~^^!#&W~F+7d59<7*P 
zN&w<6kcBvLpO1oyQN^1OQUIPwivr#K&!Z#!N?XqA5$wYYE%}yCNIIekDZdR8f}+S`@mE)Nx3VYkjcqFR zRCGLHKg)8ig_q-sjjWSAZ#>hVAKqua2jtuZ<)*QW3ySF*K@I$`gprntC-PNR7_BNE z*-9U5gWyTmSukC7iRKg_izp#_{qo-~5qH9N5{&m==FEbt8u(IkgsA@Pi`7o9Iek}Z zty?4~HsFp3n4AA(Oglj!-W{>`=Pqb;Iu4Ap+0lq%e8?DqRx!!;neH&u5^niQ{}R>G z3+Z$+k-(Zo?0da-mIM2`U@J@|u+#}k9p=O+m;0W!q7b^S}ulz`mOXAy_|4d_Wr28tX>VWxu0wEyAKl83k+BFx4%{kC~@ zWA-dCQej(M28OlA`xY+6tXESm60F5hB9^+4;E?FzHzTy1ZW&c*RdMl@{kiJb z9cTdwYOG8Rqv(`^M&{z~T|o8V)+D_k%yP>qzN2vp3>*5{fIcd*tMpP0y%b7->354Z z80=J>FmP%?<3=f*qR_jL0BtI-%joZo1GXaWIi(21d)+nd8UCayB;q=}pvd;DisJ{! zC;_{GAM=?R?wsEyNMoyF6p7<0EK(!YDmFp8KAWL|VHSCkNn^3~PeOTjjGgDgvr^5? zjO3wp>;3aID43-1BF?=K5kk@VZzRqTg6%^I_VLqQ|_{kQ=N-zly1 zw;1pPvxnsUQp>1Re85FJ^fKl*py7peUj!rZ&^9s&+k1vKG*A;7L@7+g0eG*Iz&Et* zVE~iju?3+f*o68B&Ts54dOF!ys&7f z2*Br1`Hk}tQylcYq5r)V#gtuyxc&Z;41EI5b3EwPd-*iU&=h!&G`AWqf1=0X#r9FL z1oh!ky|5&p;Yo4B%CyO$p?Y?HKRttiWPa|yVvu=NbnHhYXY|yfcq|gSF62~|{>Q3q zZ~KFQxH@88*J4V22r0VK=zYJt;N-{iQmwICm5BFgIRW42T^U9>$yvE7ia89{p?!3w zh)$|=qVzm9k%l+gH`7^#6hVz~O1*THg;=v$>UhXzSUNd?lM1LR4KY-keeA#3&JZt~ zTqG6aTr2mOB~0{WQVK~eL)vX`7kWEetzem~7|%_m7_D*Q8=5aaB${KA-x-<3$=5n_ zp7I4Cc|inh@`CiJyy__!;H+c3x5;%;=xCAg+PS(r=$TdkVf&r5+^VMG=Q*+`U@;6b&0(pZc2&ji{In0@j$ttJY8b`Bx;D1f?+ig&Ifv9*V|hGXVPsjV z6&t->QgQ=evUZbr4ApRsoenVm(eTTmRTW@uX+T+Rlm(?h@z}JRy=f4XQ)4Rry7qL` zLB7jprRI~=(p)a_S3^@TY}rXqItAyVsZ~F2Z-M=yo^SfP4yqt|$23sS{?;*2$MX%Y zm}|*(&(4sCtg%uqR|QjS0ASs>%N_eO0PH)BYWn*dqi~akSjRP03}nlc!MV#>K4jyX z=hUzeZ_tE($%vmd0L8!^9tb>)b%ubqvHpr^CA63@d5zz$ya3kHIrqwxW!2 zXNX_gk=dmH|NLEIwW-+^QJFkgfrH}1z5S9W517({5J!Kb&4zMIVcr=M7l=e0BM-r3 zqr62XW@qb9nP?M`@|XmBZaE6~a#09zc=d-}PYTmi_%lSKi6UxY?f6h})Z+wj7c$-x z+~}muDas)x;-Hm2)c*%rK&HRTKr(To0U(Gca0$KQuCfn%X;$N%0J31@^VuLd-vBU+ zmV%#uL-LZ@q90R+&gqn{^|(Nol_PGYUB*L8d->gBJ-Jc_C{m9CY(Y!1Gj4+uDwLu^ z4f)G_JzB=Gx=7wJk33)tc>OZs%GLaB@{57105dLy!oN#wP@)>Ekq4*->y3G|3nu4v zoN?_$g4!20`G=R{QurSOJCGk-2 zSK}`(DZFO71%0vaWzw7%b}ZESZ?NL=XV&nWCYom>T$x`*x-&nSL=v&8AI z@FaaXH4)4KmBaT`Lt=-NXoN2b0V1>AWXhlxNXz<31E6uU-6-N$cs5Anj7bjVwE?(0 
zyUEVmvJS|4qckdL;<3eG(TnaC{d^WQ@nokwj?ZO&G#H>}oZ%LK*1@+=QQ4c0t!xG? z2173$Qkyw@Atdb}@WEe;tcSYpYqF_6YBsXs9D$w3IAgQzyw0ol;oGL`UO2u~rSgGN zU)DTW^W&>vI)_n&p%T7epbcgaGtvMl1yC^B(E{P3@cG%yRzNGWCd58E!eWygs{&z0 z1BmK)o)(|z9MHmJf^)i1$<``p`#Rp}Qp1>K z6D58LuxdBJ;Eyr$?yw-N@+M5_><@yM%~Om`90e@4<3rJq-qddFDx;wW8ew1A0Xqeb za)L@PBHthlWX)i%!qzE}i@Qa$mi$4mCEE#5VHTR>SbeudMZiIOME4zZwk*4*gmi|f38}}6AZ^wWG|+P z2;KweRZQT~_8S2HyTp2WtV*7M+buL*&a=f%i|4!wN-$M+{#Nm#S9znQ&x%hZ=(9U4 zuee7vB%74dEBu~O$WPg&=FyU#J4H*H+A%hx=WLPuWWa%x@HVglk(GPfY>Gn@Aort( z02e^HD)G*EzvwxGZ6&w#1%@p*F#u6wc+ovLZ6a@~Q)CyKls|tXUgh0j0odRID24HS8Lhn$XJE#T=ezMI`&>hJuCOt2pEJDRJ>7W)91-*ME>Za@ z3BZFUU2H(?_m`^C<>=CLm>&~Ri#tt)z1{eA8(5}#s@Vu5 zfW%WweHpPs%uaTn%QZ6WD;buZnGRmqvB5RTj%l<_6r{-ws2c0iRG1qc=>85%eE@KJ_)zBrwO-sP z5vgFf_-%-7Oeu!8@~cW;mV8(95=}BrR3VzG&H*tTEIRkQY{Cf0|1JiF zl@iFv)g5de#ZUeMzDt2|#b!?moKdf1K1gSe1g6T;Y>v`OVlUlFrORm>rB$8%LFX;4 zsNZ|jSGIgjgqu3w+mS&v{htn~v)|wL9z_ek=)7e*?$Ux?gs>A`%+pq00 zxRaPN7ZdSOMb1TP%?Y4*S_XqN$uIxZ#0iNXh2PX@oY>CL zeDCRfdH9LsIcxy< zPGhz@fp<#})hoR5cUm1+N!oBh^T|$%iuuKP3ZD9f))zE~n@(!5qq{7nUvY!^hI3mR zdzjhP0F);%H1cKoAxH!9{~Yt#%W2IGmP{ z3TX8Ouoa~ok}nNXiX(P6EMyE|1*`p-P^R4D+t_7b>5a~@d1Pn~;8+3RS}hEG*Hj9# z=w~Yg<*v2ea9@cyfbmh8rBE!EaZ4}zwaT8tSA~(`26uH)Mwa1q+ci;n9HQ`Ti(L+I zx5mJPy_k27>|*V099scBVOwr?w90Y^Q+_YoQjQSIGyE88Gjs?_c#y^L)f#4)Me zu0~@8H}O?3f3nl~u0=t_8zpLB!%VyY7+A|o zQ8ABk1t6#K-8v;4&8zk0>6KOI4}yN83Dm59R&ieT$$!SC11AQt;-%n1({1*lZK;R1 z(T}&capYhBm{Z@9?RLvcFEW_WZmFQknB44|LA-vXXurrSFZ<9#wC)k+Zh!J?ss~#x z8khqdn!PET%7Z=S84fMcnNE%|^*=4=M}yd`c1YuJlcrTJNcKxSrKz!>L3t;cQ6>rg zSf^uKc?r(|LuQ{=D9wsoP87N+qkooW{Du&{+UP8~FE}N8U8adPuE|fCGThli*CDs~ z=)r1F@PdQCC`Hs#E|cA29M4t^gCd`ASjwYAU~*0u*h(PF+lN}@qjn|^XFtN=j_ASA zkHpWr>FiaE!{05ZSD+bE>?=8vWNfVvy^j~-j=e)X@J!<|!QM$wwrkFpt`|*|jiDxa z+Rg`q{H0Z9VrQ?)miU+eK$REy9Cb(RuS_k!zJx*qem>~ju{f}|l}1HSh`dv1TmY2y z3}sq3JFOH1b{{C3!A!0# zd5MxgX_CUstN>=(1QfpJyA(%D83ltm2`ka8hp( zeN!Ud^{LcoCO9brpUo(kfDdSrqhkU2GJ3?)4=}!5tyYk8VH&v`s(4SgIaDQcQ$7y8 
zM}c5k;Fwb`%4K}H`iYLiw`D(@t61tXO4pn6b)6vYUOL3gQyXF4R6s&oDSgVt=*@#P zb}^n@ydhI{2$1tU?-jgoj#5v@Q4WDjK$)6~7YOIt%8NXXHv!?=nosN1$bS}vieHQo zICMA(OgdGDtzj2L%Fa20G6rY7;ds$!5+f6tog=vUFEsX3spJ9G{MTand;Sy9 zfMWnR$Qk%^jU1o#2&U~Nh|^BEWN`WN>=MJK^h6tncTy^nE?G5N-FAUO=^?POno$j3 z#vJ=p@&Q)Jq34_-DEENl&sSIqay$c1VsCYqMcrJg-j5!T1=B0 zWn}Cm70P=aEy$QAyD5Mv$gv+U04~c%nQnTARidi=5SWyr0TcPCNjX@?v2D&VVl~%b zM>UBsqu#EO2PisAXr#&YI(rr`vNOON^hSEVLNvoXpB7laU_L`J$b!cj0>ru z^ybZ=2}zIM*|LbqWq!$9DR zp?zdzS(nX`=eRAHRMz9@$7`P?8cTZ|P^o@_nr_kA*9>rp+aVN!WS@*BK*i; zPYmOX8*8*cQL4Gcmtap{Lg4PSZ2}Yv_?}{@#tuej-!j-XhHB0b(OJaTqd9`TnNz8_ z$vezTws6Lm5_?AAxI=VtD`aJOdz+s=*lrPv{ZRJ8R~6%xE?5};X+c`fIoFq8caCbc z6==ZQ{7z)gKt(0zs3w1&wMZiz6EvfIj#h%P)vbkrE2k}>kl*Sl7Z`CbFd2?!uU$Fq z0)VPnE7921yJq#YE71tRh2Q7`I&;nH2@3{w_0FQ}x{jhngZ0}t(glO!(Q8*vBNR4s zHQjpltmU|%bK~0W_WxW$7&QMY6RT@SCe~l;|G(Mql!^7#M-=?jSwK|4N~GvVB>*ma z#Z7+KRtkH$ig0vB(IpI2UlvmIWjSYz4$1|8lzJP@0Q3UJF=}MRDVE5c&KnE(vb;m- z8@rqzl|y{oa0N`?M7}o*A<3`tqjM_P5K#ppi ziivz*P`u99vj;@`1hH0Z#^joexY7$qS6oD0s8zuB`

      hbG5rbl;t zIgb`Ud`9J1yJXg~A-HI>>}BT(^#BuT3qE70*a(U_0^6YQdijF-TZ-G9#W{0u&Y=<- zx|74Cp5S&&%%wc1O)k@Mw6`!AD>){T!)m>Nvuf#cYG0DCzdPgubAOhUjFGJqDZ*uNB4y&0z#rjn8EC zS2bA-$v}$-31?e4OU+drzmHd&4CJu8g6~x84!LY_%5c%oZ^l>+a=sNb6MmY`*iRUy z*J^S%J}Tm@kr0pVK-#B@Y&*v(PiihY#>)p(eLf{IX+JlpfxT>zm+o%KRIlI%npt=^ zhItp&*xPhgahk*2>_FKWr!tisb)&_;X6EVaKp7g&wzTo>jVy5t=Pt?k1#>oY3O=%# zxjGAAWz)}OOIy(Hrm%Q%D6sP>%j6~1O`n8K_)In+>o?2Yl=*n1&!n=g8rAsKIOK|21e!N;XwV7=$4*gs=fP6$CzcCuVKIni!=Zx!2`z>Cj(CvW5 zP6uM$+*Wt9NMJw3+3(Qe>$G-%n;IIvm>%kKc^DodOP1k&M+9x`i@IqZT$o#Aj^a zp=Q)mfRQIdo*ou$uqSP9hg%%NScu2n@8C0ggXXECjdvc=%^5#8Fo0^GU4FzAo}&>( zPuwiK$#{z?GG{!dp&!(^23f%nMO{!f7*qv)6*TeO?c^$wWhhN`J<0CsL0-nUvG>Sx zIHJ2R2u^~4{Xo;NR4DmMxm`jNP|-y2vWX?qJW?-P*eR61hS7e5R}ZC=u2lTErrY%J zH`>c%wy2$B7-me8J%fawytB=Ag(?4%8OrY<%E1HOUa_0WD!QfomX2MqBo@E#q`aiX zO4-7`O+o*YHCve}_XXiPfTr^Vc4@oSO*L+gbexGwJv>fyo)*-t6`U}cXAh#AhPwbm zI+z5GpR;%tsFrqZn;R}xSZAHXp{*S(Id|0A@gm3m0hUojP|hcJvjZ8YFX3@nH9ot; z3pP~QI*h|m(b>SOHyU`r`Kq7K#6&sGw$$AQ4+LTG7v@2cLP^-QXshgod$h6LhCs>f zBRku;YB3MfJ=|CJ(nmBS7VIk^ad%2?Sa;YJnE%g`FS(X4QOXiT>!5ZIWKa?4z-VKe zADs{Ay@D*9xh2zwRf&Skg z!p613h|l6ItxI>~n}KU~|4xMf7#^)dyHfkt2`#Ep$@bU1aVWA**pOq@KfeY=4-{N zqVuy6Uhs&>-ZL>DtIiX{a4qP~ce?DwWJ2t(ne2rg(7m~Wsd}Awi*NbPP?4PoEI9|m zva7;AXyc4ah62Y`)6ZF7kUj*_S&0cWiH9MPPuFG+4}vtFqwgr+Kp1Mq>M56fQNpDi zD+n+9fIt`E(tf?OmIF!wJguCvz=+475fH2wxa>-tNxzjx^v~=Tg6k!i1s)*1q{ zl}L>KSy10T`~S$+O7edx-;gK&e&u2OPwW+>RK6jg@(l?sIx6u{>1RqQe7FNVt>8w7 zQsWtCCMV*d6Q;U51!YT0af{+5$5O!59)~t zpiJU>Q&@2_-BBpOionGEhiINCQiMPU9K*}f>aTjZxO5c7q@B+Pl3M}=ezItvFVq0w zz|LSOHc)!#7MR#Ye4_|9<6R#R`zgL@mf)%Y{K@bJ@q;1^{7JCp(W(+%`1nSv7D{rX zAgC8bg1(hLx;vc0+pB>k4wZ~`0@;fAA;JIv_fwR5GL#%`wUEbU=w@e&T#we!-0V(2 z2Ha~aQIgT5z_E&fl*%}>LD`4%eaDXo$n9u>JcRFt7C(W^A6I9j2Gyp>)MQM2YZZ zP9GN8`K@Iz>KlA{xF7ls_?FN;nHezJE6Lq)iU{Rg1Cn%;1E5s*H~ipD-qJpi>6l%p z^7>Wuwy-&kBx^xr<4y`mbpxH)nd;>yi}f550sR)qhMUg=n;&hn=V2j^&|UoM#aqaV zq9;cN=?m7_5SZJ!S*{m9GQnKc1E{i7uSROpPd5;3CF%bRs`W=ETf$qT!+Z;{$IWs( zdrq*`!46J8%l06h^*9!sD|qz-1^1jS+24@s>7%7%GdWx9qb8tsdtLgkiBO 
zu*shUAh~T6G&@G>3!`&2b}c#rvTt7`)S`EJH#l1Ivg37Q8@I#whQU(ai@Sm2_oCtg zKb2c80j72Fd)?~B2lf<(?8S>L_Itv?Hx(@1>cmwZ-Z3c^h_kgrPIJR<) zCcrq`+QzX91V^(#@lVjb;1B4_)ttSJ)1DI=&Eg@4O@)Y=Oo(iUe`bYFj^np^A|B0uY zG;32b36m`LkjNQtw3qYveOS-;6)!kUGM>`NZS*l76}{2Uo)$R$FnYKKfZ3cxUTjSV zjCq|`CtzTkJ887UJ>5p@Yqcg$0Y;PBFp(xpmg4dkFJICp4EqGqEgmoNwz3)2VpoR?e7P62ELc;C@A7$9SMNpU6PvH_C1b{u4xkQCNW3 zwc;Nm>`a@MJ6q})+f9d&|E`{V%;BF}a=YYczL&2P>i7h(2+ahonPz&y`QVS; zbsF^%S|cOaDjR-ur*3MmLA4AK*>F&Z1$mQWGndpNB86nkQpk+uWbnzjQn;ovRN4q)pC#%F*`OVBrC4(I}$Wo_PsZ!4F z=<#<=KLnbo1#M8)+hq3Z;T*flw8=1mUS2X35_PeKKdS*r0nYHiTk9}w&S6q5e@Aii z%yMA2mXh?T+bexx47C-U3Re6GG-$9?anZm&9LX8S4u^hJ@2ZatqF{{}F!7|B1T7m~bur>nE=k01YO~II!;(+>oHTfHPKc{c*8R>GayC zEF8ZN41T&A3>#Y59F;d`ag8LJ1>^+V*q`FitO12IG8}rzD$Wm~zDI}QDj13@`MxxU zEr8;tnBzPAD5`r3tOER$zsvUr(6wd_7O$_s|JX$W*PXYpcknjv7H8Np6V4xC)rfK2Q$r!mh!zxw3YlbFz!7C!OCPS1Z^n~6+be8#8VkBa#2yr z*b*LwVgps@q73|q*6+n536X%n2nR*Yke~V}h~Ws4@B?&rvWc@FhW0vA@}q@3@RlaK z`CO_9u$f9E$}lp(o!MmGE4JWc@g9Lt!KXpG37Xs`)svviGJKOSoBWJs;G4Xu4;6cw z;Gm_k+DuV{Q0NN@?G;^8J$;L}BS9S|q6uapdr&m~tYD^Bvy;Suu!!E|sVbRYOs3qL z#r)VPfF4{g)Kd^7d`l1rW23M`f?lh~qD2F5PK_ zo0~rcl!X?>SddUrS&ZPP5+JBCgDdOpZ6W>v48Itfb}|_euv2`*&m}MYw(ci;L0uPr z2F*QAV z3MfK(LcE!la2%_FcpMAE@+QXSSj`Cq75JceqU|NTT+d5s(H|CVG*AhQ-)fe6yVDnu z8#M1%>ku_^gnUQGUEVO+V!#lLIS2*h%%zq)q*9}+8{2TkS`~&LC)wIBb(fAN@qRQ^ zv{WMEi+C>^D*6x>s}|i3quwF0Z4CVMBp8TzuIff_lRY-}mF+jc=7)-I)Gm2wtY=SK z1V}PKiTGTLJ#F!94F(X1HtvwT;8qTDIp=@WyR*a4J3_qb+H<<(uG(1&+;F?H!E z&C))_8MAeE12E+SHfiv8O*c8P%ggp1RzdnDFxlY0aj0d~BRrl)s+HS+pBg!sC%M?`xkjdgeO z`jZ;Lr)xk&VBrQVV4ckrxYF(e6Zx~kEQ2!|8fj4?G=EmS{8Z6Tu7Ru^CAs7y4v6tA z<;!lr0*v)kk-d%2Ij_Ugtgc3$xSdX)rf;Swp!#yviyAfd3j^O5V(iCTZoa6UU2k%F zC_o3*(qnbDbB%MLx+Ky-wLpaGyjo#T);I$_@H(#r9iYHL2fLb9KG?v9&#R!oy8+>X zQx2vTm1_iNXF$b$WqmY#n|7sA-t&7p>(#7UzN_M_>0uvZ&^*Vkg_hyW>0$3EybeUV zmFtH6an=dW6A0M@RS z$=^A--OU#BUigq$k4lB34exZbBP5o95Mn^IRc&6jly?%<$BX0Zbi>857Xr$R678h` ziw}!#HdvF8Pz?cQ!H<8eyxw8nna4&~WnsIxsU{#~6NM@fQ_ zV5#4))3KL*q!?p0;<|wHUa-g_2X+3SV=KQhaD$^IHVOo2(@1BeC{N`9#G*WyV}4b6 
zt3*o^E*R9gafh#}6-p$>7h$e7rT?oJr%)*I8FFrQ&BiGU29+MKRwCGxa|Xrbf=0>! zD{G-{T(f%G0^Adjtt=3+nhJN#p0(ZjpPBXlS_C4C$5yy6y8r*&U(J8wuqY49AY(S2 z>n)l)vgl`UqiOYDS$8W36sl9yYB@KG>Xrgw%`9`l%gR8ba&V^xxxxDKumvB?Q3>$C zo@vW1mXlwNB8^|U#b$AB$&QbMt(XER1`KD8%<;T|I7p`QeL=jf zEc?|$h&?Ih0XeQ#(8RCeh)QTJ=lYUpIdt+8ibahkQ%^B_*!2D=BSV+XgFpo9p;b2U zAaNFS`#>CmoI02Q$s!8FRm1bCEdc$CU}qNbPN!5&AEIhmhrxO;=6fTU3Nqg@EDg1# zEAhmK3B71@ga$CVKu@d#2SM*EC(@NeL=V$Uzl~}K)Y9{D76gm>$R^FF+(9L~0B8Gh zuqm3*B>`RqE4>5=EhSIeC$gpNhDnbW0*wZ~a06cp=Tg~dnY<=`NFk3Au!oqQ(Wi1f zfX0}@b83uJ?t}t=8_?)Z$pr7|$)80x6iWaP06n`Ym5*FYFg3lJtxB>RU!<}lB-|2D zRj*JQF?64gZ{l=lq);I4jSI3wttZ39(&(Nn!t{VqqYwS!lN|^&`b36)WT)Y#0F}u} zA{!=hegrmK%P#=skz{~cjY&A2Y6FbW2ni%_@TN*Rg}0U|0)${Jbt%3)ew}x=flEIl zIBim`xD6EPHi_*cb0CLyE%}q$hye%KRzTU^O@N2R6gr+GU<_y|>>;8)OSao2FF8T1 zhx1h$E+4(sCb9V{XAGh5=*~AN_Wh!lj?j$#4d+c>BUjfJNaSQd6yVxg`UX$=3r>Vj zKSUz@5N!b-`?L&#Qc@%p+Uk(p@CA!HzaAY*w!J%@s@QJ<*k2ZU()$$YR4lJ;Pz5n^SaSiBiu+ z$`R>KA@OLz^fcZz$%BwI1sF+O1c77Mv=N}Smna?(kN%7R;@%zh3-%SJADt_3`kM-+ zyhSJAwqV(>^PF)KVOL-%>XmqvTnFneW5csJw-971;8>4GXk=S<>cA)6d4ZrQvctM4CCFG-fyvbh%T3R)z3?WP9j$k} zSziZxx&s{z1Ga@C{9bmY;@l+r>1fSVXH>?<*=*ezAsRb;FgMF?28ghS3E6#5F}tHJ zoIS>7_fmR0j%(5Tri%&Sg`YD&Ttq^0GECw|lP#)pEK_au2O1rE;U8?a2S#8y=Uk;Y zp8!`^b^AO)>*;j-ZYtJqTt#$Bdme}YavHA@^sxdvPoM;|MvH|qLeuHO6>KvQxSlU9~b|fGkj{U7}D{EDp?MRd(8vjxv*^sSzc}ov4)(=ji z_`7VNu>QK|@mN(92a| zj5olgK+tZLEb1cbb0Yy!B{qkWU+#V z`$bB~WB+d{hX|y>^sz>EYNA10eTP!lY zG_crz>W(rQJshmJ*dpLMK$=>-Qg(xR7P=0$ZYpo7KWf7X+qkho^BvyUjR&;TfuBy+ zoT)si3i3Vw%EKuqYwX8uoPIL-N}<=BUwU4_8;pmhJ<0A%!dWs~Y48MSCqni>`2W+R z+Vg(?CYXLgF-|CvAGML=!B*#CRO$FxO2K=-%~bX^yaX3OXxM7QR$G^EP$*Jq)6_pj z^OL23!(c&g@+zoL)TLA8v!32rhVa8KP_h-J!mFo2D^jygrL(;7Mo8=m&N)%+j_~Xe zHf68*q2*c1Co=5ri&bZsW?Wo_4b?W7ZhVr)uJ<`MSFL)}gepuyMQ?R;3~m!luGn<^o-T5A`t*+lt;gO+JZ9dIE_-i z*5DtCDXyk?BRR#&fBtuY_~c(U;2By3|K8bE{Fk#UiF1Bevh%R%y*av(+)y*QQB)wl zk%0Pn!Ah<-fi1z@8J$r12Fk2TGC&OcLTGsvM|o9VycIAlgM@)nfCWvoCqXFm8a5WrR%CnnmnAydw2}64#`|d*;D5O{lIXh 
zWw35^3`Exhe9#u#qA^VLfqX0q12v*%Qy`;WPg`y>V2TDsB>iv;SNgD*@S`HUY9k6m z%Ifj~oy<`w-fYjf;HRvC9vdP;YnkfxT$vyhw@H2oA*hAFk{U^HIp0?#YXn-fR?9A@ zZGSH3*{>w7KP^^rCXsoHh`ac=4)kdE3;KUbWi&K}!HyrG<%Zz@l)ptj-F#{3rr3hl z0bqg>5&R|@^HhJ|L&3w*tzPB;Z3`nc6D3k!6&8L^&w1 zE96JW)~h9~2?!Ygjb%%r56>6;=s75_2=p+2PVI-9!K|J_DR~q*I|a8mLh|vijaRp? zP#A7niw*~3i16HEu0YiqY!!b41UgHB1#hBCV<2xi&P*^ggsruaS|d-NRlWE|n8P=D zQ-cn;Q)h1g$^dQHhe@N7V1(x8XnPRDp1n(9A&s2m!$k53)(dFQ+of6%fW@AsvD+oi zxX2>WNcRIX8!9?$1wVUthu;dvq*f>m0bF#u>=wH!>^YTZ`x};ezetDW_DMw`kW-1E zY0>R?*`bn=Sv!ImUIB;@zN?U*)1JatlMQ@d+SH*3!P$bt03HuA2GVBq5MbR@p6#Ys z$~r3Ew1mgt7za|AgxGZ2yj`M-D1=QW6YjGYm#K<=u!S6CM>{wJm=`dMjnXWr=bju8 zZxYYx>j*|^{ep8dbNVq#Kd>!%mfV*HzmmLYL2I>YalF`Shy#@YhE~HsEgPIpoxRFi zWnc$Cr+tF)IBly@&1cXpV5N%JAh;{!lFu|hWnQ{LalRwze-zmFMVMP0LYjCe^pZ1h zkf-K%@!%!AajygApg=^E>_Ar5(+DurszKQ@()KbMigM6xus&M!sef08HPpV==B5D7 z=LtThn^h~CDjj>6fPORqLl;fr_e>Lh+J)`xEwcrTDXPD(!DC zp0H!}iyI#-v0dBH9cxrm^>N0*j+gvODiQs1PVKk)(s-g-7U-inJXYw2P6Qb8^7}RY zcT}m1ywJbU9b2krzo9x6)o3B+=+%SR1yxAv=2D!8;KsJ(L5r}ls-*_)R3hs*kv*ew zx=$5W)wS&y;&%oy&*pt7Y)kotgob4@`vpv4_6y0>CMactSG?I)2Z^LaC`_zik_n7} zQ`#g`1y~P%DLGG6=Arv-8u*WNG zPP?VP-sY!DopX-0jY(y0NboxECbxEaIRrXCN42y$mYW=+v+p>Zap7QR6;~~MtuIaf zfyuavW9uwSeXi=I-7Y(_n={_xDb?UXyAkFQ#Ad5(0ImUPukPwgL$KAn(=nzHg|4RC zjJ_1|;7qC1vkhfc+O?`vbJ>9&$3CmuC45}l?gjHyV`+sln$m|fH@XqbL+WDtF}oXj z-M&qyoF_lIQ8v}>taNP$Z!5-rtW235F8a|OPJ12tkAU$~b8H&F+Xe!{&g!wWyTR3t zl-R%!5T*)QhpdX1M9U-StxE@!N9=&E@|06;ULT{;Ee-?-n=g2Il@{Mx`U}8E&@A6~ zIreMaDQPIx|{t> zW|vkeMZm7*zn0wWPXhbFFrNKdvbAlTjia_Y$M$ZsyF(n-CAJM3A>>R`mV5xLG)w2$ zEr31ipZ*&M~5mGg_{%S*;APSyXgBK59)X zA~};X>~TTqyPDlv%Q*X)`y=Krw+&wnqt^`)N|{~ln4|K-I>v(k+MUf54TL34Bw z%p!2K>nd(+2xH!zwJ;aZRYXbjfDxoEdMOBru^N?-6N+MrZP8fXoJx8XBQ`oGj}`7j zc)(A|5t?noc_Cplp#T0j-A2qm0XV}`*{(e8?2E^Dr@QjBr12eEXts(gF~i$@Er^9i z8pXg^Zv%<_i7*!M*=FFgxRq^IkKW?Te2c|YK$P5Uo65}xK{ijN@}Y~$>Ro<*sz^z; z9%Zu36M91mxIHV3E`UaQ@VL?gk+foz>QugwV>a~_m>y-CR(JZW(34(4d8KvON^thl zSCm6`50bh5xH*-e5y&=Lz-q4K`;&YhSc}@;QEbV)%uW$0or}F@+12b@ 
zM%myMkx&ob;!D;3C$-fUF6G&~imd@EeQA)lHCR#t=&w=Pqenz`kijuXVC%S7^TG zPJ*ex{8c?en>~5Ax@n!B)H~FE93sf)oi=n?w)h)p18&o0BD^|+1)Y(*NkuN|%{!2+8 z&pQkFZd!6{OYBH;9xcZSue4)SU6K7=;@B(gmNr-Q!bXR4u0Z})vzDKa{_;@Pl%CV1S&Hw=20EDxl+^4|VB-ewAj-J&w$@W^6_1XYq z8O2DoPYgG{A3gnvVXUP!joRyq4HHe3o;!3*kSk3uSwnHzdN`M88|AFJ2@rJrpk|Eh zrf??IXHG zZB8|5=N98zU9_@bUo^}ElM-Zo32oq~-Ko0)KDsH?3)!%5leFjC-GtWim>^O!X~UOV zI!|TSR*3u?YXzEWw4&l9@J-uAPlUAE)0ioOwN7ex+jE;@gxXdWS>} zi3c3l*INMB$T{F7eY}yct=+|Mf@S)o2;Da~HWxzDPsNK{qQt8-B zKaq_uyNEd_r)kcb9tSV_so`uc=MAx|Xtd^MpFpGk!FC~F6!|vpcj>DS_4=~HWL^T= z8ojHSlr(Agc6h<#9bjG*+dK^V`e=uB59j>o1q&kkC)F*+bgfL&&Ef+zr9otRp-it45XP=L-wQr#-rsBOH5{+K+^Qrx#`?BnFgPq&K8AlI?rPn?O zblXoR1;!o{t*%5j0JXBLnB%n4wx%tpKU=Ow)VKEn9nJqDrktz`%241G@=(oM*$NsG za_ZbzrqIV!D58W@t!(+3trd=ID3`01S3jup{h4Ky@s_o#%VhgCt0$C6^Z!T5_K!_fLcv90bVwnt+J$sPf?>PSm{9CB#4IB0mR%ykEy1= z7ttDlI2UbFJs(mUm84uCP{PJ75Zy(zK<+`&%b+Sx68$(3Y-JWVkcTnkDpF4sV^sA@noutKDW&Fj_9n6*50iXeB9m|Sp+9T>O&~*;$+be zp{BaCt(*I~EWvMcdM+A0LLW(148P=V6T0}W$z%6Kj zwyDNfVX^>?deJT6}q$<_CX*;muUc%4&WTx++Q^!5dZ4 zDbER%@*F%XG@|SgBGw>s7NLbaqKs7N@;Avjf^NZK9tebEXw6zkNjo*2-2&)EgQcwi zs~-kZn|vz!cv}my;RlQOwz8N|HNPFYnlN*x9E{X zd4nrAk@M5XZA(2xE01S#oB@FO2Hzb{;%(qb$Y?z-*K*c?+1+q4@5kRXIQ>D<4g_r- zpvyYJ3r`a3F$@8L^oPVovxFUBwxI?fSo&}yftieBZ}FzKrwORPL}4e1955XvJE>HXoO{IA-H<+1@kfh&|)YB+qx6X@H1?$ z06l+LD|2Hb!7YY~Y_B$7+uvvu>2|yVrbhOlXsUwje9*?8fHwWLV^89>Zif~lIq32I z@YxY94vy=c6xVqTpM5`&5Wac_rik8UlUFPLmE-~K8~p+h2jzjscROB4d(6!zoAIpL zi1Wz`(`Z2ydS{Kmsgo5qmJQ&GGSM7@UWR}p)ywCp^_E}0wFEc-GdM(|1$0`wQw%NqweMDk1=Bbry>+^*M9jn_OT zLgAJCwn>gXToE&?uJRHaG7aCyh@$^&HPH6Oin1&wl5z8HM)!IBC>g6HS&$3(#!4Ky zQK>yez|@=}WLfv%RW=ZKcB|_CMQ_B^<&`3|y3kEPlykXsPlL1$0g$!5M`J>EvFOA0 zlQnVEbRVMlKE#TYye^J`=nbKv-I}%m3BvC;c=Z8W z|GVTM#@0@%%AVf?aBk}Cq=PLZFWY8qg=|Ykembl(3`W9J4LVlG-{oYs_ zu8O|2xZ-g8c6*2rlaS7~!*sj(`SDawlg@Kfyp!)!IdjSd+^c zV6QtQ7hYe&?}a!)pEFK&6t3``!C_RBppRT0kQ*^UU8v7{U^$sqYz40I4v5i8d!1uN zR`8dA2>S$SjdzPW_|d9G+l|cw zQ}oLB2lGQPfy1pT?M71;hj>su%#-SC1phiz1@ZU=L 
zAm=M?=FkNM9KM~QMa8)5?Q(wWnK4umn3|Xh^*Ai+lRe{^oVN?2=ofs zRUK3Bp0jf}8a=ugA+%yViBE?d_X{d&kuc?C4%nnVEXii&bq)0~;13x0C#LYBToqSD zwd9x4-&{TUro7FXyg|l)f>b~mNuF)YGS7&sqZWLB@t!RULHc z?7R)PgDT#P((Kx0QnSCv^DM0jNWmV{%Ow@fvJnJAyXaH0LAZt`U&KVZ{?TcS-(yvP zUyO`*spEFVx?}7JZoY)gxexX~;=c<1fHj6GNPOammW7bpYvb!r6M{5S(0q}P9fkWY$=SkC2eHm^8Ebiy&wAy z?)-}?$LZx&JyIiFz!D(<6U=FYq|MFDnwEkh{ZhG(J4_Z?ON?;|Vm6M6($b&`w@K<( zPNo*6P_+^9{lVRcLnmaNv>5>y>B7T9Xr7+u4_d!2p3e&EbqIul?DyfaRdd6b)00q=WBuZD$#LL~jUg|8Rz_;8uq(pZAv7fz| zw59|ulqMA6iL@v=bdx3frS;Q+vmn}DCeAv9kK7=J!eF$45M=Z(UA?%=8oVe8;t>92 z&ByY(FIE?ABOO?+xkZ3Y`*MiMm9mkvmO76G6wvOq|_nJW<+;4 zKis4N#k|U@ZBvQ;hsMF95H;`;P6J!VGc^_Y^xr!7JtBep2zxfBpT%NgRlQOkdnb&t zbMt6pNJTOUfX1gux7sN~5)+P>ts#l7S&aNiD?lkDBQw7)r(WwA4xO#YJJYm**##kv zO~%$cd{H(dyl>P_N?#}02g2BizLoIPG53T1ur#P6w=YN!{shAATkq*pbhD~Z+IXs! z;{l@1E+J^Tb%uNnRT+cOTKt0S35u#{$O!vq{a#wM#ORye4GAk@lAZ~ zc=KR!010?23f0v%)#L6x$3Je%W_w0O#4$F2-VlC+5G3F7QeP2`_Bo*%$8@1#1Y^T0 zF}GYt97qYCN~S6A!FURwUWZAP|J%NX_~Lv?KBd-|kS;`2HO;%&Z~{L*g}k+ZOfvuZ z&$^Ke=KumFbF$&ueOtbDTFz4xhI76?d(S5Am>*JV8`+Gckf@JbByn|&Q?utv(*wvm<@ag#%aoue_p)@jB1|d4hyKnmXpN!4k}Zr^Q%+ z;AS)U`{LQtz|Eq2xwEstixcH-=t1^M~kohY%W_Zmi|c&o@`)^GQD@|EXa=7;ovX^@JPt ziA0I*QnCjd+MfZKkM0H(>?u$1=e@{B=CG>*isA4V1jbwQ?4>S zD86(^JW(?n=qdv1+O%?+$zYo_DX@BxIvOJ{+Cqhh<9xz~lpkfxsBFq#Iv+3aJf^hE z#AN=xBl6m5CKK^x1+MyiT)s1q7utk^nlq&bMv3GXA$|nL4<2#o2tOn*3VRr?F_9I$ zYCNO>5+n;xL1VgazH?>y#;Ri_R2Z9fVEA^4V^2nAfG!LtdgXrSp?97LU9tL0q#Z+# zCC=@FsBmxy&ZJ{FB#(J5!`ws-G+-SykCZZ4t82pd#wM1SO26RFoY3+q(#R>Sls4Ek zPt?Kmu@fEq_1_5$-tAwefDE28r=Gi_OR8Y!9P^bTJwF1?UfS>*tGaj5$?MO?uaSc> zR!o1z_tMn$E-;f&0}G07KQk z-(U&5jp#J`)bV1UYg~kKWb(>g7=!lMYA!;3=3NlXPpo4Ed-Tl=cSCl-SKGPXFpe5s zrR9Uy33l#<@LtX^$eY-HLVg*6w#;6JTMh8f^dd=Je-Mpw#y1qaRY_a$g7gzLm>Dh> zs3=RjVePJpY9;%Wj_Qm2$cISM_OMZ6lDU@wHCyl=d<9!zR47Gx-29>p4sb<6|?UxY7?y;Gz-7GY(cY%Sa-n-H{QhtM-hynb= z2l3cTmJT!O5`9GtEBtp>0wrur!_f6X>ywI0Or)@&Ki{~7)R;0lpIu;jr)gkqcTBv- z6q1dZfQn5Tj$3Uy2$%h|@<#amT*HO# zng2M1{`0dX5G~5mq&1Li<6-`XUKAaqPf`DRk=9R~2i2}h%%e(ecb)vZPzS{?9TFvG 
zY?r!Am$BO+bKtS_on9t)x^60~l{KU*GG`)YacY&FeufO-5#Ec2HeN_l=d*bXB3v0` zba(Cx02?ECi->U*Ey89o_Qp<6S>vJMyQpx~N*~dHvD~G}&xIN6@g}6*5Yn+|Q&{_c z#=jCS8NBa&f401aDvwEw@?Qw8z$A4f*_6B^KlO6XxnUF`U9`ryt?)B!niuw=9ugRo z|8|WkU+=EgxK*UHO5ui1fJ6+wIrEz+lc8gxS+bq@DTrtdq%8Nx^<@`pk?9XR(g}3n z2oLUs zuy4XhbG&q4q)^)6eZZL;B5a;pop$*(_|PY9oLnPR;-QR$_4%8Ca2iLzLf><9AtgI@%b6a~a<98oB zxXF2$un!qA|4uli;sKVCFVomoV+TxvChWp@apfwYD4VjQ*3_%ZX9BMV%CvPZ69s)< zRvj8b$7D^1T=^(gVwa}#9o8>v|BxxLazTECNiKBSk2tZ{epX<21~Prd^Gk`GZ!2H* z2_4&qNSQ_cML55@jrUMJeJ!-D?mgp(cKJDiUZqyZ_)~mw;f?FfTy01dOGWq8;=pq2 z(M|b{dG;Ur9H0Kfz+JCePgTd^=5x1&U&RLgjBQPjX|!VjJ0UpJ`oiJS@TYx(zx{6( z9*(euk&7?Z@;2Xx96K7l*GNC9m<^hn`$US}u1qJqQz}AJz?=}2&SfV(!j@ooSdk&> z&kUB9cGK>fl!^L%l?n$B>mh~jC3P#OkxmbWK98Yc4N>k5oJz)mW(dHo-w!UbJ! zKJi{3JM=08e$Rk$Q?Y^Ks*9P_;R%E5CPH+Qx9(w$o0wAW%bgL`;o|z_yEj< zrR5AA>uJ`_*uU}l@%``gjSv4uga6eepgqG!V$A=0UFdYPQls&cm($~{^ZD_e#(EC% zNIJRZkgOi8i3-im5To5(E^x2*UYnAO!t7M6_!uz0Y4fC?rrj#jtCBVTR20Zqq^{Es zg&AlLtKYgE{}`^*9~T=nRh6Kp)=1Ru%wZW*%oPEaoc=Ik%N}aXh9kpr+}kXS_E4W= zsnxr+35;GK!Li2R6_9F7dJw?ZUTYyMHC@~@EyzQdOQAcgUZ;><9_R(!_ysH=|DxVJ|B-bbCsit zA+>o?yuQje{7bv2mpJ)!>@tiYg_rq9$T@!Sqd!@92BND2xrpkSaAk4@ZGkNo>xY5G zU8p*mJzzzDa$q%@jAkv%e)md4irsgZnH{E-P5zN2GKGl#A#pTq-Pmf1bNe0YsG=Ta z?7ESH9kcTg8?T|BhN&tY&W6!LZH-wp`ME=Ix*NIlfidlP&FSf(Bt1FGi_APZ#)hZ}a-1j3Ckk89Ru5ywAd+$a-$_;3lE#MA51H zI17ELF=M2KkdQ6)Q1{oNHV?J(AI9PD!Ils-Uf?fjWBHf<@kFu0W*U&hn3L`fz6xx8hX2Y&`Q5$=Sp}E*0!7 zee-XD^OcLfqjzMrC7jdG_V}?eJ@gt;Da`e&TM33yqbqOvfXx>`5KW=7FTRfXKW>q3 zh5aTu@X%6L^=_u)==3jjAs2UAbgPG-xcOh2RgX)m=IS$!&7#!b-ZNda5@l}c;Rub| zQ4noABN4PFgj0k#`MzsKr|{DqIeZh?78cT_`d;pg?@hUMWI!2hWsK$&pD0ycDppjs z@m_0Kp1i?n-a&txcxa|Hy zbkZA1x(!RF+n(MepOVp21ty9$II&ujDW*U&a;r7JPa3lzf3e=7k(6eF&yvB#xCvH= zI;-dy61KM^ET)9t(Nflj1ij;suWIJ@(efs9CwX2cM373Bc%+|eJSXf6W?XHe5Ld3g zZU~Mx-=$i-NAfw|H02UnS@qrq>pUE2NNQy9ts+^XT5u6_xPE2Z!P<_qKt0aXM}EXz z(`soFlZW#?Q(+j+E*3H_g~cuNS92|tZRndZYGBf=Xe1!$CF}cUmZs1XbtJ|0xs3wB zap=QfN2a6sURtEdpQSUvYc6IM^TLfbicbPbAO8p_YHrgze|Awg6fa{%uOHdg4Ecu( 
zGHj5wq7udMGd}wa4zEb%RVZ*geWrhFM=hH85dTKM+CAovU%0|J>A7e7s$zw_gQLRg z=CU&X23gm9S!_~eA2|(JtZ=T{dRzqk=sF$IWF=uypJxHJWZOSU^|93%sNK1)^J#WW zc^X}qe(!}-$8MU+Z1yBP0`o0^wS<68TfdPO+#%m=b(vPHLAYfxm3*tF+7N5iOcQCU zTwuG}Sdoh%Uqpm0V;M;9OEq^Nu7gXL2_TFIy4j_a|I^zJ<#+x*)HYWM+B~-6k>+c-@+#Its zl7FN+t>IQpJX`_ok*qGdnyOR}ChIHdi`ug2^u5y9!juuRX^DTf28Z zzbv^^fsobVpp>(Kw4||U2D>*-$wp4!0D@QyiO=5fai0;O<`H@{u+xVG2|caC+Rrs7 z1TGP(Qx>tF4-f0eaGf-*<9&Zk2q zQNt&on87&kUpg-H;}W=W3$6^--TWay%w3=INn(#8J~^tST6b#6J_88y%i&RK z>|n=Ej8_sn>9kYrGIAxOSTeQY3?7tAU+PB5CE1SitCWmi1HgoK8%d zEy%8T);cnQ6dkMwhbpAd$XHZI=0L3V@p~&o!~*Jd0OhMLf}KN&50H#RB*=byJPD)a znK|3=o6GSBY7(UU^lay0qjJe5wMjs(ncvqU#J8KT?L|3xe0QPjq(?AIWNoesxWHz$ zJg$T=SR2mH1xA|NVP$UN3IUs5_E{olxgw{sV!#C>cCI_4_bHU8O9Wa1k-o;tb$P zLT{oyNbu9s94=>-s#Qc?KTMb4RvvJxU4SJf*qp4=Z+I5$F^1+4Uu7e#H-K$iQ9q~W z9p*wo<1CW9McCP`)I5TC?jJZE2QXLGJik$rZc8xXvlPj%^JnzSLIij9G1988rM8F~ zu1^dR9=a=Mh^wg0t(6(4sU0aXz~8wDba<<_@sG~G!n7c)Cy}t$|De|2=;um*(cpf= zJmp*!rB7$jhREUr-tKEw9nw8y^S7H&Io)0WS(nv&_^kXC-O;o9Z;}4kKP*?*IXkr; zf``JY^n7@0FSvemy*rILrG8ZA8gbxx@+z8D%u%xH6*rg7%h;x8wQIcIT+eP@`UMFs zPBr)g99Wu9XD4QG$KRcMKXgM=ipWN056ze`c z;@3>Xzf^FSytJpe>}|jaJNhfFjpI<0E00p>>sD8N2Kd=OA~l3A1$^{l^y zv@GL%Pc_tS*wtiljs)yk@^?t|Nwz-Rly;=-G5DPO2K?K%@dqV!2h=zhc?d03av`b4 z3xj>_0$o4lXFTf9t?kgi_%z869r+Gh&*t$l3WJgQomOgN7zgEjc=rBZEDyZmp)l31 zah=F#4R0i(++{Ow+~BbFW^{00!R#(pU3Pr$z!Iu}&4+xEAU6`4wDsCOx65!<-oR-K z-OZ?E!KyD_bm$G~e&Ul!HatdEwJNZ>=mJE)X&cr8v)0fwWzS>WV4*+o*aWOV8e8J? 
zUsZB>u{TtkH0ABal(e+KSI(E-!7limG65|z)|g<79-#AG`gN$l29{xU81BN%2?FS7 z6_;6`FRomD|J#OjTQ7C8P_{OGwQ`dZ@n|6}Kb%53?)g0U;FadT`zKu z4lG>?=%0vsZCbFQHLP+5=Z#?m#ckm>^0gy;9%+H}QISJ)w_ik6V3oxMdP~u~Py8cUr%V`~>sW8+TCwlTn|Q%Y zXH!r4y)t(z)+%SSc5=vy*=?upJy%%)wvRze)yn0Iw!`35fI`zL3tSjditA;HdKtS& zF=C*ZvGnc@@0`k521#d_>%QZ_{0A{b)hei+LX-RlYNgDFxNd_Z;!>O$m_aFtHF!*( zvH;s%DNjT>8zZ{@)V#Yz5q}UQ%4uK53w)4c1gv zIBK5kj^vQATqQ`~oF@mSR(#DGxJ!Q>zv-7My)=&8y@{Ql+jwP4uoPZ;QiA^HG}fw) z7GNZ~=|+DT5xL^d zLV&Hhr>v1k)pHQ9un*sAw9DD4QM-uz$Y#(uaVDzy^8V~GHovX$ngDS6a>!5BuS}xZQSE!P6iMoMH2zL^ zBD8)zGK-w485Lm8y??9~>(7u4*yVHsBT3_?oyrUfUW9&g7d%qfzw}+tOB<>MTwy z8_SnK+J z>>BvOt{7L8-sv$s@70o;A>-QxnO6Km6Df4SiA<+KG31l?^&9oxQ|$VJY`S({b4n{* zMTT&&Q^i`dBM8q ze4!8^A@wnVeW~EaY)VGzgrt|NybGmOGF;z^zM?9?U(vxYp zrID7iC3W!{Vz1#h+n+Cx_$V1MSl-2i7Ft#u6Fk9~{k}%yz7d*QSt@1t?5*kE*mazw;Vx z(07Xu|GB$!Yc(|0Xm0K66-|Nw%JuD(qS>tG&69^Z6szZP<=+wylM$&MNXKUx8-W%P z*HUqj5BnwcLh`_!iu^OW_@n{=uNdvl@zTMZrj=RA&%x$dcM|0ni?p#5+7p{(Jt@Zn zG!HGFk?A1?JC3cxqI3yGzerFyqLx)Cm%}t z?tJmnk9%yrX0|1l1#J69sHq6-o7>>}9+k?%9wYgMXg%3q*zU~d5}B|m$W~4A1~Ugq&&Qz3B`qTAVQRnvgWoff5VQnvqj*8^dbIa zA()_m732dqg^X|$bR^UtHt9wD7Z2sfA>d{cF9-cg%Z zOr}*gO8*Wa#>#u7NR4`EFkzq2KuL7Of-c5lpH$?*Oy4yG{p-ZS1j)_ zbMQ*JJ}UpD1TUACFIZF$rplc6BFyJunrLBiow!}Ca$>MW$y+k7{xkrBV5-up&QU;O zmGuz=FhCO9qs9giI*mitRSas*b_)@W&5pto z`3^NenzHylJR;vOBzzx15On-Bx6BvNnHiY@3ktZoXuOlar?A#c8&Qal3OToqiCxev z>bX)XZ>~k7@@7z}Xd{qnBml%vWS!~1qpC<0MC|N!{gL0EsXkOfOy)#PZLPC@L{$L{ zXR}5Z>$QW@o;4&NYfnqR=kFtFcv zZazbG2%+oQxOO0FuJFr*C@PIK_-z)^>2t?EM3BsHFV~GP z@jdK#!DWSAN+|BHg{r@8b39K447Nt$M+H5k(ni17j!j_Xc3l z34ERZ(|-Zr;gf4%-xWkE3&r5}OFDRNcvQ0`Se?3rsWvz7A+)oBrzD$}vwmM=Ag;y{ zJNymr;4hQC1fMZ=3G7{_mg6x1OLS}qwp3{d@Ey_-uwPLkbB<*mH(ju;L`zQ+670+# z%~{LE_t%PaZr?Xn6DF7>1G4zbu0AFDlZey+8U8dy=(}O+GZ@%_XJ+pVw5(5eOnRHQ zN}gPXJZKRUnZbaarz<$j$zfk5$US|3qOgxW2O)Wjmn|JaN7&s5j^Z!^YgW{=s&A|} zFXJ=mBT09+Y{^YD1g8qekw7pI3r@Y@ zyq!#m~10$kJjyjGIbiKW7?W=>Xm4wavw+~;OYp5jYAgV9Lo zb`seK+|J7e`|ep0AGIqa>w_TJk-t_t;KA>@U&gmfNJR}yx;VtI1v6k5o+=DzaC&fb 
zKmT=jBd^;;p@&ve`?30)`9fVUN==ObIfT@G`_D@(@bJ$9*NNG9u3L~Vwb0>Lg|ELWWbjIqpGAhbNRB1 z`%x^kh6gR**D5YK*a)XJyIf}-ZzJuhF}d!NH6`y^f_k$51I6P3eR@^({DcdTQeZEl zKd2$C+j18`=$qM`wQYcd0OK?YD%iBT6vMz%@u)7FRj2tzr75fZ#Nl*lXU7p>;LPp3 zZk(DS!4&x!Gwe)wq2p@In5MTPt4IqqPc>jz7Sd;!Tvsg?w9mjadotuEWPCp+X zBqr79s$NL&ga7A_x~U9Z1hdy)>Z@c}Ck|g;lSF3%`&HculK#q06S@qc%x}gVS9%^T z%28>7c3h8e8`VQCZ0pj!-h``o$VwfB=bhZC_BZ5E8{Q*HCdJORVFzG;^;#c3JNNi|@!%Ck-iZuCa6t_}$c`;R-dP(s4AMb(C7IIIAR}+kK9Q*%i8%Ry!VZP+9nGmfXA?Q9!ev zG5v-t8^0R4N?i}4iS>0;#%}*<3q3P^tf-nk{$><>|DstWCYIOVO_nY8z-{R?EGu6p zOow#094x8li`QHHy@}aunE`S=xl_@MGZiuzWF6%4g~vs9E%A8{sGrAl0ML-ViJICx zg^#ZaPhcn@j)xd_#IWyBTsjqj4Tush{zDoHeGtdTUy-wnOt1tUMixcK%oq}MSt71G zJPNRIA!DvrGN_<>vX}-=2ca-#hhf|dZZqj9ugz+{2QDBQ6A?lbsp*QylNHaJ-r?us z{t=@Q3w<*ykZUC?MhUvcorrW3YRvcpn_VK_JFfpOx*Sar#NP4o4uP=>m0r?jWa4<& z*o$UmiJ&5&q;}0*70wV<#Ix|=B}+BC9lbR~Q%zfN3k|1GZmM*h{N~* zO>n1GCcub99!NL4KF&j@Rtd}({o0s45=ZZyh&d8mRnMVoYKUn) z@w}_2;(2e1UWnh?og~97m9fCPQI0_JS&HyHh8>1rFA_`afotMSdO51~WI9Q*pW%$7aSF~)9foGIlJm=s}bb<@iv3yPHN40j`16VMwjFiC^om$tf z|AI<#fA;spwmF_O+-cnNwZvxElEQ5&p8aDq$>xqRHQ1UWA=Fpjmcf8=qIpwji9Z)D zcmW?6Y?{T~`&Nu_@52k*J4>gu!0Ze4iHYxpv%y#$G@8zRqByi7z${Cc4?=KiIq^m7 z4Nr{L-@RWziUJ(J1*e0~N{L>Nx=Ou2!RPzJjqX-wmD$j0`ID@w(CcrLSB0CVHmef! 
z4=;SGvuA1v;WK7CU_K9&rO+*nyl-zr9jMLo{Uq{~c~um5MV+X2itbCwI1{2aHz;f2 zFg&(H5cL_`^mWQ*lP90J&3!`aS?W5G;DO)jI8jpJb9~@tt+>C60>_KDEM$RMT&w|F zxgb@L1$rLxpT|gOITqUiE7FX`eVDvm+zZndk%5Nk54) zqQ(4o0=5-Guf*nWymv^>dgj)g z5Lu_b(I1!S+saMGpdL0vcpgIkZ;LA`E5?P9&YuU(IHmkWsU%;50aLMqrT(s+?{6e$p2O`a(dKV_`-F6v5$Ld3MUYFW%7@U>71xc!ML%atTNd>IP~}LF7l41l8diWe z95)e(w^o&a1fC{24^t=B)RCvm)b~{2=)$JYijEca4mc0&*07yxlUFTFG!b zg{_&2OoAEZ0&UikB+*9yn{|-iks5&VlocF5TwZdVtC6_koD{(-# z8{1T`Pivgm-po;^fA~n)UYMaWiZ&X?+KUNI)=^)~KVxI4eUYTJOvQNZgSKuS8x-W-M{5ecNNgnrAe~5?J&Wkh!Jf?+f)=0Jp|lmkuzgju1@85Ln9zVzSMyzoT?9mzne{Q^`6FGY+ISgOg0N8G_)5gr(D| z^|0f=-FVHH_W|ay9kSDjmmIx>TR7-Cj06d7k-aa0f6%2~MmCa#0dblWij}d+v!y%r zE!d40qe;a}a zfHSj(==-PR@h6bxe-`cZFGJAHTqdm@Gm&K31N|vB@0e6OE1TdLFd1B=sJ(fREKqN% zHEhmHfYfR3x#6dy5&r}QtO>SKbH5LS#|-Fz4NONo`Pw!1WVmA53HEC9i5*foBc6w6 zas8#<>YGk9?%GIcuaP4Cx4y|ddK_k>Cx>j2C#?|Mz*E}`< zBGy0-c8ix?Llv?uRf{h4i|VLrv`M+Qg#UVIAgV&BG;&?p)x`xqyiG4cLKGd+MJ&oB@=R(r$Jbg%&TrkelY^Z9*i(u) zqZfp;Cd4tvP5nGGCWxlnxN|0ur`O(6C$Oe<+cPI56a1U-@7&2sP6Sfws3_lDIF2)h z6{(*-lXw+YQ-X`klwAqrv%_2rnq3KGZ(ZmCC?@8AKeD&PNo)x24yFu_>)EEuej8Xg zO&4z&6gy2%{xhI)Qa&(c`GyyM1`6N(2a5p(i!ng$;n^ zHXj`wzFo=iF2Xp_l(T5I&hf?`RONDMFy^%U>(D>ge9wbBI(4iDV;^0`|6Nliy(L^jp9+im)Ybo~DZc+xQ`Z{lndvDuHoKWQ`tio; zDf{W#>fkiJNdWYdmL2<4EBn>~$o{6Y-rKmC{ZH*_Q4yF;G>|03>~JqGJPpG!S3BMq z8M`I8{K9f>f^i}gF*^iN8lW`(1CgCx+$x#bDN!+yrl^;*&M?%*0ibIPvF8*bu7oJo zBwUBd(1XeFJBlJFAqP3>A8e9H2!(M1LKD~(Zcz=v>J|pwSs02ZS3rHUOZ5|Nkfs@BdbutJJmMP=>lAmc>_Y z#wA(T3;z6u#aY+Bmc_m$Su7k|CB7%vsv&sINI^(Ss+hf_>A8Uf;31^=L(P!ggj_O& z2v+z}986hB7^*#LykDm?NPl5!-i?I$_{LnPUAt#a1n*OfFWnT4Z@iAFXTXtNDZ<<> zxIO!6DGv^;JrF9d$8RF)K6|)3Utd#JIxn|s{GRR{mp=TCLKBQ%0e_AE;RZAKhWCj} z)Xo(>ev+0J5Bjc_y6<(MS)Al7Wp|T{B5!q}eEuy-x8)~p9AAIStXoz#imWQ53oA!d z65dJ1lB=2bt|F)Z+b?J@KA2R3=~-bdgbK_2_S>(ouvF;22-5a(NW(j4!Ak-dCDhvf zl2iyth>TEP>Mhvw0m@+TW@bqF(~ziddKY-aGnhuHRrvUB}4Y%@4GS= ziZ2A07W$gPD_=_gY+7;k^4!xVO2IO@C(kORu{e7eQxc|=as1XWlBIMS7g}i6d3*f(E{z7zZ`CA zv7F98%BS$PBD+!Ife=X+jj>TB?Td^rMR7UaUHTaWHmMzYBUa&1772`xWgLqRNF9iX 
zvOTKMmVh{AwvfHDfFubUmnu<1;P8|+ycz|8dPJ2Qe)XPPCmJF8ws=RvtC98iva;pX zI%=0?#$-#gdOA2*ZJ3|LV~wR0+vRvN-ZCqjpUrlTqa; zv}CN$ujO%URJF16wYhccvbj8_;;&?5*r0QtynNii@9DYB_@Y?%l`Y91b}OebL#Oik zE1xJ2LKv9lYB`xr^}eyOr!RETx|)MUC!@OtWFTQQxo~mo(ThAFO4=@`7(fV1e7+aI z@>>qt*$N83ts!Tlp~-=c5G5}|ZYD60OT(3hT9D_GxBPw|KD&`~Qms=STTQB4i@7|+ z!Oat2xiKxlf8(lGvI{E8cZx#Zs*B7XZJ6VP!qe7VCN6OZ>G-Pbl7c8H`VliVE@{GO z#!trfyq+2-R|Vn(BFU;vsJ+N59%`d#neqG*yJvGj=y0h8ya7c=y$ z&8P%lsMXe^dUvciq>y%lE>d=jk=n1`aOwDQM#5y0sf|^WUL#V8Xo8jB8qDAAevw8t ziTyp?Y{vpGcrs?&YYH{>Fol?pwpAeGomp*4`+vH>n|H-Kzau{om#Br+o!`)zGRVsb z9k~To+eBg3U);a@ef4;aDt5>uq^u@MxlA!_;bX{PD!@|BI?B+U=Z3H#a=}&#<(&p9 zBe|v4prKb|2{a2dDHJe-46yu=L1nQQ$Spvj$z~uCCf$f-`_kjEQLPYw&C;l0D&xi- z+l!Q67;vbLUxddA(Y(1XXmgZLZepmg+Epf;UWRW7W#U_Up^yL9012D+Ywre&R*4T& zCB#Yx$R0tl_uHz_qUGU0UyWJaQFTWwt9)Dk9Hk6)r57FstUP)bP7u1;mkDzGa_wNUx4)$ONGq2Flq1(^bEp zQn=4cB2rYVAYU5s4gVf6J|r$bWWJw!DVHP-ix^ezXjMRr7IJsDTu2qeUA8dZ#zp6ip7#RhpaOe zj8*!#-pS?OH2Hv;hC1g)YN-GBL+9%Cpx7MB->2V zeLD>Em8GfBv~Kp@Sj9vo_3)e1;%bzYt|88Zjbn0o+ET;QP2FSu8fg0gjoC9}uoiYC zspO&%_!Jh5lwNJkzNOhb#OnF=D@`~{)BSvY--Bh%9GVT!A71j&J-3}`=8JZ4EJ`_8 zCm-FqG1i*Sw#bJg?ICU6!1Vj%Ya7-2YS9lwS1Loz5ycnuf!rUJASVsE036Pd0*Do< z>tFJ{^x7kFrD8bAXjxTdhA12zoZpO>^KaY@TgVl;yjF5au>#~3f7YI+N;!6FDrETa z6@h=6){mg8XjTUbH4L*YP~leU)r@;+wu~PSm*>(Mu%n5TN=Gpembh5{YPOb+nocB9 z>gxzC1(0(sE~EDAk?Lpj0m(+Rkc}KC2z+kId>3}G%d_v^zX<^N?&35&{TOrt8`&1?Z#f~qxgRFl*Ua16GhZb%-?&kgh7Gp9v z8$W87eDhC(E6~ex`O5 zUTx8hoI}P#XmCYJw&zq6!B-B8Quz96J}ZaAU)aT1I9|P>G`v)q(72$s-Ez2;ZHF)_ z^rdr%VS$%68WNpcf~^BVd7N|2P~5FJh!n?e6q@4Zt8d1`mQg`5+~ zl5huu@>qYbM((Dl;UbHYo`e$p(nWPzK?27>R#8x*e<|(IFB98>$F5gVkNDxSK3?5B z;zq2s5F2OWN9b1S+=^O|taDzx%2bpGxg%UHd@9C5Z3Q>i(w!BW_Q<=xwu$nqcJ;PX z3jJ4AD_Qn1HIZf2ea=9IC^rJ%J-oRVvoAZ5it~YSB-7 z19jZ0b{-<9$1kqpwF&Jia1m|rT%-A2NO&xX^Co6%mC9{Hw}mb0KdL*+sJNP?Q6s_K zB?J-z!QI_mLU12ExXfTdg9Hx{JU9fG1PN}z-2)^@fWaZS`<=Ys`<@f>oduk`?w>o0 zX6-*yPj}Vc-PKjKdx~r-(PA?9YLSR2_6s{W7gp6osaNurq}5J@sq^SOg47j#J8N+Y zt`aIYxTpPMqb#!NU6~LbV!qHW-O3D6*F@hs=_L@L{nlaG-jn+lu|VATL*$G=uVyOL 
z*(zbdp4}|bfdO4uD@Q749R`Uns3j2Mb8&exGwv#`4 z7GJ2RpSq}t3Z3s9j8Rhwkjb968`#&MVOh8^(S2(@*6$ZFDrNO=}D9M@85+UrO) z{wJ*kjU5NJ&n5*O@pR8-3$8~NlJgZp_b70wv z&3DJ9X7WvxKvW#5G3fX(L9c;jtbByZ*L`4$a5RCUnC-0;T=CSe3{-JcJEBf58Yb62 z4vyWzdrMksg(!u`5UBmJ@4(PrAc?3Q<$b*~D{FGj2Jxyhmub>Fm2kC1Q@TO` zRqsH*(>RIgG4@c}vjZ8OUgc>&`@tYfjhz70uj4OY*W%;VW1D@LCIT!)Bpf-``Ono= zeaFy?_l%80)>^H0o1E60eo6p?6S6pCq;Jx^o*uoTHZmRCzyDYk4kIbLgQkMffs?de zWX-Z?)~I=kbu`4Hdo5P}Bk?Pm)-%!3z8u}AEt27%GQ_FLUD}>9h(tE_7SDx|n!ihs z5CwXHzgRp85Q_}y`ss+yU>YSwm0DxV-#J;#Er(8QL_%!pa=2zu4{v5WF$d#}oz$Ea zUCe7c1!Y>n<~$WF6t^r6OIf*m{vi0=La$`X7Qnt_VM*k?gksohlsRlCtl|1?wXXHU zj%S6sy#R}Dg%I~;1+{IJf-yc_-)RC2zEeZ6A)Ex7gqVlfOd=#;uKwb^(R0OqZl-1_ zHTQSVW4CAoNXF<)Ld-!zJ^IW{NAlJ3l`I3g%1OQEqo>vitxSv??FA_ySGLZdSR)dxRs$(K z3ZZVF*(XkHFY}nUA{l?s4Fajt^WhzFx^ng4mA_U#dSGg25<|z#$yBU%s+K3TQ1`Vm zCo&{`&gi^on!g3lWnhz=dmp1fS~&(Y0bKLZb`+7OHSuJ0h6}YpGaXm?L-Y}=lOs4} zn5SlppuY!ak?aUwdbh{CC)%W@@&|k@@F7jjnUYWj_uff(p+8S1yL`8j*rMVRMHx#z zN2*&(7P#Mfc*B-!#L`QvhAaJQRd>rQMXf4HWE=+5 zx}X%P&81;)E;5p*ZP@z97rbQ~7+j7M!KE?cYHg=EtD521kgdPWzHHOD2Ezz+jC zxrZ5O#bCIfBA`G)q7Yg*2~6`S%i`|9$4kgF-3V;?99#+<6A7U9vBD~vG}@`P<300yx|NKCY_O9u%O43TTBWYe_EG0^CVlBy&chIh-DJX z6gsy%p_cjWB8BOesI;JX7a4yM|MjB9BW;mzsE7s6%w;S^QHKS?dlb2`_du(vxPamSLxsAJ(d^Ck%CC( zE^HkE1(a2reX?T_GvZRkI#1S*NRxnkWLtQSlzAQ#(~x7?xjl~tqwwK?$MlQepM5;8 z%~%!~R%bQfF_YEMoSGD?J5oC}Yh$M4)0$0e=Cq{4dd-gdV=b!6O*0(cM;9nmxe!wF zB)=yqLw?9dr-w>$!E$lG`w5c$1#Q$&UJ#$GCLw3SNck$VYKqWf1L!-=MZK#f;1VU9Eviw~*nyq(1XHwp4M!)jy0@I6!dm=% z%mH4G_>(9?f09N;?5imfAn&&6qvi>kGLspL^3hIg05ZupNOq>ckBKR#tfuI-iv24e z;ioe3+L0V~yC3!3R<6F+e87Ln;R&V*aAxRjg@5-6d8p-G|9SJ~qoSUN-wNBpglDw$ z;{~va&-A6y_UO2PjNcPq1gU3p9jUkW(~ePmoRRM+L|QMmnug}qPVR(j#_Y^G@rdSH zB?VC)nU{5>PMA8j59UVfRh}Iz^CW6{fakpniTOZYPtoN5Qqv=1dqX*8; zYqhgKIS*pmA+GpR-X+jvTbFtfJ^!gveo`lPiUv(LegGL(a$iUyvh&i_6s$9LmKQ= z71V+7Mf|f!z3bR0SXuQ0&?w+ zT1QS+nYW|>iNP6CM{Wgj^-oK?I(~F$J`oZ(=+2C!jXsQAxH~Pry>F0p6!nEs8WR`+%m*1Z^ zdR@=WQl@!lWL)Kt)vwps;iLdu-))Abxo)87t1LI|%5GHsxEQdznnd|hRX=m796SGZ zcfbl-Vj~SKB%<5lH9sf9S 
z+d1|VYV)sxy9QH{ov)BJD4NrIUxL!a$Y;+%XM?HkN6TwX*AjzDLRa%ESJk@(O&%+` z`nu;0+n)rLNN5_rT^+wvY`UDabKQJiynQB~M|?TueNin|coMxb)e?U&v+ccy<_$U- zAFSQm+gI71ZCW0)bDh#a@mhl$^g7$zM)Wc~luWyF+Mng{KK?EQ^q$8^2|stQ0-O&- z3$bqFX>@}ej3HFucE23CKQTEDF*!K}dC})G`sd|kdmr`%lSF@}Ke1Gr^1bqn^Tl^? zu-IZkybefbFfS}lZwPZ&Jadg-Nj*MYJ9Rd_m}kVvohx%&i+h#|*78Pb@FmzVN0OH??Kdl^D=Dv5T78%(a8& zfiv4ywZp(4`f}IC|mg94=8nQ7p>&8B~4uh#y~G+;&o zbfezK>yzlmXhmt`MrwBdhg^ewvufujScug`4Lb7HU& z12+i2W}QB6Mn+8H!=okiU0%k`yHL#Yfa+3E5+PO0OFx8u+s%}24L{N9dHqMyaX={8 zlos+Ry51*2#D1r0e*R;&~1aJhzjxLNE+Alih&PBJf2)Z)>p8HSG(Gam4~SHBqY`w!SeYAH znpqfFSesj0TDSC(rm5$|C&wXYhkh4!ba?iGpMOqvFH~7&npFj)95S;P3dE5+0Pd?O z*CgxuiGlrKb@lYn&E>tCq2Mr5oPn@^+?=aZ)HXEtwE)}yGYb&^V}Zwg|9=^v`esImGSc?PMSi2a=@*9&hbKaF&>ug7D6wh+TY-PgGcZWn+J<&)PC-4sdlP<~R&QAd1Mac}Hl?q7OMcFfSBsEm;0T+=EGikt@;u zMHq?`-WRX=+!?2(6i;cIrh9BkB__!Y@#N&2z#DI@yXi~R8TL7PhGi5)Uh`8_1|<_? zt`sfP_!g?}{sgP9xH_L7>!$P%z|3nDw|F>=&}XwADwaVDl=cSd(=4IdLC$MzJf0dP zj^Bt632hBuS!uf}z^EU32>20Q#!%Qj`(l}cat`RQa`hcX7s1HPjBqtRpYTLCD$RT)<(2lY#s zcskvqPrue{sa^-_EQf26c>}L?T+GMm;&kaqdxnarI7DAR)(zHo4pv@&3|49q^F!Zs z$(tV{;@NpjDXcfI1k7$>O^6VS|2=Qb|o#NVpG36uMeYo$2M?p~G=R}b7 zN$ethOCz65@uC{lj{;Tv!)o!6f)WjCB@e8`6*>%MBvhmT3c|R@uk06V3VxPqAH(xrJ?_A3qam zfmX;2?eH+bnKqA3;T*QkoT6w2D~gIh>pudkC2Qc+CoYaz6MUt~uzH=mV4NiN@XKbE z&!*fDcyNms0eY={6UswI{)I6T~IGwFM9fwd+T|8ms zz(+O?c4k7^+A(pPSE+`snn$q60ZiwdEWB;U7X*zKFcsF$c5dYZ>Q~7?-Z91e;_C`7 z0-|TUCk*wi5oVo+>Mws5Eyi7MrD-&Qs%E`FNLO$-y-}y0MuAI+$M^X@I}DcF=Jdb3 z(Zzr2UgF*-M*nhZ0Db?-t3ie(aQATVvUmZcRf?Mz!euV$h*!nWVr6A1`{J)@0;?SQ z#SCtdAl_s-o8a$dD%*z!J;4dcY|41rZ5Ygxo&Rm@{T%DZYIX*Oa$l?}9%eZD-R$=? 
z^73K!QfydjqN1Fjw)kv(yqC)>EU>L8w%nyx0R6=HDBna*aY;jzs(Bw5pZQLuuK%snpYZ@leOi)sR6>+yRM&rS@0m;3 zE&^ZUqr+3@K{=daKwK$kobTE|Fv1`GTb3tZl7)CurI1E_TLGPosiPr-qbbM{z`|%^ z=y+RC^}?GJTbVICT3`DUYHAU!3CHI6bJD98P!~+1#|j&TCNG*Z@Hmt_N~cq}4rd6O zFcb!3xkBNSW;o%KBnKxo)}3$F&9)Qp@PBy$^_=PnBJN`!)Yfl2Oq<(1*{M3Fh1NZ5 zu*~5W2k(GB57;eGhr|3Zxryy-&dTFF;iajg3xVl6JQaf^qxp%>#q0GJQZfFd%<(^J zP>p;&+a4}&T;q|E4y390K9(R}M7dlzzUS~dnOS-}k>1AaY zbxgL#&*UC}=Gx;)Z`5s*cVP4J=`EF_d1uTLk`XH>{Gt-o9E@epV=8*Y=oH_N^cKFo z=>3Lu3!F-6Po_ixOf@ypTLCeR97{e=ae7sgGEBVp%d;nDeDmyvf?WrK(40~Gm`L8M zu8@og_%eD$*20SGX zf*i0Hjf`fa2$rzHd%#3{K$7#_qP#lEe@4LubJrjVe-hk$yaPr#fUZAB)^Aw**-nu~ zu)@sd!FI7f*q!@rCeHk|4|JykXE=s)rMY^PWH2>Z<_Led;IfZ&g~(Da&C8kItsPm? znM%8@@+<0%{0%b%c)xr}V^{W}19iJPT17dqFQx-jjuOy86_u#Aq70)W4>WiJrPpl&Gqr=pkVPFTp5qD-hb!%&=t61yk@1TrI9n`4XxHceMM z;PBqZeR0GhaPvaiaur?brIX&5!1WU^@5u{TB{^sqQdnfjGYj=sndx5_fB%DnBK-Rb z6Y5gr*X3VDr)sKj|5ID~M_uWkqO7z-1Ahtl)sX)d1+gd8XCI+|MEzB_db_-TSHKds zU7cq`zS#n4xc}q&G3 zH$+c-|4UcLkh<7g887e42-?Lh$%M$4fQ;p)Jp%FXV3^&P;hu96;!6VsM9&E1Kw&6C)bYI5JR~df4cVl>qBqe`l=n-NAqqy5Em+vqJoL z2Aseh3`o8B{TMfsA-^-cl<#0bQX}_c+zihD&QMmpg8>QE-;Z%K$o@NH>n;W) Date: Mon, 14 Mar 2016 09:06:11 +0100 Subject: [PATCH 210/320] Fix test bug: norms are on by default on _all. 
--- .../elasticsearch/index/mapper/all/SimpleAllMapperTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index 762a62f37561..501c538b8707 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -223,7 +223,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { } public void testRandom() throws Exception { - boolean norms = false; + boolean norms = true; boolean stored = false; boolean enabled = true; boolean tv_stored = false; From c90b4f3bae4a87c8618b91db087cd85ec2e1eab9 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Mon, 14 Mar 2016 09:58:46 +0100 Subject: [PATCH 211/320] Docs: Added note about upgrading from 1.x to 5.x --- docs/reference/migration/index.asciidoc | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/docs/reference/migration/index.asciidoc b/docs/reference/migration/index.asciidoc index f8d742b3b673..0dda43da7132 100644 --- a/docs/reference/migration/index.asciidoc +++ b/docs/reference/migration/index.asciidoc @@ -8,11 +8,14 @@ your application from one version of Elasticsearch to another. As a general rule: -* Migration between major versions -- e.g. `1.x` to `2.x` -- +* Migration between minor versions -- e.g. `5.x` to `5.y` -- can be + performed by <>. + +* Migration between consecutive major versions -- e.g. `2.x` to `5.x` -- requires a <>. -* Migration between minor versions -- e.g. `1.x` to `1.y` -- can be - performed by <>. +* Migration between non-consecutive major versions -- e.g. `1.x` to `5.x` -- + is not supported. See <> for more info. 
-- From 8e6b2b390981bc240d7a204fb7250286c953b590 Mon Sep 17 00:00:00 2001 From: Alexander Kazakov Date: Mon, 14 Mar 2016 12:04:06 +0300 Subject: [PATCH 212/320] Check that _value is used in aggregations script before setting value to specialValue #14262 --- .../script/expression/ExpressionSearchScript.java | 12 +++++++----- .../script/expression/MoreExpressionTests.java | 11 ++++++++++- 2 files changed, 17 insertions(+), 6 deletions(-) diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java index 0f56adeea55a..3944090cef21 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java @@ -112,14 +112,16 @@ class ExpressionSearchScript implements SearchScript { @Override public void setNextVar(String name, Object value) { - assert(specialValue != null); // this should only be used for the special "_value" variable used in aggregations assert(name.equals("_value")); - if (value instanceof Number) { - specialValue.setValue(((Number)value).doubleValue()); - } else { - throw new ScriptException("Cannot use expression with text variable using " + compiledScript); + // _value isn't used in script if specialValue == null + if (specialValue != null) { + if (value instanceof Number) { + specialValue.setValue(((Number)value).doubleValue()); + } else { + throw new ScriptException("Cannot use expression with text variable using " + compiledScript); + } } } }; diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java index 5246d0dc3064..1260919bfab6 100644 --- 
a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java @@ -383,7 +383,11 @@ public class MoreExpressionTests extends ESIntegTestCase { .script(new Script("_value * 3", ScriptType.INLINE, ExpressionScriptEngineService.NAME, null))) .addAggregation( AggregationBuilders.stats("double_agg").field("y") - .script(new Script("_value - 1.1", ScriptType.INLINE, ExpressionScriptEngineService.NAME, null))); + .script(new Script("_value - 1.1", ScriptType.INLINE, ExpressionScriptEngineService.NAME, null))) + .addAggregation( + AggregationBuilders.stats("const_agg").field("x") + .script(new Script("3.0", ScriptType.INLINE, ExpressionScriptEngineService.NAME, null)) + ); SearchResponse rsp = req.get(); assertEquals(3, rsp.getHits().getTotalHits()); @@ -395,6 +399,11 @@ public class MoreExpressionTests extends ESIntegTestCase { stats = rsp.getAggregations().get("double_agg"); assertEquals(0.7, stats.getMax(), 0.0001); assertEquals(0.1, stats.getMin(), 0.0001); + + stats = rsp.getAggregations().get("const_agg"); + assertThat(stats.getMax(), equalTo(3.0)); + assertThat(stats.getMin(), equalTo(3.0)); + assertThat(stats.getAvg(), equalTo(3.0)); } public void testStringSpecialValueVariable() throws Exception { From c3cd8564df0ff836d996a618397a00a8a583abca Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Mon, 14 Mar 2016 10:46:31 +0100 Subject: [PATCH 213/320] Corrected regexp syntax docs for COMPLEMENT --- docs/reference/query-dsl/regexp-syntax.asciidoc | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/docs/reference/query-dsl/regexp-syntax.asciidoc b/docs/reference/query-dsl/regexp-syntax.asciidoc index e57d0e1c7790..68ca5912458d 100644 --- a/docs/reference/query-dsl/regexp-syntax.asciidoc +++ b/docs/reference/query-dsl/regexp-syntax.asciidoc @@ -220,12 +220,20 @@ Complement:: -- The complement is 
probably the most useful option. The shortest pattern that -follows a tilde `"~"` is negated. For the string `"abcdef"`: +follows a tilde `"~"` is negated. For instance, `"ab~cd" means: + +* Starts with `a` +* Followed by `b` +* Followed by a string of any length that it anything but `c` +* Ends with `d` + +For the string `"abcdef"`: ab~df # match - ab~cf # no match - a~(cd)f # match - a~(bc)f # no match + ab~cf # match + ab~cdef # no match + a~(cb)def # match + a~(bc)def # no match Enabled with the `COMPLEMENT` or `ALL` flags. From 31740e279f812f27d7f064e14a361a1213b3cb0f Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 8 Mar 2016 12:50:41 +0100 Subject: [PATCH 214/320] Resolve index names to Index instances early Today index names are often resolved lazily, only when they are really needed. This can be problematic especially when it gets to mapping updates etc. when a node sends a mapping update to the master but while the request is in-flight the index changes for whatever reason we would still apply the update since we use the name of the index to identify the index in the clusterstate. The problem is that index names can be reused which happens in practice and sometimes even in a automated way rendering this problem as realistic. In this change we resolve the index including it's UUID as early as possible in places where changes to the clusterstate are possible. For instance mapping updates on a node use a concrete index rather than it's name and the master will fail the mapping update iff the index can't be found by it's tuple. 
Closes #17048 --- .../health/TransportClusterHealthAction.java | 4 +- .../admin/cluster/node/stats/NodeStats.java | 2 +- .../shards/ClusterSearchShardsGroup.java | 2 +- .../TransportClusterSearchShardsAction.java | 4 +- .../create/TransportCreateSnapshotAction.java | 2 +- .../state/TransportClusterStateAction.java | 2 +- .../alias/TransportIndicesAliasesAction.java | 4 +- .../exists/TransportAliasesExistAction.java | 4 +- .../alias/get/TransportGetAliasesAction.java | 4 +- .../close/TransportCloseIndexAction.java | 5 +- .../delete/TransportDeleteIndexAction.java | 11 +- .../indices/TransportIndicesExistsAction.java | 4 +- .../types/TransportTypesExistsAction.java | 4 +- .../flush/TransportShardFlushAction.java | 7 +- .../indices/get/TransportGetIndexAction.java | 2 +- .../get/TransportGetFieldMappingsAction.java | 2 +- .../get/TransportGetMappingsAction.java | 2 +- .../mapping/put/PutMappingRequest.java | 24 +++ .../mapping/put/PutMappingRequestBuilder.java | 6 + .../put/TransportPutMappingAction.java | 11 +- .../open/TransportOpenIndexAction.java | 5 +- .../refresh/TransportShardRefreshAction.java | 7 +- .../get/TransportGetSettingsAction.java | 9 +- .../put/TransportUpdateSettingsAction.java | 5 +- .../TransportIndicesShardStoresAction.java | 4 +- .../action/bulk/TransportBulkAction.java | 47 ++-- .../action/bulk/TransportShardBulkAction.java | 7 +- .../action/delete/TransportDeleteAction.java | 4 +- .../action/get/TransportMultiGetAction.java | 2 +- .../action/index/IndexRequest.java | 14 +- .../action/index/TransportIndexAction.java | 11 +- .../TransportMultiPercolateAction.java | 2 +- .../search/AbstractSearchAsyncAction.java | 2 +- .../action/search/TransportSearchAction.java | 2 +- .../broadcast/TransportBroadcastAction.java | 2 +- .../node/TransportBroadcastByNodeAction.java | 3 +- .../info/TransportClusterInfoAction.java | 2 +- .../TransportBroadcastReplicationAction.java | 4 +- .../TransportReplicationAction.java | 6 +- 
...ransportInstanceSingleOperationAction.java | 2 +- .../shard/TransportSingleShardAction.java | 2 +- .../TransportMultiTermVectorsAction.java | 2 +- .../cluster/ClusterChangedEvent.java | 4 +- .../ack/IndicesClusterStateUpdateRequest.java | 8 +- .../action/index/MappingUpdatedAction.java | 53 +---- .../action/index/NodeIndexDeletedAction.java | 24 +-- .../metadata/IndexNameExpressionResolver.java | 63 ++++-- .../cluster/metadata/MappingMetaData.java | 14 +- .../cluster/metadata/MetaData.java | 51 +++-- .../metadata/MetaDataDeleteIndexService.java | 36 ++-- .../metadata/MetaDataIndexStateService.java | 47 ++-- .../metadata/MetaDataMappingService.java | 72 +++---- .../MetaDataUpdateSettingsService.java | 62 +++--- .../cluster/routing/IndexRoutingTable.java | 2 +- .../routing/IndexShardRoutingTable.java | 2 +- .../cluster/routing/ShardRouting.java | 2 +- .../routing/allocation/AllocationService.java | 4 +- .../allocator/BalancedShardsAllocator.java | 2 +- ...AllocateStalePrimaryAllocationCommand.java | 2 +- .../decider/AwarenessAllocationDecider.java | 2 +- .../decider/DiskThresholdDecider.java | 2 +- .../decider/EnableAllocationDecider.java | 4 +- .../decider/FilterAllocationDecider.java | 2 +- .../decider/ShardsLimitAllocationDecider.java | 4 +- .../common/io/stream/StreamInput.java | 4 +- .../common/io/stream/Writeable.java | 11 + .../gateway/AsyncShardFetch.java | 2 +- .../org/elasticsearch/gateway/Gateway.java | 7 +- .../gateway/GatewayMetaState.java | 40 ++-- .../gateway/PrimaryShardAllocator.java | 2 +- .../gateway/PriorityComparator.java | 11 +- .../gateway/ReplicaShardAllocator.java | 4 +- ...ransportNodesListGatewayStartedShards.java | 2 +- .../java/org/elasticsearch/index/Index.java | 14 +- .../elasticsearch/index/shard/ShardId.java | 2 +- .../indices/NodeIndicesStats.java | 2 +- .../cluster/IndicesClusterStateService.java | 23 +- .../indices/flush/SyncedFlushService.java | 16 +- .../indices/store/IndicesStore.java | 11 +- 
.../TransportNodesListShardStoreMetaData.java | 4 +- .../indices/ttl/IndicesTTLService.java | 2 +- .../rest/action/cat/RestIndicesAction.java | 4 +- .../rest/action/cat/RestShardsAction.java | 2 +- .../elasticsearch/search/SearchService.java | 2 +- .../snapshots/RestoreService.java | 9 +- .../snapshots/SnapshotsService.java | 27 +-- .../org/elasticsearch/tribe/TribeService.java | 8 +- .../bulk/TransportBulkActionTookTests.java | 2 +- .../TransportBroadcastByNodeActionTests.java | 2 +- .../TransportReplicationActionTests.java | 4 +- ...ortInstanceSingleOperationActionTests.java | 2 +- .../health/ClusterStateHealthTests.java | 4 +- .../IndexNameExpressionResolverTests.java | 202 +++++++++--------- .../gateway/GatewayMetaStateTests.java | 5 +- .../gateway/PriorityComparatorTests.java | 19 +- .../timestamp/TimestampMappingTests.java | 2 +- .../indices/IndicesOptionsIntegrationIT.java | 3 +- .../indices/settings/UpdateSettingsIT.java | 3 +- .../SharedClusterSnapshotRestoreIT.java | 7 +- .../messy/tests/IndicesRequestTests.java | 4 +- .../index/reindex/TransportReindexAction.java | 4 +- 101 files changed, 629 insertions(+), 555 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java index b5c9577aff7d..39d6a8daeb04 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java @@ -213,7 +213,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction< } if (request.indices() != null && request.indices().length > 0) { try { - indexNameExpressionResolver.concreteIndices(clusterState, IndicesOptions.strictExpand(), request.indices()); + indexNameExpressionResolver.concreteIndexNames(clusterState, IndicesOptions.strictExpand(), 
request.indices()); waitForCounter++; } catch (IndexNotFoundException e) { response.setStatus(ClusterHealthStatus.RED); // no indices, make sure its RED @@ -280,7 +280,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction< String[] concreteIndices; try { - concreteIndices = indexNameExpressionResolver.concreteIndices(clusterState, request); + concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, request); } catch (IndexNotFoundException e) { // one of the specified indices is not there - treat it as RED. ClusterHealthResponse response = new ClusterHealthResponse(clusterName.value(), Strings.EMPTY_ARRAY, clusterState, diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java index c1d4bb78ba3c..db017fd3f6a0 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java @@ -235,7 +235,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContent { breaker = AllCircuitBreakerStats.readOptionalAllCircuitBreakerStats(in); scriptStats = in.readOptionalStreamable(ScriptStats::new); discoveryStats = in.readOptionalStreamable(() -> new DiscoveryStats(null)); - ingestStats = in.readOptionalWritable(IngestStats.PROTO); + ingestStats = in.readOptionalWritable(IngestStats.PROTO::readFrom); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsGroup.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsGroup.java index 9f3ccac8f642..1b329d172895 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsGroup.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsGroup.java @@ -67,7 +67,7 @@ public 
class ClusterSearchShardsGroup implements Streamable, ToXContent { @Override public void readFrom(StreamInput in) throws IOException { - index = Index.readIndex(in); + index = new Index(in); shardId = in.readVInt(); shards = new ShardRouting[in.readVInt()]; for (int i = 0; i < shards.length; i++) { diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java index e6a9d98eb178..f8868e94bf0d 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java @@ -59,7 +59,7 @@ public class TransportClusterSearchShardsAction extends TransportMasterNodeReadA @Override protected ClusterBlockException checkBlock(ClusterSearchShardsRequest request, ClusterState state) { - return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndices(state, request)); + return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndexNames(state, request)); } @Override @@ -70,7 +70,7 @@ public class TransportClusterSearchShardsAction extends TransportMasterNodeReadA @Override protected void masterOperation(final ClusterSearchShardsRequest request, final ClusterState state, final ActionListener listener) { ClusterState clusterState = clusterService.state(); - String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterState, request); + String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, request); Map> routingMap = indexNameExpressionResolver.resolveSearchRouting(state, request.routing(), request.indices()); Set nodeIds = new HashSet<>(); GroupShardsIterator groupShardsIterator = 
clusterService.operationRouting().searchShards(clusterState, concreteIndices, routingMap, request.preference()); diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java index 457b6e693839..3bee1a74e1d0 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java @@ -66,7 +66,7 @@ public class TransportCreateSnapshotAction extends TransportMasterNodeAction 0) { - String[] indices = indexNameExpressionResolver.concreteIndices(currentState, request); + String[] indices = indexNameExpressionResolver.concreteIndexNames(currentState, request); for (String filteredIndex : indices) { IndexMetaData indexMetaData = currentState.metaData().index(filteredIndex); if (indexMetaData != null) { diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java index d37053e056b2..7cb7b225895e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java @@ -90,11 +90,11 @@ public class TransportIndicesAliasesAction extends TransportMasterNodeAction aliases = new HashSet<>(); for (AliasActions action : actions) { //expand indices - String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request.indicesOptions(), action.indices()); + String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request.indicesOptions(), action.indices()); //collect the aliases Collections.addAll(aliases, 
action.aliases()); for (String index : concreteIndices) { - for (String alias : action.concreteAliases(state.metaData(), index)) { + for (String alias : action.concreteAliases(state.metaData(), index)) { AliasAction finalAction = new AliasAction(action.aliasAction()); finalAction.index(index); finalAction.alias(alias); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/TransportAliasesExistAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/TransportAliasesExistAction.java index 5f92587f1385..ed14c51d442c 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/TransportAliasesExistAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/TransportAliasesExistAction.java @@ -50,7 +50,7 @@ public class TransportAliasesExistAction extends TransportMasterNodeReadAction listener) { - String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); + String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request); boolean result = state.metaData().hasAliases(request.aliases(), concreteIndices); listener.onResponse(new AliasesExistResponse(result)); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java index 9c2c2f03b573..ae9916810d37 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java @@ -53,7 +53,7 @@ public class TransportGetAliasesAction extends TransportMasterNodeReadAction listener) { - String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); + String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request); 
@SuppressWarnings("unchecked") ImmutableOpenMap> result = (ImmutableOpenMap) state.metaData().findAliases(request.aliases(), concreteIndices); listener.onResponse(new GetAliasesResponse(result)); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java index 5c88a8be3d38..7f77424e84c2 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -88,12 +89,12 @@ public class TransportCloseIndexAction extends TransportMasterNodeAction listener) { - final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); + final Index[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); CloseIndexClusterStateUpdateRequest updateRequest = new CloseIndexClusterStateUpdateRequest() .ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout()) .indices(concreteIndices); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java index 28bf46f798f0..c5e504b744f0 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java +++ 
b/core/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java @@ -31,10 +31,15 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaDataDeleteIndexService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + /** * Delete index action. */ @@ -70,13 +75,13 @@ public class TransportDeleteIndexAction extends TransportMasterNodeAction listener) { - final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); - if (concreteIndices.length == 0) { + final Set concreteIndices = new HashSet<>(Arrays.asList(indexNameExpressionResolver.concreteIndices(state, request))); + if (concreteIndices.isEmpty()) { listener.onResponse(new DeleteIndexResponse(true)); return; } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/TransportIndicesExistsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/TransportIndicesExistsAction.java index acda370d7ff1..08edd16e671d 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/TransportIndicesExistsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/TransportIndicesExistsAction.java @@ -60,7 +60,7 @@ public class TransportIndicesExistsAction extends TransportMasterNodeReadAction< protected ClusterBlockException checkBlock(IndicesExistsRequest request, ClusterState state) { //make sure through indices options that the concrete indices call never throws IndexMissingException IndicesOptions indicesOptions = IndicesOptions.fromOptions(true, true, 
request.indicesOptions().expandWildcardsOpen(), request.indicesOptions().expandWildcardsClosed()); - return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndices(state, indicesOptions, request.indices())); + return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndexNames(state, indicesOptions, request.indices())); } @Override @@ -68,7 +68,7 @@ public class TransportIndicesExistsAction extends TransportMasterNodeReadAction< boolean exists; try { // Similar as the previous behaviour, but now also aliases and wildcards are supported. - indexNameExpressionResolver.concreteIndices(state, request); + indexNameExpressionResolver.concreteIndexNames(state, request); exists = true; } catch (IndexNotFoundException e) { exists = false; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TransportTypesExistsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TransportTypesExistsAction.java index 2fd92451752b..f76b3eb213a9 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TransportTypesExistsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TransportTypesExistsAction.java @@ -57,12 +57,12 @@ public class TransportTypesExistsAction extends TransportMasterNodeReadAction listener) { - String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request.indicesOptions(), request.indices()); + String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request.indicesOptions(), request.indices()); if (concreteIndices.length == 0) { listener.onResponse(new TypesExistsResponse(false)); return; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java 
b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java index 302bdafc471a..5df7e9ad69b8 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java @@ -46,10 +46,9 @@ public class TransportShardFlushAction extends TransportReplicationAction listener) { ClusterState clusterState = clusterService.state(); - String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterState, request); + String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, request); final AtomicInteger indexCounter = new AtomicInteger(); final AtomicInteger completionCounter = new AtomicInteger(concreteIndices.length); final AtomicReferenceArray indexResponses = new AtomicReferenceArray<>(concreteIndices.length); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java index bff9b4e2ab66..cec337dd54d6 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java @@ -52,7 +52,7 @@ public class TransportGetMappingsAction extends TransportClusterInfoAction im private String source; private boolean updateAllTypes = false; + private Index concreteIndex; public PutMappingRequest() { } @@ -90,6 +93,9 @@ public class PutMappingRequest extends AcknowledgedRequest im } else if (source.isEmpty()) { validationException = addValidationError("mapping source is empty", validationException); } + if (concreteIndex != null && (indices != null && indices.length > 0)) { + validationException = addValidationError("either concreteIndices or unresolved indices can be 
set", validationException); + } return validationException; } @@ -102,6 +108,22 @@ public class PutMappingRequest extends AcknowledgedRequest im return this; } + /** + * Sets a concrete index for this put mapping request. + */ + public PutMappingRequest setConcreteIndex(Index index) { + Objects.requireNonNull(indices, "index must not be null"); + this.concreteIndex = index; + return this; + } + + /** + * Returns a concrete index for this mapping or null if no concrete index is defined + */ + public Index getConcreteIndex() { + return concreteIndex; + } + /** * The indices the mappings will be put. */ @@ -259,6 +281,7 @@ public class PutMappingRequest extends AcknowledgedRequest im source = in.readString(); updateAllTypes = in.readBoolean(); readTimeout(in); + concreteIndex = in.readOptionalWritable(Index::new); } @Override @@ -270,5 +293,6 @@ public class PutMappingRequest extends AcknowledgedRequest im out.writeString(source); out.writeBoolean(updateAllTypes); writeTimeout(out); + out.writeOptionalWriteable(concreteIndex); } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java index 28f289b86c66..c21c40cf041e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.Index; import java.util.Map; @@ -40,6 +41,11 @@ public class PutMappingRequestBuilder extends AcknowledgedRequestBuilder diff --git 
a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java index b82c5d3a6268..0d8accd77556 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java @@ -31,6 +31,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaDataMappingService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -63,13 +64,19 @@ public class TransportPutMappingAction extends TransportMasterNodeAction listener) { try { - final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); + final Index[] concreteIndices = request.getConcreteIndex() == null ? 
indexNameExpressionResolver.concreteIndices(state, request) : new Index[] {request.getConcreteIndex()}; PutMappingClusterStateUpdateRequest updateRequest = new PutMappingClusterStateUpdateRequest() .ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout()) .indices(concreteIndices).type(request.type()) diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java index 7ffb30b95346..b354c8bbfce9 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java @@ -32,6 +32,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -73,12 +74,12 @@ public class TransportOpenIndexAction extends TransportMasterNodeAction listener) { - final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); + final Index[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); OpenIndexClusterStateUpdateRequest updateRequest = new OpenIndexClusterStateUpdateRequest() .ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout()) .indices(concreteIndices); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java index 2dd41f7801d6..f5149ed8b23c 100644 --- 
a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java @@ -48,10 +48,9 @@ public class TransportShardRefreshAction extends TransportReplicationAction listener) { - String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); + Index[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); ImmutableOpenMap.Builder indexToSettingsBuilder = ImmutableOpenMap.builder(); - for (String concreteIndex : concreteIndices) { + for (Index concreteIndex : concreteIndices) { IndexMetaData indexMetaData = state.getMetaData().index(concreteIndex); if (indexMetaData == null) { continue; @@ -93,7 +94,7 @@ public class TransportGetSettingsAction extends TransportMasterNodeReadAction listener) { - final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); + final Index[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); UpdateSettingsClusterStateUpdateRequest clusterStateUpdateRequest = new UpdateSettingsClusterStateUpdateRequest() .indices(concreteIndices) .settings(request.settings()) diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java index 5e22bc891447..c040085a89e3 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java @@ -87,7 +87,7 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc protected void masterOperation(IndicesShardStoresRequest request, ClusterState state, ActionListener listener) { final RoutingTable routingTables = state.routingTable(); 
final RoutingNodes routingNodes = state.getRoutingNodes(); - final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); + final String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request); final Set shardIdsToFetch = new HashSet<>(); logger.trace("using cluster state version [{}] to determine shards", state.version()); @@ -115,7 +115,7 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc @Override protected ClusterBlockException checkBlock(IndicesShardStoresRequest request, ClusterState state) { - return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndices(state, request)); + return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndexNames(state, request)); } private class AsyncShardStoresInfoFetches { diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 707bf8de57f9..135147d824c4 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -47,6 +47,7 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndexAlreadyExistsException; @@ -245,17 +246,18 @@ public class TransportBulkAction extends HandledTransportAction list = requestsByShard.get(shardId); if (list == null) { @@ -304,7 +306,7 @@ public class TransportBulkAction extends HandledTransportAction list = requestsByShard.get(shardId); if (list 
== null) { @@ -314,7 +316,7 @@ public class TransportBulkAction extends HandledTransportAction list = requestsByShard.get(shardId); if (list == null) { @@ -356,18 +358,19 @@ public class TransportBulkAction extends HandledTransportAction responses, int idx, final ConcreteIndices concreteIndices, final MetaData metaData) { - String concreteIndex = concreteIndices.getConcreteIndex(request.index()); + Index concreteIndex = concreteIndices.getConcreteIndex(request.index()); Exception unavailableException = null; if (concreteIndex == null) { try { @@ -397,9 +400,9 @@ public class TransportBulkAction extends HandledTransportAction indices = new HashMap<>(); + private final Map indices = new HashMap<>(); ConcreteIndices(ClusterState state, IndexNameExpressionResolver indexNameExpressionResolver) { this.state = state; this.indexNameExpressionResolver = indexNameExpressionResolver; } - String getConcreteIndex(String indexOrAlias) { + Index getConcreteIndex(String indexOrAlias) { return indices.get(indexOrAlias); } - String resolveIfAbsent(DocumentRequest request) { - String concreteIndex = indices.get(request.index()); + Index resolveIfAbsent(DocumentRequest request) { + Index concreteIndex = indices.get(request.index()); if (concreteIndex == null) { concreteIndex = indexNameExpressionResolver.concreteSingleIndex(state, request); indices.put(request.index(), concreteIndex); diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index f1eeae35e08a..463f4ac23ac4 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -47,7 +47,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; 
-import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.Engine; @@ -75,17 +74,19 @@ public class TransportShardBulkAction extends TransportReplicationAction implements Do return this.versionType; } - private Version getVersion(MetaData metaData, String concreteIndex) { - // this can go away in 3.0 but is here now for easy backporting - since in 2.x we need the version on the timestamp stuff - final IndexMetaData indexMetaData = metaData.getIndices().get(concreteIndex); - if (indexMetaData == null) { - throw new IndexNotFoundException(concreteIndex); - } - return Version.indexCreated(indexMetaData.getSettings()); - } public void process(MetaData metaData, @Nullable MappingMetaData mappingMd, boolean allowIdGeneration, String concreteIndex) { // resolve the routing if needed @@ -600,8 +593,7 @@ public class IndexRequest extends ReplicationRequest implements Do // resolve timestamp if provided externally if (timestamp != null) { timestamp = MappingMetaData.Timestamp.parseStringTimestamp(timestamp, - mappingMd != null ? mappingMd.timestamp().dateTimeFormatter() : TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER, - getVersion(metaData, concreteIndex)); + mappingMd != null ? 
mappingMd.timestamp().dateTimeFormatter() : TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER); } if (mappingMd != null) { // might as well check for routing here @@ -645,7 +637,7 @@ public class IndexRequest extends ReplicationRequest implements Do // assigned again because mappingMd and // mappingMd#timestamp() are not null assert mappingMd != null; - timestamp = MappingMetaData.Timestamp.parseStringTimestamp(defaultTimestamp, mappingMd.timestamp().dateTimeFormatter(), getVersion(metaData, concreteIndex)); + timestamp = MappingMetaData.Timestamp.parseStringTimestamp(defaultTimestamp, mappingMd.timestamp().dateTimeFormatter()); } } } diff --git a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java index fdd018c51f2f..0d2e7c2e0743 100644 --- a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java @@ -69,6 +69,7 @@ public class TransportIndexAction extends TransportReplicationAction shardOperationOnPrimary(MetaData metaData, IndexRequest request) throws Exception { // validate, if routing is required, that we got routing - IndexMetaData indexMetaData = metaData.index(request.shardId().getIndex()); + IndexMetaData indexMetaData = metaData.getIndexSafe(request.shardId().getIndex()); MappingMetaData mappingMd = indexMetaData.mappingOrDefault(request.type()); if (mappingMd != null && mappingMd.routing().required()) { if (request.routing() == null) { @@ -205,8 +207,7 @@ public class TransportIndexAction extends TransportReplicationAction // TODO: I think startTime() should become part of ActionRequest and that should be used both for index name // date math expressions and $now in scripts. 
This way all apis will deal with now in the same way instead // of just for the _search api - String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterState, request.indicesOptions(), + String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, request.indicesOptions(), startTime(), request.indices()); for (String index : concreteIndices) { diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index e87fa2a345a4..0b53008ddf54 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -64,7 +64,7 @@ public class TransportSearchAction extends HandledTransportAction> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(), searchRequest.indices()); int shardCount = clusterService.operationRouting().searchShardsCount(clusterState, concreteIndices, routingMap); diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java index e39fb0288acb..f2d7e306c387 100644 --- a/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java @@ -125,7 +125,7 @@ public abstract class TransportBroadcastAction listener) { - String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); + String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request); doMasterOperation(request, concreteIndices, state, listener); } diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java 
b/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java index a15819e82c7e..8ace072fa88a 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java @@ -97,7 +97,7 @@ public abstract class TransportBroadcastReplicationAction shards(Request request, ClusterState clusterState) { List shardIds = new ArrayList<>(); - String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterState, request); + String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, request); for (String index : concreteIndices) { IndexMetaData indexMetaData = clusterState.metaData().getIndices().get(index); if (indexMetaData != null) { diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 69df4e617875..28d21c6dbd9c 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -103,7 +103,6 @@ public abstract class TransportReplicationAction request, Supplier replicaRequest, String executor) { super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver, transportService.getTaskManager()); @@ -121,7 +120,6 @@ public abstract class TransportReplicationAction deleted = null; for (ObjectCursor cursor : previousState.metaData().indices().values()) { IndexMetaData index = cursor.value; - IndexMetaData current = state.metaData().index(index.getIndex().getName()); - if (current == null || index.getIndexUUID().equals(current.getIndexUUID()) == false) { + IndexMetaData current = state.metaData().index(index.getIndex()); 
+ if (current == null) { if (deleted == null) { deleted = new ArrayList<>(); } diff --git a/core/src/main/java/org/elasticsearch/cluster/ack/IndicesClusterStateUpdateRequest.java b/core/src/main/java/org/elasticsearch/cluster/ack/IndicesClusterStateUpdateRequest.java index c691abe59066..33d716cb9651 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ack/IndicesClusterStateUpdateRequest.java +++ b/core/src/main/java/org/elasticsearch/cluster/ack/IndicesClusterStateUpdateRequest.java @@ -18,17 +18,19 @@ */ package org.elasticsearch.cluster.ack; +import org.elasticsearch.index.Index; + /** * Base cluster state update request that allows to execute update against multiple indices */ public abstract class IndicesClusterStateUpdateRequest> extends ClusterStateUpdateRequest { - private String[] indices; + private Index[] indices; /** * Returns the indices the operation needs to be executed on */ - public String[] indices() { + public Index[] indices() { return indices; } @@ -36,7 +38,7 @@ public abstract class IndicesClusterStateUpdateRequest actionListener = new ActionListener() { - @Override - public void onResponse(PutMappingResponse response) { - if (response.isAcknowledged()) { - listener.onMappingUpdate(); - } else { - listener.onFailure(new TimeoutException("Failed to acknowledge the mapping response within [" + timeout + "]")); - } - } - - @Override - public void onFailure(Throwable e) { - listener.onFailure(e); - } - }; - request.execute(actionListener); - } - } - - public void updateMappingOnMasterAsynchronously(String index, String type, Mapping mappingUpdate) throws Exception { - updateMappingOnMaster(index, type, mappingUpdate, dynamicMappingUpdateTimeout, null); - } - /** - * Same as {@link #updateMappingOnMasterSynchronously(String, String, Mapping, TimeValue)} + * Same as {@link #updateMappingOnMaster(Index, String, Mapping, TimeValue)} * using the default timeout. 
*/ - public void updateMappingOnMasterSynchronously(String index, String type, Mapping mappingUpdate) throws Exception { - updateMappingOnMasterSynchronously(index, type, mappingUpdate, dynamicMappingUpdateTimeout); + public void updateMappingOnMaster(Index index, String type, Mapping mappingUpdate) throws Exception { + updateMappingOnMaster(index, type, mappingUpdate, dynamicMappingUpdateTimeout); } /** @@ -114,19 +85,9 @@ public class MappingUpdatedAction extends AbstractComponent { * {@code timeout}. When this method returns successfully mappings have * been applied to the master node and propagated to data nodes. */ - public void updateMappingOnMasterSynchronously(String index, String type, Mapping mappingUpdate, TimeValue timeout) throws Exception { + public void updateMappingOnMaster(Index index, String type, Mapping mappingUpdate, TimeValue timeout) throws Exception { if (updateMappingRequest(index, type, mappingUpdate, timeout).get().isAcknowledged() == false) { throw new TimeoutException("Failed to acknowledge mapping update within [" + timeout + "]"); } } - - /** - * A listener to be notified when the mappings were updated - */ - public static interface MappingUpdateListener { - - void onMappingUpdate(); - - void onFailure(Throwable t); - } } diff --git a/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java b/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java index c2c1b468f1b0..93fce95fc234 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java @@ -76,7 +76,7 @@ public class NodeIndexDeletedAction extends AbstractComponent { listeners.remove(listener); } - public void nodeIndexDeleted(final ClusterState clusterState, final String index, final IndexSettings indexSettings, final String nodeId) { + public void nodeIndexDeleted(final ClusterState 
clusterState, final Index index, final IndexSettings indexSettings, final String nodeId) { final DiscoveryNodes nodes = clusterState.nodes(); transportService.sendRequest(clusterState.nodes().masterNode(), INDEX_DELETED_ACTION_NAME, new NodeIndexDeletedMessage(index, nodeId), EmptyTransportResponseHandler.INSTANCE_SAME); @@ -97,7 +97,7 @@ public class NodeIndexDeletedAction extends AbstractComponent { }); } - private void lockIndexAndAck(String index, DiscoveryNodes nodes, String nodeId, ClusterState clusterState, IndexSettings indexSettings) throws IOException { + private void lockIndexAndAck(Index index, DiscoveryNodes nodes, String nodeId, ClusterState clusterState, IndexSettings indexSettings) throws IOException { try { // we are waiting until we can lock the index / all shards on the node and then we ack the delete of the store to the // master. If we can't acquire the locks here immediately there might be a shard of this index still holding on to the lock @@ -114,9 +114,9 @@ public class NodeIndexDeletedAction extends AbstractComponent { } public interface Listener { - void onNodeIndexDeleted(String index, String nodeId); + void onNodeIndexDeleted(Index index, String nodeId); - void onNodeIndexStoreDeleted(String index, String nodeId); + void onNodeIndexStoreDeleted(Index index, String nodeId); } private class NodeIndexDeletedTransportHandler implements TransportRequestHandler { @@ -143,13 +143,13 @@ public class NodeIndexDeletedAction extends AbstractComponent { public static class NodeIndexDeletedMessage extends TransportRequest { - String index; + Index index; String nodeId; public NodeIndexDeletedMessage() { } - NodeIndexDeletedMessage(String index, String nodeId) { + NodeIndexDeletedMessage(Index index, String nodeId) { this.index = index; this.nodeId = nodeId; } @@ -157,27 +157,27 @@ public class NodeIndexDeletedAction extends AbstractComponent { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - 
out.writeString(index); + index.writeTo(out); out.writeString(nodeId); } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - index = in.readString(); + index = new Index(in); nodeId = in.readString(); } } public static class NodeIndexStoreDeletedMessage extends TransportRequest { - String index; + Index index; String nodeId; public NodeIndexStoreDeletedMessage() { } - NodeIndexStoreDeletedMessage(String index, String nodeId) { + NodeIndexStoreDeletedMessage(Index index, String nodeId) { this.index = index; this.nodeId = nodeId; } @@ -185,14 +185,14 @@ public class NodeIndexDeletedAction extends AbstractComponent { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(index); + index.writeTo(out); out.writeString(nodeId); } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - index = in.readString(); + index = new Index(in); nodeId = in.readString(); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 9bd4ba6112bb..49c057f104db 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.IndexClosedException; import org.joda.time.DateTimeZone; @@ -65,11 +66,20 @@ public class IndexNameExpressionResolver extends AbstractComponent { ); } + /** + * Same as {@link #concreteIndexNames(ClusterState, IndicesOptions, 
String...)}, but the index expressions and options + * are encapsulated in the specified request. + */ + public String[] concreteIndexNames(ClusterState state, IndicesRequest request) { + Context context = new Context(state, request.indicesOptions()); + return concreteIndexNames(context, request.indices()); + } + /** * Same as {@link #concreteIndices(ClusterState, IndicesOptions, String...)}, but the index expressions and options * are encapsulated in the specified request. */ - public String[] concreteIndices(ClusterState state, IndicesRequest request) { + public Index[] concreteIndices(ClusterState state, IndicesRequest request) { Context context = new Context(state, request.indicesOptions()); return concreteIndices(context, request.indices()); } @@ -87,7 +97,25 @@ public class IndexNameExpressionResolver extends AbstractComponent { * @throws IllegalArgumentException if one of the aliases resolve to multiple indices and the provided * indices options in the context don't allow such a case. */ - public String[] concreteIndices(ClusterState state, IndicesOptions options, String... indexExpressions) { + public String[] concreteIndexNames(ClusterState state, IndicesOptions options, String... indexExpressions) { + Context context = new Context(state, options); + return concreteIndexNames(context, indexExpressions); + } + + /** + * Translates the provided index expression into actual concrete indices, properly deduplicated. + * + * @param state the cluster state containing all the data to resolve to expressions to concrete indices + * @param options defines how the aliases or indices need to be resolved to concrete indices + * @param indexExpressions expressions that can be resolved to alias or index names. 
+ * @return the resolved concrete indices based on the cluster state, indices options and index expressions + * @throws IndexNotFoundException if one of the index expressions is pointing to a missing index or alias and the + * provided indices options in the context don't allow such a case, or if the final result of the indices resolution + * contains no indices and the indices options in the context don't allow such a case. + * @throws IllegalArgumentException if one of the aliases resolve to multiple indices and the provided + * indices options in the context don't allow such a case. + */ + public Index[] concreteIndices(ClusterState state, IndicesOptions options, String... indexExpressions) { Context context = new Context(state, options); return concreteIndices(context, indexExpressions); } @@ -105,12 +133,21 @@ public class IndexNameExpressionResolver extends AbstractComponent { * @throws IllegalArgumentException if one of the aliases resolve to multiple indices and the provided * indices options in the context don't allow such a case. */ - public String[] concreteIndices(ClusterState state, IndicesOptions options, long startTime, String... indexExpressions) { + public String[] concreteIndexNames(ClusterState state, IndicesOptions options, long startTime, String... indexExpressions) { Context context = new Context(state, options, startTime); - return concreteIndices(context, indexExpressions); + return concreteIndexNames(context, indexExpressions); } - String[] concreteIndices(Context context, String... indexExpressions) { + String[] concreteIndexNames(Context context, String... indexExpressions) { + Index[] indexes = concreteIndices(context, indexExpressions); + String[] names = new String[indexes.length]; + for (int i = 0; i < indexes.length; i++) { + names[i] = indexes[i].getName(); + } + return names; + } + + Index[] concreteIndices(Context context, String... 
indexExpressions) { if (indexExpressions == null || indexExpressions.length == 0) { indexExpressions = new String[]{MetaData.ALL}; } @@ -136,11 +173,11 @@ public class IndexNameExpressionResolver extends AbstractComponent { infe.setResources("index_expression", indexExpressions); throw infe; } else { - return Strings.EMPTY_ARRAY; + return Index.EMPTY_ARRAY; } } - final Set concreteIndices = new HashSet<>(expressions.size()); + final Set concreteIndices = new HashSet<>(expressions.size()); for (String expression : expressions) { AliasOrIndex aliasOrIndex = metaData.getAliasAndIndexLookup().get(expression); if (aliasOrIndex == null) { @@ -169,11 +206,11 @@ public class IndexNameExpressionResolver extends AbstractComponent { throw new IndexClosedException(index.getIndex()); } else { if (options.forbidClosedIndices() == false) { - concreteIndices.add(index.getIndex().getName()); + concreteIndices.add(index.getIndex()); } } } else if (index.getState() == IndexMetaData.State.OPEN) { - concreteIndices.add(index.getIndex().getName()); + concreteIndices.add(index.getIndex()); } else { throw new IllegalStateException("index state [" + index.getState() + "] not supported"); } @@ -185,7 +222,7 @@ public class IndexNameExpressionResolver extends AbstractComponent { infe.setResources("index_expression", indexExpressions); throw infe; } - return concreteIndices.toArray(new String[concreteIndices.size()]); + return concreteIndices.toArray(new Index[concreteIndices.size()]); } /** @@ -200,9 +237,9 @@ public class IndexNameExpressionResolver extends AbstractComponent { * @throws IllegalArgumentException if the index resolution lead to more than one index * @return the concrete index obtained as a result of the index resolution */ - public String concreteSingleIndex(ClusterState state, IndicesRequest request) { + public Index concreteSingleIndex(ClusterState state, IndicesRequest request) { String indexExpression = request.indices() != null && request.indices().length > 0 ? 
request.indices()[0] : null; - String[] indices = concreteIndices(state, request.indicesOptions(), indexExpression); + Index[] indices = concreteIndices(state, request.indicesOptions(), indexExpression); if (indices.length != 1) { throw new IllegalArgumentException("unable to return a single index as the index and options provided got resolved to multiple indices"); } @@ -867,7 +904,7 @@ public class IndexNameExpressionResolver extends AbstractComponent { * Returns true iff the given expression resolves to the given index name otherwise false */ public final boolean matchesIndex(String indexName, String expression, ClusterState state) { - final String[] concreteIndices = concreteIndices(state, IndicesOptions.lenientExpandOpen(), expression); + final String[] concreteIndices = concreteIndexNames(state, IndicesOptions.lenientExpandOpen(), expression); for (String index : concreteIndices) { if (Regex.simpleMatch(index, indexName)) { return true; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java index a88f1609b9ec..10b05c466572 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java @@ -84,20 +84,10 @@ public class MappingMetaData extends AbstractDiffable { private static final FormatDateTimeFormatter EPOCH_MILLIS_PARSER = Joda.forPattern("epoch_millis"); - public static String parseStringTimestamp(String timestampAsString, FormatDateTimeFormatter dateTimeFormatter, - Version version) throws TimestampParsingException { + public static String parseStringTimestamp(String timestampAsString, FormatDateTimeFormatter dateTimeFormatter) throws TimestampParsingException { try { - // no need for unix timestamp parsing in 2.x - FormatDateTimeFormatter formatter = version.onOrAfter(Version.V_2_0_0_beta1) ? 
dateTimeFormatter : EPOCH_MILLIS_PARSER; - return Long.toString(formatter.parser().parseMillis(timestampAsString)); + return Long.toString(dateTimeFormatter.parser().parseMillis(timestampAsString)); } catch (RuntimeException e) { - if (version.before(Version.V_2_0_0_beta1)) { - try { - return Long.toString(dateTimeFormatter.parser().parseMillis(timestampAsString)); - } catch (RuntimeException e1) { - throw new TimestampParsingException(timestampAsString, e1); - } - } throw new TimestampParsingException(timestampAsString, e); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 4c83f64581e6..f802637f22fa 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -232,7 +232,7 @@ public class MetaData implements Iterable, Diffable, Fr public boolean equalsAliases(MetaData other) { for (ObjectCursor cursor : other.indices().values()) { IndexMetaData otherIndex = cursor.value; - IndexMetaData thisIndex= index(otherIndex.getIndex()); + IndexMetaData thisIndex = index(otherIndex.getIndex()); if (thisIndex == null) { return false; } @@ -457,7 +457,28 @@ public class MetaData implements Iterable, Diffable, Fr } public IndexMetaData index(Index index) { - return index(index.getName()); + IndexMetaData metaData = index(index.getName()); + if (metaData != null && metaData.getIndexUUID().equals(index.getUUID())) { + return metaData; + } + return null; + } + + /** + * Returns the {@link IndexMetaData} for this index. 
+ * @throws IndexNotFoundException if no metadata for this index is found + */ + public IndexMetaData getIndexSafe(Index index) { + IndexMetaData metaData = index(index.getName()); + if (metaData != null) { + if(metaData.getIndexUUID().equals(index.getUUID())) { + return metaData; + } + throw new IndexNotFoundException(index, + new IllegalStateException("index uuid doesn't match expected: [" + index.getUUID() + + "] but got: [" + metaData.getIndexUUID() +"]")); + } + throw new IndexNotFoundException(index); } public ImmutableOpenMap indices() { @@ -488,20 +509,13 @@ public class MetaData implements Iterable, Diffable, Fr return (T) customs.get(type); } - public int totalNumberOfShards() { + + public int getTotalNumberOfShards() { return this.totalNumberOfShards; } - public int getTotalNumberOfShards() { - return totalNumberOfShards(); - } - - public int numberOfShards() { - return this.numberOfShards; - } - public int getNumberOfShards() { - return numberOfShards(); + return this.numberOfShards; } /** @@ -844,6 +858,19 @@ public class MetaData implements Iterable, Diffable, Fr return indices.get(index); } + public IndexMetaData getSafe(Index index) { + IndexMetaData indexMetaData = get(index.getName()); + if (indexMetaData != null) { + if(indexMetaData.getIndexUUID().equals(index.getUUID())) { + return indexMetaData; + } + throw new IndexNotFoundException(index, + new IllegalStateException("index uuid doesn't match expected: [" + index.getUUID() + + "] but got: [" + indexMetaData.getIndexUUID() +"]")); + } + throw new IndexNotFoundException(index); + } + public Builder remove(String index) { indices.remove(index); return this; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java index 132e46b1e94f..5492325b6510 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java +++ 
b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java @@ -35,14 +35,17 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.threadpool.ThreadPool; +import java.util.HashSet; import java.util.Set; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; /** * @@ -68,10 +71,9 @@ public class MetaDataDeleteIndexService extends AbstractComponent { } public void deleteIndices(final Request request, final Listener userListener) { - Set indices = Sets.newHashSet(request.indices); final DeleteIndexListener listener = new DeleteIndexListener(userListener); - clusterService.submitStateUpdateTask("delete-index " + indices, new ClusterStateUpdateTask(Priority.URGENT) { + clusterService.submitStateUpdateTask("delete-index " + request.indices, new ClusterStateUpdateTask(Priority.URGENT) { @Override public TimeValue timeout() { @@ -85,23 +87,21 @@ public class MetaDataDeleteIndexService extends AbstractComponent { @Override public ClusterState execute(final ClusterState currentState) { + final MetaData meta = currentState.metaData(); + final Set metaDatas = request.indices.stream().map(i -> meta.getIndexSafe(i)).collect(Collectors.toSet()); // Check if index deletion conflicts with any running snapshots - SnapshotsService.checkIndexDeletion(currentState, indices); - + SnapshotsService.checkIndexDeletion(currentState, metaDatas); + final Set indices = request.indices; RoutingTable.Builder routingTableBuilder = RoutingTable.builder(currentState.routingTable()); - MetaData.Builder metaDataBuilder = 
MetaData.builder(currentState.metaData()); + MetaData.Builder metaDataBuilder = MetaData.builder(meta); ClusterBlocks.Builder clusterBlocksBuilder = ClusterBlocks.builder().blocks(currentState.blocks()); - for (final String index: indices) { - if (!currentState.metaData().hasConcreteIndex(index)) { - throw new IndexNotFoundException(index); - } - + for (final Index index : indices) { + String indexName = index.getName(); logger.debug("[{}] deleting index", index); - - routingTableBuilder.remove(index); - clusterBlocksBuilder.removeIndexBlocks(index); - metaDataBuilder.remove(index); + routingTableBuilder.remove(indexName); + clusterBlocksBuilder.removeIndexBlocks(indexName); + metaDataBuilder.remove(indexName); } // wait for events from all nodes that it has been removed from their respective metadata... int count = currentState.nodes().size(); @@ -112,7 +112,7 @@ public class MetaDataDeleteIndexService extends AbstractComponent { // this listener will be notified once we get back a notification based on the cluster state change below. 
final NodeIndexDeletedAction.Listener nodeIndexDeleteListener = new NodeIndexDeletedAction.Listener() { @Override - public void onNodeIndexDeleted(String deleted, String nodeId) { + public void onNodeIndexDeleted(Index deleted, String nodeId) { if (indices.contains(deleted)) { if (counter.decrementAndGet() == 0) { listener.onResponse(new Response(true)); @@ -122,7 +122,7 @@ public class MetaDataDeleteIndexService extends AbstractComponent { } @Override - public void onNodeIndexStoreDeleted(String deleted, String nodeId) { + public void onNodeIndexStoreDeleted(Index deleted, String nodeId) { if (indices.contains(deleted)) { if (counter.decrementAndGet() == 0) { listener.onResponse(new Response(true)); @@ -187,12 +187,12 @@ public class MetaDataDeleteIndexService extends AbstractComponent { public static class Request { - final String[] indices; + final Set indices; TimeValue timeout = TimeValue.timeValueSeconds(10); TimeValue masterTimeout = MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; - public Request(String[] indices) { + public Request(Set indices) { this.indices = indices; } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java index 121065bc638e..e68b0be36b6f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.snapshots.RestoreService; @@ -82,15 +83,11 @@ public class MetaDataIndexStateService extends AbstractComponent { 
@Override public ClusterState execute(ClusterState currentState) { - Set indicesToClose = new HashSet<>(); - for (String index : request.indices()) { - IndexMetaData indexMetaData = currentState.metaData().index(index); - if (indexMetaData == null) { - throw new IndexNotFoundException(index); - } - + Set indicesToClose = new HashSet<>(); + for (Index index : request.indices()) { + final IndexMetaData indexMetaData = currentState.metaData().getIndexSafe(index); if (indexMetaData.getState() != IndexMetaData.State.CLOSE) { - indicesToClose.add(index); + indicesToClose.add(indexMetaData); } } @@ -102,22 +99,22 @@ public class MetaDataIndexStateService extends AbstractComponent { RestoreService.checkIndexClosing(currentState, indicesToClose); // Check if index closing conflicts with any running snapshots SnapshotsService.checkIndexClosing(currentState, indicesToClose); - logger.info("closing indices [{}]", indicesAsString); MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData()); ClusterBlocks.Builder blocksBuilder = ClusterBlocks.builder() .blocks(currentState.blocks()); - for (String index : indicesToClose) { - mdBuilder.put(IndexMetaData.builder(currentState.metaData().index(index)).state(IndexMetaData.State.CLOSE)); - blocksBuilder.addIndexBlock(index, INDEX_CLOSED_BLOCK); + for (IndexMetaData openIndexMetadata : indicesToClose) { + final String indexName = openIndexMetadata.getIndex().getName(); + mdBuilder.put(IndexMetaData.builder(openIndexMetadata).state(IndexMetaData.State.CLOSE)); + blocksBuilder.addIndexBlock(indexName, INDEX_CLOSED_BLOCK); } ClusterState updatedState = ClusterState.builder(currentState).metaData(mdBuilder).blocks(blocksBuilder).build(); RoutingTable.Builder rtBuilder = RoutingTable.builder(currentState.routingTable()); - for (String index : indicesToClose) { - rtBuilder.remove(index); + for (IndexMetaData index : indicesToClose) { + rtBuilder.remove(index.getIndex().getName()); } RoutingAllocation.Result routingResult = 
allocationService.reroute( @@ -143,14 +140,11 @@ public class MetaDataIndexStateService extends AbstractComponent { @Override public ClusterState execute(ClusterState currentState) { - List indicesToOpen = new ArrayList<>(); - for (String index : request.indices()) { - IndexMetaData indexMetaData = currentState.metaData().index(index); - if (indexMetaData == null) { - throw new IndexNotFoundException(index); - } + List indicesToOpen = new ArrayList<>(); + for (Index index : request.indices()) { + final IndexMetaData indexMetaData = currentState.metaData().getIndexSafe(index); if (indexMetaData.getState() != IndexMetaData.State.OPEN) { - indicesToOpen.add(index); + indicesToOpen.add(indexMetaData); } } @@ -163,20 +157,21 @@ public class MetaDataIndexStateService extends AbstractComponent { MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData()); ClusterBlocks.Builder blocksBuilder = ClusterBlocks.builder() .blocks(currentState.blocks()); - for (String index : indicesToOpen) { - IndexMetaData indexMetaData = IndexMetaData.builder(currentState.metaData().index(index)).state(IndexMetaData.State.OPEN).build(); + for (IndexMetaData closedMetaData : indicesToOpen) { + final String indexName = closedMetaData.getIndex().getName(); + IndexMetaData indexMetaData = IndexMetaData.builder(closedMetaData).state(IndexMetaData.State.OPEN).build(); // The index might be closed because we couldn't import it due to old incompatible version // We need to check that this index can be upgraded to the current version indexMetaData = metaDataIndexUpgradeService.upgradeIndexMetaData(indexMetaData); mdBuilder.put(indexMetaData, true); - blocksBuilder.removeIndexBlock(index, INDEX_CLOSED_BLOCK); + blocksBuilder.removeIndexBlock(indexName, INDEX_CLOSED_BLOCK); } ClusterState updatedState = ClusterState.builder(currentState).metaData(mdBuilder).blocks(blocksBuilder).build(); RoutingTable.Builder rtBuilder = RoutingTable.builder(updatedState.routingTable()); - for (String index : 
indicesToOpen) { - rtBuilder.addAsFromCloseToOpen(updatedState.metaData().index(index)); + for (IndexMetaData index : indicesToOpen) { + rtBuilder.addAsFromCloseToOpen(updatedState.metaData().getIndexSafe(index.getIndex())); } RoutingAllocation.Result routingResult = allocationService.reroute( diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 51095a2d0de8..a4dbe058395a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -31,6 +31,7 @@ import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; @@ -216,31 +217,23 @@ public class MetaDataMappingService extends AbstractComponent { try { // precreate incoming indices; for (PutMappingClusterStateUpdateRequest request : tasks) { - final List indices = new ArrayList<>(request.indices().length); try { - for (String index : request.indices()) { - final IndexMetaData indexMetaData = currentState.metaData().index(index); - if (indexMetaData != null) { - if (indicesService.hasIndex(indexMetaData.getIndex()) == false) { - // if the index does not exists we create it once, add all types to the mapper service and - // close it later once we are done with mapping update - indicesToClose.add(indexMetaData.getIndex()); - IndexService indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, - Collections.emptyList()); - // add mappings for all types, we need them for cross-type validation - for 
(ObjectCursor mapping : indexMetaData.getMappings().values()) { - indexService.mapperService().merge(mapping.value.type(), mapping.value.source(), - MapperService.MergeReason.MAPPING_RECOVERY, request.updateAllTypes()); - } + for (Index index : request.indices()) { + final IndexMetaData indexMetaData = currentState.metaData().getIndexSafe(index); + if (indicesService.hasIndex(indexMetaData.getIndex()) == false) { + // if the index does not exists we create it once, add all types to the mapper service and + // close it later once we are done with mapping update + indicesToClose.add(indexMetaData.getIndex()); + IndexService indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, + Collections.emptyList()); + // add mappings for all types, we need them for cross-type validation + for (ObjectCursor mapping : indexMetaData.getMappings().values()) { + indexService.mapperService().merge(mapping.value.type(), mapping.value.source(), + MapperService.MergeReason.MAPPING_RECOVERY, request.updateAllTypes()); } - indices.add(indexMetaData.getIndex()); - } else { - // we didn't find the index in the clusterstate - maybe it was deleted - // NOTE: this doesn't fail the entire batch only the current PutMapping request we are processing - throw new IndexNotFoundException(index); } } - currentState = applyRequest(currentState, request, indices); + currentState = applyRequest(currentState, request); builder.success(request); } catch (Throwable t) { builder.failure(request, t); @@ -254,13 +247,20 @@ public class MetaDataMappingService extends AbstractComponent { } } - private ClusterState applyRequest(ClusterState currentState, PutMappingClusterStateUpdateRequest request, - List indices) throws IOException { + private ClusterState applyRequest(ClusterState currentState, PutMappingClusterStateUpdateRequest request) throws IOException { String mappingType = request.type(); CompressedXContent mappingUpdateSource = new CompressedXContent(request.source()); final 
MetaData metaData = currentState.metaData(); - for (Index index : indices) { + final List> updateList = new ArrayList<>(); + for (Index index : request.indices()) { IndexService indexService = indicesService.indexServiceSafe(index); + // IMPORTANT: always get the metadata from the state since it get's batched + // and if we pull it from the indexService we might miss an update etc. + final IndexMetaData indexMetaData = currentState.getMetaData().getIndexSafe(index); + + // this is paranoia... just to be sure we use the exact same indexService and metadata tuple on the update that + // we used for the validation, it makes this mechanism little less scary (a little) + updateList.add(new Tuple<>(indexService, indexMetaData)); // try and parse it (no need to add it here) so we can bail early in case of parsing exception DocumentMapper newMapper; DocumentMapper existingMapper = indexService.mapperService().documentMapper(request.type()); @@ -281,7 +281,6 @@ public class MetaDataMappingService extends AbstractComponent { // and a put mapping api call, so we don't which type did exist before. // Also the order of the mappings may be backwards. 
if (newMapper.parentFieldMapper().active()) { - IndexMetaData indexMetaData = metaData.index(index); for (ObjectCursor mapping : indexMetaData.getMappings().values()) { if (newMapper.parentFieldMapper().type().equals(mapping.value.type())) { throw new IllegalArgumentException("can't add a _parent field that points to an already existing type"); @@ -302,13 +301,12 @@ public class MetaDataMappingService extends AbstractComponent { throw new InvalidTypeNameException("Document mapping type name can't start with '_'"); } MetaData.Builder builder = MetaData.builder(metaData); - for (Index index : indices) { + for (Tuple toUpdate : updateList) { // do the actual merge here on the master, and update the mapping source - IndexService indexService = indicesService.indexService(index); - if (indexService == null) { // TODO this seems impossible given we use indexServiceSafe above - continue; - } - + // we use the exact same indexService and metadata we used to validate above here to actually apply the update + final IndexService indexService = toUpdate.v1(); + final IndexMetaData indexMetaData = toUpdate.v2(); + final Index index = indexMetaData.getIndex(); CompressedXContent existingSource = null; DocumentMapper existingMapper = indexService.mapperService().documentMapper(mappingType); if (existingMapper != null) { @@ -323,24 +321,20 @@ public class MetaDataMappingService extends AbstractComponent { } else { // use the merged mapping source if (logger.isDebugEnabled()) { - logger.debug("[{}] update_mapping [{}] with source [{}]", index, mergedMapper.type(), updatedSource); + logger.debug("{} update_mapping [{}] with source [{}]", index, mergedMapper.type(), updatedSource); } else if (logger.isInfoEnabled()) { - logger.info("[{}] update_mapping [{}]", index, mergedMapper.type()); + logger.info("{} update_mapping [{}]", index, mergedMapper.type()); } } } else { if (logger.isDebugEnabled()) { - logger.debug("[{}] create_mapping [{}] with source [{}]", index, mappingType, 
updatedSource); + logger.debug("{} create_mapping [{}] with source [{}]", index, mappingType, updatedSource); } else if (logger.isInfoEnabled()) { - logger.info("[{}] create_mapping [{}]", index, mappingType); + logger.info("{} create_mapping [{}]", index, mappingType); } } - IndexMetaData indexMetaData = metaData.index(index); - if (indexMetaData == null) { - throw new IndexNotFoundException(index); - } IndexMetaData.Builder indexMetaDataBuilder = IndexMetaData.builder(indexMetaData); // Mapping updates on a single type may have side-effects on other types so we need to // update mapping metadata on all types diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index 2d7ba4c3c057..b27647344691 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -23,7 +23,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeSettingsClusterStateUpdateRequest; -import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterService; @@ -43,7 +42,7 @@ import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.Index; import java.util.ArrayList; import java.util.HashMap; @@ -86,7 +85,7 @@ public class 
MetaDataUpdateSettingsService extends AbstractComponent implements // we will want to know this for translating "all" to a number final int dataNodeCount = event.state().nodes().dataNodes().size(); - Map> nrReplicasChanged = new HashMap<>(); + Map> nrReplicasChanged = new HashMap<>(); // we need to do this each time in case it was changed by update settings for (final IndexMetaData indexMetaData : event.state().metaData()) { AutoExpandReplicas autoExpandReplicas = IndexMetaData.INDEX_AUTO_EXPAND_REPLICAS_SETTING.get(indexMetaData.getSettings()); @@ -117,7 +116,7 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements nrReplicasChanged.put(numberOfReplicas, new ArrayList<>()); } - nrReplicasChanged.get(numberOfReplicas).add(indexMetaData.getIndex().getName()); + nrReplicasChanged.get(numberOfReplicas).add(indexMetaData.getIndex()); } } } @@ -126,25 +125,25 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements // update settings and kick of a reroute (implicit) for them to take effect for (final Integer fNumberOfReplicas : nrReplicasChanged.keySet()) { Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, fNumberOfReplicas).build(); - final List indices = nrReplicasChanged.get(fNumberOfReplicas); + final List indices = nrReplicasChanged.get(fNumberOfReplicas); UpdateSettingsClusterStateUpdateRequest updateRequest = new UpdateSettingsClusterStateUpdateRequest() - .indices(indices.toArray(new String[indices.size()])).settings(settings) + .indices(indices.toArray(new Index[indices.size()])).settings(settings) .ackTimeout(TimeValue.timeValueMillis(0)) //no need to wait for ack here .masterNodeTimeout(TimeValue.timeValueMinutes(10)); updateSettings(updateRequest, new ActionListener() { @Override public void onResponse(ClusterStateUpdateResponse response) { - for (String index : indices) { - logger.info("[{}] auto expanded replicas to [{}]", index, fNumberOfReplicas); + for 
(Index index : indices) { + logger.info("{} auto expanded replicas to [{}]", index, fNumberOfReplicas); } } @Override public void onFailure(Throwable t) { - for (String index : indices) { - logger.warn("[{}] fail to auto expand replicas to [{}]", index, fNumberOfReplicas); + for (Index index : indices) { + logger.warn("{} fail to auto expand replicas to [{}]", index, fNumberOfReplicas); } } }); @@ -188,16 +187,19 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements @Override public ClusterState execute(ClusterState currentState) { - String[] actualIndices = indexNameExpressionResolver.concreteIndices(currentState, IndicesOptions.strictExpand(), request.indices()); RoutingTable.Builder routingTableBuilder = RoutingTable.builder(currentState.routingTable()); MetaData.Builder metaDataBuilder = MetaData.builder(currentState.metaData()); // allow to change any settings to a close index, and only allow dynamic settings to be changed // on an open index - Set openIndices = new HashSet<>(); - Set closeIndices = new HashSet<>(); - for (String index : actualIndices) { - if (currentState.metaData().index(index).getState() == IndexMetaData.State.OPEN) { + Set openIndices = new HashSet<>(); + Set closeIndices = new HashSet<>(); + final String[] actualIndices = new String[request.indices().length]; + for (int i = 0; i < request.indices().length; i++) { + Index index = request.indices()[i]; + actualIndices[i] = index.getName(); + final IndexMetaData metaData = currentState.metaData().getIndexSafe(index); + if (metaData.getState() == IndexMetaData.State.OPEN) { openIndices.add(index); } else { closeIndices.add(index); @@ -206,13 +208,13 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements if (closeIndices.size() > 0 && closedSettings.get(IndexMetaData.SETTING_NUMBER_OF_REPLICAS) != null) { throw new IllegalArgumentException(String.format(Locale.ROOT, - "Can't update [%s] on closed indices [%s] - can leave index in an 
unopenable state", IndexMetaData.SETTING_NUMBER_OF_REPLICAS, + "Can't update [%s] on closed indices %s - can leave index in an unopenable state", IndexMetaData.SETTING_NUMBER_OF_REPLICAS, closeIndices )); } if (!skippedSettigns.getAsMap().isEmpty() && !openIndices.isEmpty()) { throw new IllegalArgumentException(String.format(Locale.ROOT, - "Can't update non dynamic settings[%s] for open indices [%s]", + "Can't update non dynamic settings [%s] for open indices %s", skippedSettigns.getAsMap().keySet(), openIndices )); @@ -232,28 +234,22 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements maybeUpdateClusterBlock(actualIndices, blocks, IndexMetaData.INDEX_READ_BLOCK, IndexMetaData.INDEX_BLOCKS_READ_SETTING, openSettings); if (!openIndices.isEmpty()) { - for (String index : openIndices) { - IndexMetaData indexMetaData = metaDataBuilder.get(index); - if (indexMetaData == null) { - throw new IndexNotFoundException(index); - } + for (Index index : openIndices) { + IndexMetaData indexMetaData = metaDataBuilder.getSafe(index); Settings.Builder updates = Settings.builder(); Settings.Builder indexSettings = Settings.builder().put(indexMetaData.getSettings()); - if (indexScopedSettings.updateDynamicSettings(openSettings, indexSettings, updates, index)) { + if (indexScopedSettings.updateDynamicSettings(openSettings, indexSettings, updates, index.getName())) { metaDataBuilder.put(IndexMetaData.builder(indexMetaData).settings(indexSettings)); } } } if (!closeIndices.isEmpty()) { - for (String index : closeIndices) { - IndexMetaData indexMetaData = metaDataBuilder.get(index); - if (indexMetaData == null) { - throw new IndexNotFoundException(index); - } + for (Index index : closeIndices) { + IndexMetaData indexMetaData = metaDataBuilder.getSafe(index); Settings.Builder updates = Settings.builder(); Settings.Builder indexSettings = Settings.builder().put(indexMetaData.getSettings()); - if (indexScopedSettings.updateSettings(closedSettings, 
indexSettings, updates, index)) { + if (indexScopedSettings.updateSettings(closedSettings, indexSettings, updates, index.getName())) { metaDataBuilder.put(IndexMetaData.builder(indexMetaData).settings(indexSettings)); } } @@ -265,11 +261,11 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements // now, reroute in case things change that require it (like number of replicas) RoutingAllocation.Result routingResult = allocationService.reroute(updatedState, "settings update"); updatedState = ClusterState.builder(updatedState).routingResult(routingResult).build(); - for (String index : openIndices) { - indexScopedSettings.dryRun(updatedState.metaData().index(index).getSettings()); + for (Index index : openIndices) { + indexScopedSettings.dryRun(updatedState.metaData().getIndexSafe(index).getSettings()); } - for (String index : closeIndices) { - indexScopedSettings.dryRun(updatedState.metaData().index(index).getSettings()); + for (Index index : closeIndices) { + indexScopedSettings.dryRun(updatedState.metaData().getIndexSafe(index).getSettings()); } return updatedState; } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java index 160ccbf06b33..c32d9de363d6 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java @@ -313,7 +313,7 @@ public class IndexRoutingTable extends AbstractDiffable imple @Override public IndexRoutingTable readFrom(StreamInput in) throws IOException { - Index index = Index.readIndex(in); + Index index = new Index(in); Builder builder = new Builder(index); int size = in.readVInt(); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java index bda0a24c9a4c..e64f8f5d77c5 100644 --- 
a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java @@ -584,7 +584,7 @@ public class IndexShardRoutingTable implements Iterable { } public static IndexShardRoutingTable readFrom(StreamInput in) throws IOException { - Index index = Index.readIndex(in); + Index index = new Index(in); return readFromThin(in, index); } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java b/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java index cfa33e4f2258..a5975deb9cf2 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java @@ -328,7 +328,7 @@ public final class ShardRouting implements Streamable, ToXContent { @Override public void readFrom(StreamInput in) throws IOException { - readFrom(in, Index.readIndex(in), in.readVInt()); + readFrom(in, new Index(in), in.readVInt()); } /** diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java index 5c383bcae836..54f9b6855a6d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java @@ -320,7 +320,7 @@ public class AllocationService extends AbstractComponent { public static void updateLeftDelayOfUnassignedShards(RoutingAllocation allocation, Settings settings) { for (ShardRouting shardRouting : allocation.routingNodes().unassigned()) { final MetaData metaData = allocation.metaData(); - final IndexMetaData indexMetaData = metaData.index(shardRouting.index()); + final IndexMetaData indexMetaData = metaData.getIndexSafe(shardRouting.index()); 
shardRouting.unassignedInfo().updateDelay(allocation.getCurrentNanoTime(), settings, indexMetaData.getSettings()); } } @@ -340,7 +340,6 @@ public class AllocationService extends AbstractComponent { changed |= failReplicasForUnassignedPrimary(allocation, shardEntry); ShardRouting candidate = allocation.routingNodes().activeReplica(shardEntry); if (candidate != null) { - IndexMetaData index = allocation.metaData().index(candidate.index()); routingNodes.swapPrimaryFlag(shardEntry, candidate); if (candidate.relocatingNodeId() != null) { changed = true; @@ -355,6 +354,7 @@ public class AllocationService extends AbstractComponent { } } } + IndexMetaData index = allocation.metaData().getIndexSafe(candidate.index()); if (IndexMetaData.isIndexUsingShadowReplicas(index.getSettings())) { routingNodes.reinitShadowPrimary(candidate); changed = true; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 26b8b224d781..8102f206799d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -218,7 +218,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards this.threshold = threshold; this.routingNodes = allocation.routingNodes(); metaData = routingNodes.metaData(); - avgShardsPerNode = ((float) metaData.totalNumberOfShards()) / routingNodes.size(); + avgShardsPerNode = ((float) metaData.getTotalNumberOfShards()) / routingNodes.size(); buildModelFromAssigned(); } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java index 
5ccd9e9bb637..f4b1be19af49 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java @@ -112,7 +112,7 @@ public class AllocateStalePrimaryAllocationCommand extends BasePrimaryAllocation "allocating an empty primary for [" + index + "][" + shardId + "] can result in data loss. Please confirm by setting the accept_data_loss parameter to true"); } - final IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndexName()); + final IndexMetaData indexMetaData = allocation.metaData().getIndexSafe(shardRouting.index()); if (shardRouting.allocatedPostIndexCreate(indexMetaData) == false) { return explainOrThrowRejectedCommand(explain, allocation, "trying to allocate an existing primary shard [" + index + "][" + shardId + "], while no such shard has ever been active"); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java index 77613f390841..227ec277469b 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java @@ -153,7 +153,7 @@ public class AwarenessAllocationDecider extends AllocationDecider { return allocation.decision(Decision.YES, NAME, "no allocation awareness enabled"); } - IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.index()); + IndexMetaData indexMetaData = allocation.metaData().getIndexSafe(shardRouting.index()); int shardCount = indexMetaData.getNumberOfReplicas() + 1; // 1 for primary for (String awarenessAttribute : awarenessAttributes) { // the node the shard exists on must be associated with an 
awareness attribute diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index dcb6080bd1ed..e2124558f2dc 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -342,7 +342,7 @@ public class DiskThresholdDecider extends AllocationDecider { } // a flag for whether the primary shard has been previously allocated - IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndexName()); + IndexMetaData indexMetaData = allocation.metaData().getIndexSafe(shardRouting.index()); boolean primaryHasBeenAllocated = shardRouting.primary() && shardRouting.allocatedPostIndexCreate(indexMetaData); // checks for exact byte comparisons diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java index 80dada860228..0b69ba2a19e4 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java @@ -101,7 +101,7 @@ public class EnableAllocationDecider extends AllocationDecider { return allocation.decision(Decision.YES, NAME, "allocation disabling is ignored"); } - final IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndexName()); + final IndexMetaData indexMetaData = allocation.metaData().getIndexSafe(shardRouting.index()); final Allocation enable; if (INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.exists(indexMetaData.getSettings())) { enable = INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.get(indexMetaData.getSettings()); @@ -136,7 
+136,7 @@ public class EnableAllocationDecider extends AllocationDecider { return allocation.decision(Decision.YES, NAME, "rebalance disabling is ignored"); } - Settings indexSettings = allocation.routingNodes().metaData().index(shardRouting.index()).getSettings(); + Settings indexSettings = allocation.routingNodes().metaData().getIndexSafe(shardRouting.index()).getSettings(); final Rebalance enable; if (INDEX_ROUTING_REBALANCE_ENABLE_SETTING.exists(indexSettings)) { enable = INDEX_ROUTING_REBALANCE_ENABLE_SETTING.get(indexSettings); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java index c3ff0bb355ed..d1aa0d8b5839 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java @@ -102,7 +102,7 @@ public class FilterAllocationDecider extends AllocationDecider { Decision decision = shouldClusterFilter(node, allocation); if (decision != null) return decision; - decision = shouldIndexFilter(allocation.routingNodes().metaData().index(shardRouting.index()), node, allocation); + decision = shouldIndexFilter(allocation.routingNodes().metaData().getIndexSafe(shardRouting.index()), node, allocation); if (decision != null) return decision; return allocation.decision(Decision.YES, NAME, "node passes include/exclude/require filters"); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java index ab8be4dc8da5..04247525f1d2 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java +++ 
b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java @@ -86,7 +86,7 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { - IndexMetaData indexMd = allocation.routingNodes().metaData().index(shardRouting.index()); + IndexMetaData indexMd = allocation.routingNodes().metaData().getIndexSafe(shardRouting.index()); final int indexShardLimit = INDEX_TOTAL_SHARDS_PER_NODE_SETTING.get(indexMd.getSettings(), settings); // Capture the limit here in case it changes during this method's // execution @@ -123,7 +123,7 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { @Override public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { - IndexMetaData indexMd = allocation.routingNodes().metaData().index(shardRouting.index()); + IndexMetaData indexMd = allocation.routingNodes().metaData().getIndexSafe(shardRouting.index()); final int indexShardLimit = INDEX_TOTAL_SHARDS_PER_NODE_SETTING.get(indexMd.getSettings(), settings); // Capture the limit here in case it changes during this method's // execution diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index e84766d021bb..aadf2b9145a8 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -553,9 +553,9 @@ public abstract class StreamInput extends InputStream { } } - public T readOptionalWritable(T prototype) throws IOException { + public T readOptionalWritable(Writeable.IOFunction provider) throws IOException { if (readBoolean()) { - return (T) prototype.readFrom(this); + return provider.apply(this); } else { return null; } diff --git 
a/core/src/main/java/org/elasticsearch/common/io/stream/Writeable.java b/core/src/main/java/org/elasticsearch/common/io/stream/Writeable.java index 9ff3de736c5c..8f0cb3c96c7c 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/Writeable.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/Writeable.java @@ -37,4 +37,15 @@ public interface Writeable extends StreamableReader { * Write this into the {@linkplain StreamOutput}. */ void writeTo(StreamOutput out) throws IOException; + + @FunctionalInterface + interface IOFunction { + /** + * Applies this function to the given argument. + * + * @param t the function argument + * @return the function result + */ + R apply(T t) throws IOException; + } } diff --git a/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java b/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java index 757a78c3b5f0..1ccdb43cc455 100644 --- a/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java +++ b/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java @@ -269,7 +269,7 @@ public abstract class AsyncShardFetch implements Rel */ // visible for testing void asyncFetch(final ShardId shardId, final String[] nodesIds, final MetaData metaData) { - IndexMetaData indexMetaData = metaData.index(shardId.getIndex()); + IndexMetaData indexMetaData = metaData.getIndexSafe(shardId.getIndex()); logger.trace("{} fetching [{}] from {}", shardId, type, nodesIds); action.list(shardId, indexMetaData, nodesIds, new ActionListener>() { @Override diff --git a/core/src/main/java/org/elasticsearch/gateway/Gateway.java b/core/src/main/java/org/elasticsearch/gateway/Gateway.java index fd3bd9a0b6d4..2d1d48cbd83a 100644 --- a/core/src/main/java/org/elasticsearch/gateway/Gateway.java +++ b/core/src/main/java/org/elasticsearch/gateway/Gateway.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.discovery.Discovery; import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.index.Index; import java.nio.file.Path; import java.util.function.Supplier; @@ -79,7 +80,7 @@ public class Gateway extends AbstractComponent implements ClusterStateListener { } } - ObjectFloatHashMap indices = new ObjectFloatHashMap<>(); + ObjectFloatHashMap indices = new ObjectFloatHashMap<>(); MetaData electedGlobalState = null; int found = 0; for (TransportNodesListGatewayMetaState.NodeGatewayMetaState nodeState : nodesState) { @@ -93,7 +94,7 @@ public class Gateway extends AbstractComponent implements ClusterStateListener { electedGlobalState = nodeState.metaData(); } for (ObjectCursor cursor : nodeState.metaData().indices().values()) { - indices.addTo(cursor.value.getIndex().getName(), 1); + indices.addTo(cursor.value.getIndex(), 1); } } if (found < requiredAllocation) { @@ -107,7 +108,7 @@ public class Gateway extends AbstractComponent implements ClusterStateListener { final Object[] keys = indices.keys; for (int i = 0; i < keys.length; i++) { if (keys[i] != null) { - String index = (String) keys[i]; + Index index = (Index) keys[i]; IndexMetaData electedIndexMetaData = null; int indexMetaDataCount = 0; for (TransportNodesListGatewayMetaState.NodeGatewayMetaState nodeState : nodesState) { diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index 15277d6fb4fc..4f0a3bd714a9 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.index.Index; import java.nio.file.DirectoryStream; import java.nio.file.Files; @@ -61,7 +62,7 @@ public 
class GatewayMetaState extends AbstractComponent implements ClusterStateL @Nullable private volatile MetaData previousMetaData; - private volatile Set previouslyWrittenIndices = emptySet(); + private volatile Set previouslyWrittenIndices = emptySet(); @Inject public GatewayMetaState(Settings settings, NodeEnvironment nodeEnv, MetaStateService metaStateService, @@ -102,7 +103,6 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL @Override public void clusterChanged(ClusterChangedEvent event) { - Set relevantIndices = new HashSet<>(); final ClusterState state = event.state(); if (state.blocks().disableStatePersistence()) { // reset the current metadata, we need to start fresh... @@ -113,7 +113,7 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL MetaData newMetaData = state.metaData(); // we don't check if metaData changed, since we might be called several times and we need to check dangling... - + Set relevantIndices = Collections.emptySet(); boolean success = true; // write the state if this node is a master eligible node or if it is a data node and has shards allocated on it if (state.nodes().localNode().masterNode() || state.nodes().localNode().dataNode()) { @@ -126,14 +126,14 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL // persistence was disabled or the node was restarted), see getRelevantIndicesOnDataOnlyNode(). 
// we therefore have to check here if we have shards on disk and add their indices to the previouslyWrittenIndices list if (isDataOnlyNode(state)) { - Set newPreviouslyWrittenIndices = new HashSet<>(previouslyWrittenIndices.size()); + Set newPreviouslyWrittenIndices = new HashSet<>(previouslyWrittenIndices.size()); for (IndexMetaData indexMetaData : newMetaData) { IndexMetaData indexMetaDataOnDisk = null; if (indexMetaData.getState().equals(IndexMetaData.State.CLOSE)) { indexMetaDataOnDisk = metaStateService.loadIndexState(indexMetaData.getIndex().getName()); } if (indexMetaDataOnDisk != null) { - newPreviouslyWrittenIndices.add(indexMetaDataOnDisk.getIndex().getName()); + newPreviouslyWrittenIndices.add(indexMetaDataOnDisk.getIndex()); } } newPreviouslyWrittenIndices.addAll(previouslyWrittenIndices); @@ -152,9 +152,9 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL } } - Iterable writeInfo; + relevantIndices = getRelevantIndices(event.state(), event.previousState(), previouslyWrittenIndices); - writeInfo = resolveStatesToBeWritten(previouslyWrittenIndices, relevantIndices, previousMetaData, event.state().metaData()); + final Iterable writeInfo = resolveStatesToBeWritten(previouslyWrittenIndices, relevantIndices, previousMetaData, event.state().metaData()); // check and write changes in indices for (IndexMetaWriteInfo indexMetaWrite : writeInfo) { try { @@ -173,8 +173,8 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL } } - public static Set getRelevantIndices(ClusterState state, ClusterState previousState, Set previouslyWrittenIndices) { - Set relevantIndices; + public static Set getRelevantIndices(ClusterState state, ClusterState previousState, Set previouslyWrittenIndices) { + Set relevantIndices; if (isDataOnlyNode(state)) { relevantIndices = getRelevantIndicesOnDataOnlyNode(state, previousState, previouslyWrittenIndices); } else if (state.nodes().localNode().masterNode() == true) { @@ 
-264,10 +264,10 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL * @param newMetaData The new metadata * @return iterable over all indices states that should be written to disk */ - public static Iterable resolveStatesToBeWritten(Set previouslyWrittenIndices, Set potentiallyUnwrittenIndices, MetaData previousMetaData, MetaData newMetaData) { + public static Iterable resolveStatesToBeWritten(Set previouslyWrittenIndices, Set potentiallyUnwrittenIndices, MetaData previousMetaData, MetaData newMetaData) { List indicesToWrite = new ArrayList<>(); - for (String index : potentiallyUnwrittenIndices) { - IndexMetaData newIndexMetaData = newMetaData.index(index); + for (Index index : potentiallyUnwrittenIndices) { + IndexMetaData newIndexMetaData = newMetaData.getIndexSafe(index); IndexMetaData previousIndexMetaData = previousMetaData == null ? null : previousMetaData.index(index); String writeReason = null; if (previouslyWrittenIndices.contains(index) == false || previousIndexMetaData == null) { @@ -282,14 +282,14 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL return indicesToWrite; } - public static Set getRelevantIndicesOnDataOnlyNode(ClusterState state, ClusterState previousState, Set previouslyWrittenIndices) { + public static Set getRelevantIndicesOnDataOnlyNode(ClusterState state, ClusterState previousState, Set previouslyWrittenIndices) { RoutingNode newRoutingNode = state.getRoutingNodes().node(state.nodes().localNodeId()); if (newRoutingNode == null) { throw new IllegalStateException("cluster state does not contain this node - cannot write index meta state"); } - Set indices = new HashSet<>(); + Set indices = new HashSet<>(); for (ShardRouting routing : newRoutingNode) { - indices.add(routing.index().getName()); + indices.add(routing.index()); } // we have to check the meta data also: closed indices will not appear in the routing table, but we must still write the state if we have it written 
on disk previously for (IndexMetaData indexMetaData : state.metaData()) { @@ -300,19 +300,19 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL if (previousMetaData != null) { isOrWasClosed = isOrWasClosed || previousMetaData.getState().equals(IndexMetaData.State.CLOSE); } - if (previouslyWrittenIndices.contains(indexMetaData.getIndex().getName()) && isOrWasClosed) { - indices.add(indexMetaData.getIndex().getName()); + if (previouslyWrittenIndices.contains(indexMetaData.getIndex()) && isOrWasClosed) { + indices.add(indexMetaData.getIndex()); } } return indices; } - public static Set getRelevantIndicesForMasterEligibleNode(ClusterState state) { - Set relevantIndices; + public static Set getRelevantIndicesForMasterEligibleNode(ClusterState state) { + Set relevantIndices; relevantIndices = new HashSet<>(); // we have to iterate over the metadata to make sure we also capture closed indices for (IndexMetaData indexMetaData : state.metaData()) { - relevantIndices.add(indexMetaData.getIndex().getName()); + relevantIndices.add(indexMetaData.getIndex()); } return relevantIndices; } diff --git a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java index 5f6e50d6fc90..a456da0779d4 100644 --- a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java @@ -94,7 +94,7 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { continue; } - final IndexMetaData indexMetaData = metaData.index(shard.getIndexName()); + final IndexMetaData indexMetaData = metaData.getIndexSafe(shard.index()); // don't go wild here and create a new IndexSetting object for every shard this could cause a lot of garbage // on cluster restart if we allocate a boat load of shards if (shard.allocatedPostIndexCreate(indexMetaData) == false) { diff --git 
a/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java b/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java index 04f438c70fee..1d24baf561ab 100644 --- a/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java +++ b/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java @@ -23,6 +23,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; import java.util.Comparator; @@ -42,8 +43,8 @@ public abstract class PriorityComparator implements Comparator { final String o2Index = o2.getIndexName(); int cmp = 0; if (o1Index.equals(o2Index) == false) { - final Settings settingsO1 = getIndexSettings(o1Index); - final Settings settingsO2 = getIndexSettings(o2Index); + final Settings settingsO1 = getIndexSettings(o1.index()); + final Settings settingsO2 = getIndexSettings(o2.index()); cmp = Long.compare(priority(settingsO2), priority(settingsO1)); if (cmp == 0) { cmp = Long.compare(timeCreated(settingsO2), timeCreated(settingsO1)); @@ -63,7 +64,7 @@ public abstract class PriorityComparator implements Comparator { return settings.getAsLong(IndexMetaData.SETTING_CREATION_DATE, -1L); } - protected abstract Settings getIndexSettings(String index); + protected abstract Settings getIndexSettings(Index index); /** * Returns a PriorityComparator that uses the RoutingAllocation index metadata to access the index setting per index. 
@@ -71,8 +72,8 @@ public abstract class PriorityComparator implements Comparator { public static PriorityComparator getAllocationComparator(final RoutingAllocation allocation) { return new PriorityComparator() { @Override - protected Settings getIndexSettings(String index) { - IndexMetaData indexMetaData = allocation.metaData().index(index); + protected Settings getIndexSettings(Index index) { + IndexMetaData indexMetaData = allocation.metaData().getIndexSafe(index); return indexMetaData.getSettings(); } }; diff --git a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java index c94e1370c019..74511639d47e 100644 --- a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java @@ -74,7 +74,7 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { } // if we are allocating a replica because of index creation, no need to go and find a copy, there isn't one... - IndexMetaData indexMetaData = metaData.index(shard.getIndexName()); + IndexMetaData indexMetaData = metaData.getIndexSafe(shard.index()); if (shard.allocatedPostIndexCreate(indexMetaData) == false) { continue; } @@ -129,7 +129,7 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { } // if we are allocating a replica because of index creation, no need to go and find a copy, there isn't one... 
- IndexMetaData indexMetaData = metaData.index(shard.getIndexName()); + IndexMetaData indexMetaData = metaData.getIndexSafe(shard.index()); if (shard.allocatedPostIndexCreate(indexMetaData) == false) { continue; } diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java index 03f8dc817039..d1aa2a8b3834 100644 --- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java @@ -125,7 +125,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction logger.trace("{} loading local shard state info", shardId); ShardStateMetaData shardStateMetaData = ShardStateMetaData.FORMAT.loadLatestState(logger, nodeEnv.availableShardPaths(request.shardId)); if (shardStateMetaData != null) { - final IndexMetaData metaData = clusterService.state().metaData().index(shardId.getIndexName()); // it's a mystery why this is sometimes null + final IndexMetaData metaData = clusterService.state().metaData().index(shardId.getIndex()); // it's a mystery why this is sometimes null if (metaData != null) { ShardPath shardPath = null; try { diff --git a/core/src/main/java/org/elasticsearch/index/Index.java b/core/src/main/java/org/elasticsearch/index/Index.java index 983b977d6119..3ffe13e38b11 100644 --- a/core/src/main/java/org/elasticsearch/index/Index.java +++ b/core/src/main/java/org/elasticsearch/index/Index.java @@ -31,7 +31,7 @@ import java.io.IOException; */ public class Index implements Writeable { - private final static Index PROTO = new Index("", ""); + public static final Index[] EMPTY_ARRAY = new Index[0]; private final String name; private final String uuid; @@ -41,6 +41,12 @@ public class Index implements Writeable { this.uuid = uuid.intern(); } + public Index(StreamInput in) throws IOException { + 
this.name = in.readString(); + this.uuid = in.readString(); + } + + public String getName() { return this.name; } @@ -80,13 +86,9 @@ public class Index implements Writeable { return result; } - public static Index readIndex(StreamInput in) throws IOException { - return PROTO.readFrom(in); - } - @Override public Index readFrom(StreamInput in) throws IOException { - return new Index(in.readString(), in.readString()); + return new Index(in); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/shard/ShardId.java b/core/src/main/java/org/elasticsearch/index/shard/ShardId.java index 3dea5501c623..a9bc63ae44f6 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/ShardId.java +++ b/core/src/main/java/org/elasticsearch/index/shard/ShardId.java @@ -98,7 +98,7 @@ public class ShardId implements Streamable, Comparable { @Override public void readFrom(StreamInput in) throws IOException { - index = Index.readIndex(in); + index = new Index(in); shardId = in.readVInt(); hashCode = computeHashCode(); } diff --git a/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java b/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java index 7d24d4fa897d..da8e617759da 100644 --- a/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java +++ b/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java @@ -172,7 +172,7 @@ public class NodeIndicesStats implements Streamable, ToXContent { int entries = in.readVInt(); statsByShard = new HashMap<>(); for (int i = 0; i < entries; i++) { - Index index = Index.readIndex(in); + Index index = new Index(in); int indexShardListSize = in.readVInt(); List indexShardStats = new ArrayList<>(indexShardListSize); for (int j = 0; j < indexShardListSize; j++) { diff --git a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index af667f356e8d..5d501b65686f 100644 --- 
a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -188,15 +188,15 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent listener) { final ClusterState state = clusterService.state(); - final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, indicesOptions, aliasesOrIndices); + final Index[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, indicesOptions, aliasesOrIndices); final Map> results = ConcurrentCollections.newConcurrentMap(); int totalNumberOfShards = 0; int numberOfShards = 0; - for (String index : concreteIndices) { - final IndexMetaData indexMetaData = state.metaData().index(index); + for (Index index : concreteIndices) { + final IndexMetaData indexMetaData = state.metaData().getIndexSafe(index); totalNumberOfShards += indexMetaData.getTotalNumberOfShards(); numberOfShards += indexMetaData.getNumberOfShards(); - results.put(index, Collections.synchronizedList(new ArrayList<>())); + results.put(index.getName(), Collections.synchronizedList(new ArrayList<>())); } if (numberOfShards == 0) { @@ -129,8 +130,9 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL final int finalTotalNumberOfShards = totalNumberOfShards; final CountDown countDown = new CountDown(numberOfShards); - for (final String index : concreteIndices) { - final IndexMetaData indexMetaData = state.metaData().index(index); + for (final Index concreteIndex : concreteIndices) { + final String index = concreteIndex.getName(); + final IndexMetaData indexMetaData = state.metaData().getIndexSafe(concreteIndex); final int indexNumberOfShards = indexMetaData.getNumberOfShards(); for (int shard = 0; shard < indexNumberOfShards; shard++) { final ShardId shardId = new ShardId(indexMetaData.getIndex(), shard); @@ -240,7 +242,7 @@ public class SyncedFlushService extends 
AbstractComponent implements IndexEventL final IndexShardRoutingTable getShardRoutingTable(ShardId shardId, ClusterState state) { final IndexRoutingTable indexRoutingTable = state.routingTable().index(shardId.getIndexName()); if (indexRoutingTable == null) { - IndexMetaData index = state.getMetaData().index(shardId.getIndexName()); + IndexMetaData index = state.getMetaData().index(shardId.getIndex()); if (index != null && index.getState() == IndexMetaData.State.CLOSE) { throw new IndexClosedException(shardId.getIndex()); } diff --git a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index 6c09a608c2d8..5dc8af41e80b 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; @@ -115,7 +116,13 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe if (shardCanBeDeleted(event.state(), indexShardRoutingTable)) { ShardId shardId = indexShardRoutingTable.shardId(); IndexService indexService = indicesService.indexService(indexRoutingTable.getIndex()); - IndexSettings indexSettings = indexService != null ? 
indexService.getIndexSettings() : new IndexSettings(event.state().getMetaData().index(indexRoutingTable.getIndex()), settings); + final IndexSettings indexSettings; + if (indexService == null) { + IndexMetaData indexMetaData = event.state().getMetaData().getIndexSafe(indexRoutingTable.getIndex()); + indexSettings = new IndexSettings(indexMetaData, settings); + } else { + indexSettings = indexService.getIndexSettings(); + } if (indicesService.canDeleteShardContent(shardId, indexSettings)) { deleteShardIfExistElseWhere(event.state(), indexShardRoutingTable); } @@ -164,7 +171,7 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe private void deleteShardIfExistElseWhere(ClusterState state, IndexShardRoutingTable indexShardRoutingTable) { List> requests = new ArrayList<>(indexShardRoutingTable.size()); - String indexUUID = state.getMetaData().index(indexShardRoutingTable.shardId().getIndex()).getIndexUUID(); + String indexUUID = indexShardRoutingTable.shardId().getIndex().getUUID(); ClusterName clusterName = state.getClusterName(); for (ShardRouting shardRouting : indexShardRoutingTable) { // Node can't be null, because otherwise shardCanBeDeleted() would have returned false diff --git a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index e009cbf04d18..0422c0944e24 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -134,7 +134,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction(channel) { diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java index 94a82e8e7734..445f7099fef0 100644 --- 
a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java @@ -191,7 +191,7 @@ public class RestShardsAction extends AbstractCatAction { table.addCell(shard.getIndexName()); table.addCell(shard.id()); - IndexMetaData indexMeta = state.getState().getMetaData().index(shard.index()); + IndexMetaData indexMeta = state.getState().getMetaData().getIndexSafe(shard.index()); boolean usesShadowReplicas = false; if (indexMeta != null) { usesShadowReplicas = IndexMetaData.isIndexUsingShadowReplicas(indexMeta.getSettings()); diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index d7420d315d6a..6cd59546d988 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -195,7 +195,7 @@ public class SearchService extends AbstractLifecycleComponent imp public void afterIndexClosed(Index index, Settings indexSettings) { // once an index is closed we can just clean up all the pending search context information // to release memory and let references to the filesystem go etc. 
- IndexMetaData idxMeta = SearchService.this.clusterService.state().metaData().index(index.getName()); + IndexMetaData idxMeta = SearchService.this.clusterService.state().metaData().index(index); if (idxMeta != null && idxMeta.getState() == IndexMetaData.State.CLOSE) { // we need to check if it's really closed // since sometimes due to a relocation we already closed the shard and that causes the index to be closed diff --git a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 65fb88d4b645..acbc15be72c0 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -778,18 +778,19 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis * Check if any of the indices to be closed are currently being restored from a snapshot and fail closing if such an index * is found as closing an index that is being restored makes the index unusable (it cannot be recovered). 
*/ - public static void checkIndexClosing(ClusterState currentState, Set indices) { + public static void checkIndexClosing(ClusterState currentState, Set indices) { RestoreInProgress restore = currentState.custom(RestoreInProgress.TYPE); if (restore != null) { - Set indicesToFail = null; + Set indicesToFail = null; for (RestoreInProgress.Entry entry : restore.entries()) { for (ObjectObjectCursor shard : entry.shards()) { if (!shard.value.state().completed()) { - if (indices.contains(shard.key.getIndexName())) { + IndexMetaData indexMetaData = currentState.metaData().index(shard.key.getIndex()); + if (indexMetaData != null && indices.contains(indexMetaData)) { if (indicesToFail == null) { indicesToFail = new HashSet<>(); } - indicesToFail.add(shard.key.getIndexName()); + indicesToFail.add(shard.key.getIndex()); } } } diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 949befad059c..fb878d6cb38a 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -51,6 +51,7 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardRepository; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; @@ -204,7 +205,7 @@ public class SnapshotsService extends AbstractLifecycleComponent indices = Arrays.asList(indexNameExpressionResolver.concreteIndices(currentState, request.indicesOptions(), request.indices())); + List indices = Arrays.asList(indexNameExpressionResolver.concreteIndexNames(currentState, request.indicesOptions(), request.indices())); logger.trace("[{}][{}] 
creating snapshot for indices [{}]", request.repository(), request.name(), indices); newSnapshot = new SnapshotsInProgress.Entry(snapshotId, request.includeGlobalState(), request.partial(), State.INIT, indices, System.currentTimeMillis(), null); snapshots = new SnapshotsInProgress(newSnapshot); @@ -751,7 +752,7 @@ public class SnapshotsService extends AbstractLifecycleComponent closed = new HashSet<>(); for (ObjectObjectCursor entry : shards) { if (entry.value.state() == State.MISSING) { - if (metaData.hasIndex(entry.key.getIndex().getName()) && metaData.index(entry.key.getIndex()).getState() == IndexMetaData.State.CLOSE) { + if (metaData.hasIndex(entry.key.getIndex().getName()) && metaData.getIndexSafe(entry.key.getIndex()).getState() == IndexMetaData.State.CLOSE) { closed.add(entry.key.getIndex().getName()); } else { missing.add(entry.key.getIndex().getName()); @@ -1065,8 +1066,8 @@ public class SnapshotsService extends AbstractLifecycleComponent indices) { - Set indicesToFail = indicesToFailForCloseOrDeletion(currentState, indices); + public static void checkIndexDeletion(ClusterState currentState, Set indices) { + Set indicesToFail = indicesToFailForCloseOrDeletion(currentState, indices); if (indicesToFail != null) { throw new IllegalArgumentException("Cannot delete indices that are being snapshotted: " + indicesToFail + ". Try again after snapshot finishes or cancel the currently running snapshot."); @@ -1077,37 +1078,39 @@ public class SnapshotsService extends AbstractLifecycleComponent indices) { - Set indicesToFail = indicesToFailForCloseOrDeletion(currentState, indices); + public static void checkIndexClosing(ClusterState currentState, Set indices) { + Set indicesToFail = indicesToFailForCloseOrDeletion(currentState, indices); if (indicesToFail != null) { throw new IllegalArgumentException("Cannot close indices that are being snapshotted: " + indicesToFail + ". 
Try again after snapshot finishes or cancel the currently running snapshot."); } } - private static Set indicesToFailForCloseOrDeletion(ClusterState currentState, Set indices) { + private static Set indicesToFailForCloseOrDeletion(ClusterState currentState, Set indices) { SnapshotsInProgress snapshots = currentState.custom(SnapshotsInProgress.TYPE); - Set indicesToFail = null; + Set indicesToFail = null; if (snapshots != null) { for (final SnapshotsInProgress.Entry entry : snapshots.entries()) { if (entry.partial() == false) { if (entry.state() == State.INIT) { for (String index : entry.indices()) { - if (indices.contains(index)) { + IndexMetaData indexMetaData = currentState.metaData().index(index); + if (indexMetaData != null && indices.contains(indexMetaData)) { if (indicesToFail == null) { indicesToFail = new HashSet<>(); } - indicesToFail.add(index); + indicesToFail.add(indexMetaData.getIndex()); } } } else { for (ObjectObjectCursor shard : entry.shards()) { if (!shard.value.state().completed()) { - if (indices.contains(shard.key.getIndexName())) { + IndexMetaData indexMetaData = currentState.metaData().index(shard.key.getIndex()); + if (indexMetaData != null && indices.contains(indexMetaData)) { if (indicesToFail == null) { indicesToFail = new HashSet<>(); } - indicesToFail.add(shard.key.getIndexName()); + indicesToFail.add(shard.key.getIndex()); } } } diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index bf7983e3c9df..5846b1d5b0ff 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -405,9 +405,11 @@ public class TribeService extends AbstractLifecycleComponent { if (table == null) { continue; } - final IndexMetaData indexMetaData = currentState.metaData().index(tribeIndex.getIndex()); + //NOTE: we have to use the index name here since UUID are different even if the name is the same 
+ final String indexName = tribeIndex.getIndex().getName(); + final IndexMetaData indexMetaData = currentState.metaData().index(indexName); if (indexMetaData == null) { - if (!droppedIndices.contains(tribeIndex.getIndex().getName())) { + if (!droppedIndices.contains(indexName)) { // a new index, add it, and add the tribe name as a setting clusterStateChanged = true; logger.info("[{}] adding index {}", tribeName, tribeIndex.getIndex()); @@ -425,7 +427,7 @@ public class TribeService extends AbstractLifecycleComponent { logger.info("[{}] dropping index {} due to conflict with [{}]", tribeName, tribeIndex.getIndex(), existingFromTribe); removeIndex(blocks, metaData, routingTable, tribeIndex); - droppedIndices.add(tribeIndex.getIndex().getName()); + droppedIndices.add(indexName); } else if (onConflict.startsWith(ON_CONFLICT_PREFER)) { // on conflict, prefer a tribe... String preferredTribeName = onConflict.substring(ON_CONFLICT_PREFER.length()); diff --git a/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java index 6cd877315cd7..d0a0e094dcb7 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -191,7 +191,7 @@ public class TransportBulkActionTookTests extends ESTestCase { } @Override - public String[] concreteIndices(ClusterState state, IndicesRequest request) { + public String[] concreteIndexNames(ClusterState state, IndicesRequest request) { return request.indices(); } } diff --git a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index 2615e5a0b224..925d4a929015 100644 --- 
a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -168,7 +168,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { } @Override - public String[] concreteIndices(ClusterState state, IndicesRequest request) { + public String[] concreteIndexNames(ClusterState state, IndicesRequest request) { return request.indices(); } } diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index 3fc33477746b..631dad5d66dc 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -1065,8 +1065,8 @@ public class TransportReplicationActionTests extends ESTestCase { ClusterService clusterService, ThreadPool threadPool) { super(settings, actionName, transportService, clusterService, null, threadPool, - new ShardStateAction(settings, clusterService, transportService, null, null, threadPool), null, - new ActionFilters(new HashSet()), new IndexNameExpressionResolver(Settings.EMPTY), Request::new, Request::new, ThreadPool.Names.SAME); + new ShardStateAction(settings, clusterService, transportService, null, null, threadPool), + new ActionFilters(new HashSet()), new IndexNameExpressionResolver(Settings.EMPTY), Request::new, Request::new, ThreadPool.Names.SAME); } @Override diff --git a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java index 462a44e08b45..1bf1188ef53c 100644 --- 
a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java @@ -123,7 +123,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { } @Override - public String[] concreteIndices(ClusterState state, IndicesRequest request) { + public String[] concreteIndexNames(ClusterState state, IndicesRequest request) { return request.indices(); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java b/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java index 3562fa313ba2..9c0f1014dcfe 100644 --- a/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java @@ -64,7 +64,7 @@ public class ClusterStateHealthTests extends ESTestCase { routingTable.add(indexRoutingTable); } ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable.build()).build(); - String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterState, IndicesOptions.strictExpand(), (String[]) null); + String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, IndicesOptions.strictExpand(), (String[]) null); ClusterStateHealth clusterStateHealth = new ClusterStateHealth(clusterState, concreteIndices); logger.info("cluster status: {}, expected {}", clusterStateHealth.getStatus(), counter.status()); clusterStateHealth = maybeSerialize(clusterStateHealth); @@ -91,7 +91,7 @@ public class ClusterStateHealthTests extends ESTestCase { metaData.put(indexMetaData, true); routingTable.add(indexRoutingTable); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable.build()).build(); - 
String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterState, IndicesOptions.strictExpand(), (String[]) null); + String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, IndicesOptions.strictExpand(), (String[]) null); ClusterStateHealth clusterStateHealth = new ClusterStateHealth(clusterState, concreteIndices); clusterStateHealth = maybeSerialize(clusterStateHealth); // currently we have no cluster level validation failures as index validation issues are reported per index. diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index 1c61292d87c5..0e0c9fb442bc 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -61,79 +61,79 @@ public class IndexNameExpressionResolverTests extends ESTestCase { IndicesOptions[] indicesOptions = new IndicesOptions[]{ IndicesOptions.strictExpandOpen(), IndicesOptions.strictExpand()}; for (IndicesOptions options : indicesOptions) { IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, options); - String[] results = indexNameExpressionResolver.concreteIndices(context, "foo"); + String[] results = indexNameExpressionResolver.concreteIndexNames(context, "foo"); assertEquals(1, results.length); assertEquals("foo", results[0]); try { - indexNameExpressionResolver.concreteIndices(context, "bar"); + indexNameExpressionResolver.concreteIndexNames(context, "bar"); fail(); } catch (IndexNotFoundException e) { assertThat(e.getIndex().getName(), equalTo("bar")); } - results = indexNameExpressionResolver.concreteIndices(context, "foofoo", "foobar"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foofoo", "foobar"); assertEquals(2, 
results.length); assertThat(results, arrayContainingInAnyOrder("foofoo", "foobar")); - results = indexNameExpressionResolver.concreteIndices(context, "foofoobar"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foofoobar"); assertEquals(new HashSet<>(Arrays.asList("foo", "foobar")), new HashSet<>(Arrays.asList(results))); try { - indexNameExpressionResolver.concreteIndices(context, "bar"); + indexNameExpressionResolver.concreteIndexNames(context, "bar"); fail(); } catch (IndexNotFoundException e) { assertThat(e.getIndex().getName(), equalTo("bar")); } try { - indexNameExpressionResolver.concreteIndices(context, "foo", "bar"); + indexNameExpressionResolver.concreteIndexNames(context, "foo", "bar"); fail(); } catch (IndexNotFoundException e) { assertThat(e.getIndex().getName(), equalTo("bar")); } - results = indexNameExpressionResolver.concreteIndices(context, "barbaz", "foobar"); + results = indexNameExpressionResolver.concreteIndexNames(context, "barbaz", "foobar"); assertEquals(2, results.length); assertThat(results, arrayContainingInAnyOrder("foofoo", "foobar")); try { - indexNameExpressionResolver.concreteIndices(context, "barbaz", "bar"); + indexNameExpressionResolver.concreteIndexNames(context, "barbaz", "bar"); fail(); } catch (IndexNotFoundException e) { assertThat(e.getIndex().getName(), equalTo("bar")); } - results = indexNameExpressionResolver.concreteIndices(context, "baz*"); + results = indexNameExpressionResolver.concreteIndexNames(context, "baz*"); assertThat(results, emptyArray()); - results = indexNameExpressionResolver.concreteIndices(context, "foo", "baz*"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foo", "baz*"); assertEquals(1, results.length); assertEquals("foo", results[0]); } IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.strictExpandOpen()); - String[] results = indexNameExpressionResolver.concreteIndices(context, 
Strings.EMPTY_ARRAY); + String[] results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY); assertEquals(3, results.length); - results = indexNameExpressionResolver.concreteIndices(context, (String[])null); + results = indexNameExpressionResolver.concreteIndexNames(context, (String[])null); assertEquals(3, results.length); context = new IndexNameExpressionResolver.Context(state, IndicesOptions.strictExpand()); - results = indexNameExpressionResolver.concreteIndices(context, Strings.EMPTY_ARRAY); + results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY); assertEquals(4, results.length); - results = indexNameExpressionResolver.concreteIndices(context, (String[])null); + results = indexNameExpressionResolver.concreteIndexNames(context, (String[])null); assertEquals(4, results.length); context = new IndexNameExpressionResolver.Context(state, IndicesOptions.strictExpandOpen()); - results = indexNameExpressionResolver.concreteIndices(context, "foofoo*"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foofoo*"); assertEquals(3, results.length); assertThat(results, arrayContainingInAnyOrder("foo", "foobar", "foofoo")); context = new IndexNameExpressionResolver.Context(state, IndicesOptions.strictExpand()); - results = indexNameExpressionResolver.concreteIndices(context, "foofoo*"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foofoo*"); assertEquals(4, results.length); assertThat(results, arrayContainingInAnyOrder("foo", "foobar", "foofoo", "foofoo-closed")); } @@ -150,57 +150,57 @@ public class IndexNameExpressionResolverTests extends ESTestCase { IndicesOptions[] indicesOptions = new IndicesOptions[]{ IndicesOptions.lenientExpandOpen(), lenientExpand}; for (IndicesOptions options : indicesOptions) { IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, options); - String[] results = 
indexNameExpressionResolver.concreteIndices(context, "foo"); + String[] results = indexNameExpressionResolver.concreteIndexNames(context, "foo"); assertEquals(1, results.length); assertEquals("foo", results[0]); - results = indexNameExpressionResolver.concreteIndices(context, "bar"); + results = indexNameExpressionResolver.concreteIndexNames(context, "bar"); assertThat(results, emptyArray()); - results = indexNameExpressionResolver.concreteIndices(context, "foofoo", "foobar"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foofoo", "foobar"); assertEquals(2, results.length); assertThat(results, arrayContainingInAnyOrder("foofoo", "foobar")); - results = indexNameExpressionResolver.concreteIndices(context, "foofoobar"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foofoobar"); assertEquals(2, results.length); assertEquals(new HashSet<>(Arrays.asList("foo", "foobar")), new HashSet<>(Arrays.asList(results))); - results = indexNameExpressionResolver.concreteIndices(context, "foo", "bar"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foo", "bar"); assertEquals(1, results.length); assertThat(results, arrayContainingInAnyOrder("foo")); - results = indexNameExpressionResolver.concreteIndices(context, "barbaz", "foobar"); + results = indexNameExpressionResolver.concreteIndexNames(context, "barbaz", "foobar"); assertEquals(2, results.length); assertThat(results, arrayContainingInAnyOrder("foofoo", "foobar")); - results = indexNameExpressionResolver.concreteIndices(context, "barbaz", "bar"); + results = indexNameExpressionResolver.concreteIndexNames(context, "barbaz", "bar"); assertEquals(1, results.length); assertThat(results, arrayContainingInAnyOrder("foofoo")); - results = indexNameExpressionResolver.concreteIndices(context, "baz*"); + results = indexNameExpressionResolver.concreteIndexNames(context, "baz*"); assertThat(results, emptyArray()); - results = 
indexNameExpressionResolver.concreteIndices(context, "foo", "baz*"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foo", "baz*"); assertEquals(1, results.length); assertEquals("foo", results[0]); } IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.lenientExpandOpen()); - String[] results = indexNameExpressionResolver.concreteIndices(context, Strings.EMPTY_ARRAY); + String[] results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY); assertEquals(3, results.length); context = new IndexNameExpressionResolver.Context(state, lenientExpand); - results = indexNameExpressionResolver.concreteIndices(context, Strings.EMPTY_ARRAY); + results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY); assertEquals(Arrays.toString(results), 4, results.length); context = new IndexNameExpressionResolver.Context(state, IndicesOptions.lenientExpandOpen()); - results = indexNameExpressionResolver.concreteIndices(context, "foofoo*"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foofoo*"); assertEquals(3, results.length); assertThat(results, arrayContainingInAnyOrder("foo", "foobar", "foofoo")); context = new IndexNameExpressionResolver.Context(state, lenientExpand); - results = indexNameExpressionResolver.concreteIndices(context, "foofoo*"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foofoo*"); assertEquals(4, results.length); assertThat(results, arrayContainingInAnyOrder("foo", "foobar", "foofoo", "foofoo-closed")); } @@ -219,26 +219,26 @@ public class IndexNameExpressionResolverTests extends ESTestCase { for (IndicesOptions options : indicesOptions) { IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, options); - String[] results = indexNameExpressionResolver.concreteIndices(context, "foo"); + String[] results = 
indexNameExpressionResolver.concreteIndexNames(context, "foo"); assertEquals(1, results.length); assertEquals("foo", results[0]); try { - indexNameExpressionResolver.concreteIndices(context, "bar"); + indexNameExpressionResolver.concreteIndexNames(context, "bar"); fail(); } catch(IndexNotFoundException e) { assertThat(e.getIndex().getName(), equalTo("bar")); } try { - indexNameExpressionResolver.concreteIndices(context, "baz*"); + indexNameExpressionResolver.concreteIndexNames(context, "baz*"); fail(); } catch (IndexNotFoundException e) { assertThat(e.getIndex().getName(), equalTo("baz*")); } try { - indexNameExpressionResolver.concreteIndices(context, "foo", "baz*"); + indexNameExpressionResolver.concreteIndexNames(context, "foo", "baz*"); fail(); } catch (IndexNotFoundException e) { assertThat(e.getIndex().getName(), equalTo("baz*")); @@ -246,11 +246,11 @@ public class IndexNameExpressionResolverTests extends ESTestCase { } IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, expandOpen); - String[] results = indexNameExpressionResolver.concreteIndices(context, Strings.EMPTY_ARRAY); + String[] results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY); assertEquals(3, results.length); context = new IndexNameExpressionResolver.Context(state, expand); - results = indexNameExpressionResolver.concreteIndices(context, Strings.EMPTY_ARRAY); + results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY); assertEquals(4, results.length); } @@ -264,60 +264,60 @@ public class IndexNameExpressionResolverTests extends ESTestCase { // Only closed IndicesOptions options = IndicesOptions.fromOptions(false, true, false, true); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, options); - String[] results = indexNameExpressionResolver.concreteIndices(context, Strings.EMPTY_ARRAY); + String[] results = 
indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY); assertEquals(1, results.length); assertEquals("foo", results[0]); - results = indexNameExpressionResolver.concreteIndices(context, "foo*"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foo*"); assertEquals(1, results.length); assertEquals("foo", results[0]); // no wildcards, so wildcard expansion don't apply - results = indexNameExpressionResolver.concreteIndices(context, "bar"); + results = indexNameExpressionResolver.concreteIndexNames(context, "bar"); assertEquals(1, results.length); assertEquals("bar", results[0]); // Only open options = IndicesOptions.fromOptions(false, true, true, false); context = new IndexNameExpressionResolver.Context(state, options); - results = indexNameExpressionResolver.concreteIndices(context, Strings.EMPTY_ARRAY); + results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY); assertEquals(2, results.length); assertThat(results, arrayContainingInAnyOrder("bar", "foobar")); - results = indexNameExpressionResolver.concreteIndices(context, "foo*"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foo*"); assertEquals(1, results.length); assertEquals("foobar", results[0]); - results = indexNameExpressionResolver.concreteIndices(context, "bar"); + results = indexNameExpressionResolver.concreteIndexNames(context, "bar"); assertEquals(1, results.length); assertEquals("bar", results[0]); // Open and closed options = IndicesOptions.fromOptions(false, true, true, true); context = new IndexNameExpressionResolver.Context(state, options); - results = indexNameExpressionResolver.concreteIndices(context, Strings.EMPTY_ARRAY); + results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY); assertEquals(3, results.length); assertThat(results, arrayContainingInAnyOrder("bar", "foobar", "foo")); - results = indexNameExpressionResolver.concreteIndices(context, "foo*"); + results = 
indexNameExpressionResolver.concreteIndexNames(context, "foo*"); assertEquals(2, results.length); assertThat(results, arrayContainingInAnyOrder("foobar", "foo")); - results = indexNameExpressionResolver.concreteIndices(context, "bar"); + results = indexNameExpressionResolver.concreteIndexNames(context, "bar"); assertEquals(1, results.length); assertEquals("bar", results[0]); - results = indexNameExpressionResolver.concreteIndices(context, "-foo*"); + results = indexNameExpressionResolver.concreteIndexNames(context, "-foo*"); assertEquals(1, results.length); assertEquals("bar", results[0]); - results = indexNameExpressionResolver.concreteIndices(context, "-*"); + results = indexNameExpressionResolver.concreteIndexNames(context, "-*"); assertEquals(0, results.length); options = IndicesOptions.fromOptions(false, false, true, true); context = new IndexNameExpressionResolver.Context(state, options); try { - indexNameExpressionResolver.concreteIndices(context, "-*"); + indexNameExpressionResolver.concreteIndexNames(context, "-*"); fail(); } catch (IndexNotFoundException e) { assertThat(e.getResourceId().toString(), equalTo("[-*]")); @@ -336,21 +336,21 @@ public class IndexNameExpressionResolverTests extends ESTestCase { { IndicesOptions noExpandLenient = IndicesOptions.fromOptions(true, true, false, false); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, noExpandLenient); - String[] results = indexNameExpressionResolver.concreteIndices(context, "baz*"); + String[] results = indexNameExpressionResolver.concreteIndexNames(context, "baz*"); assertThat(results, emptyArray()); - results = indexNameExpressionResolver.concreteIndices(context, "foo", "baz*"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foo", "baz*"); assertEquals(1, results.length); assertEquals("foo", results[0]); - results = indexNameExpressionResolver.concreteIndices(context, "foofoobar"); + results = 
indexNameExpressionResolver.concreteIndexNames(context, "foofoobar"); assertEquals(2, results.length); assertThat(results, arrayContainingInAnyOrder("foo", "foobar")); - results = indexNameExpressionResolver.concreteIndices(context, (String[])null); + results = indexNameExpressionResolver.concreteIndexNames(context, (String[])null); assertEquals(0, results.length); - results = indexNameExpressionResolver.concreteIndices(context, Strings.EMPTY_ARRAY); + results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY); assertEquals(0, results.length); } @@ -359,17 +359,17 @@ public class IndexNameExpressionResolverTests extends ESTestCase { IndicesOptions noExpandDisallowEmpty = IndicesOptions.fromOptions(true, false, false, false); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, noExpandDisallowEmpty); try { - indexNameExpressionResolver.concreteIndices(context, "baz*"); + indexNameExpressionResolver.concreteIndexNames(context, "baz*"); fail(); } catch (IndexNotFoundException e) { assertThat(e.getIndex().getName(), equalTo("baz*")); } - String[] results = indexNameExpressionResolver.concreteIndices(context, "foo", "baz*"); + String[] results = indexNameExpressionResolver.concreteIndexNames(context, "foo", "baz*"); assertEquals(1, results.length); assertEquals("foo", results[0]); - results = indexNameExpressionResolver.concreteIndices(context, "foofoobar"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foofoobar"); assertEquals(2, results.length); assertThat(results, arrayContainingInAnyOrder("foo", "foobar")); } @@ -378,17 +378,17 @@ public class IndexNameExpressionResolverTests extends ESTestCase { { IndicesOptions noExpandErrorUnavailable = IndicesOptions.fromOptions(false, true, false, false); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, noExpandErrorUnavailable); - String[] results = 
indexNameExpressionResolver.concreteIndices(context, "baz*"); + String[] results = indexNameExpressionResolver.concreteIndexNames(context, "baz*"); assertThat(results, emptyArray()); try { - indexNameExpressionResolver.concreteIndices(context, "foo", "baz*"); + indexNameExpressionResolver.concreteIndexNames(context, "foo", "baz*"); fail(); } catch (IndexNotFoundException e) { assertThat(e.getIndex().getName(), equalTo("baz*")); } - results = indexNameExpressionResolver.concreteIndices(context, "foofoobar"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foofoobar"); assertEquals(2, results.length); assertThat(results, arrayContainingInAnyOrder("foo", "foobar")); } @@ -398,20 +398,20 @@ public class IndexNameExpressionResolverTests extends ESTestCase { IndicesOptions noExpandStrict = IndicesOptions.fromOptions(false, false, false, false); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, noExpandStrict); try { - indexNameExpressionResolver.concreteIndices(context, "baz*"); + indexNameExpressionResolver.concreteIndexNames(context, "baz*"); fail(); } catch (IndexNotFoundException e) { assertThat(e.getIndex().getName(), equalTo("baz*")); } try { - indexNameExpressionResolver.concreteIndices(context, "foo", "baz*"); + indexNameExpressionResolver.concreteIndexNames(context, "foo", "baz*"); fail(); } catch (IndexNotFoundException e) { assertThat(e.getIndex().getName(), equalTo("baz*")); } - String[] results = indexNameExpressionResolver.concreteIndices(context, "foofoobar"); + String[] results = indexNameExpressionResolver.concreteIndexNames(context, "foofoobar"); assertEquals(2, results.length); assertThat(results, arrayContainingInAnyOrder("foo", "foobar")); } @@ -429,7 +429,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { try { IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.strictSingleIndexNoExpandForbidClosed()); - 
indexNameExpressionResolver.concreteIndices(context, "baz*"); + indexNameExpressionResolver.concreteIndexNames(context, "baz*"); fail(); } catch (IndexNotFoundException e) { assertThat(e.getIndex().getName(), equalTo("baz*")); @@ -437,7 +437,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { try { IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.strictSingleIndexNoExpandForbidClosed()); - indexNameExpressionResolver.concreteIndices(context, "foo", "baz*"); + indexNameExpressionResolver.concreteIndexNames(context, "foo", "baz*"); fail(); } catch (IndexNotFoundException e) { assertThat(e.getIndex().getName(), equalTo("baz*")); @@ -445,7 +445,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { try { IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.strictSingleIndexNoExpandForbidClosed()); - indexNameExpressionResolver.concreteIndices(context, "foofoobar"); + indexNameExpressionResolver.concreteIndexNames(context, "foofoobar"); fail(); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), containsString("Alias [foofoobar] has more than one indices associated with it")); @@ -453,7 +453,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { try { IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.strictSingleIndexNoExpandForbidClosed()); - indexNameExpressionResolver.concreteIndices(context, "foo", "foofoobar"); + indexNameExpressionResolver.concreteIndexNames(context, "foo", "foofoobar"); fail(); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), containsString("Alias [foofoobar] has more than one indices associated with it")); @@ -461,7 +461,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { try { IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, 
IndicesOptions.strictSingleIndexNoExpandForbidClosed()); - indexNameExpressionResolver.concreteIndices(context, "foofoo-closed", "foofoobar"); + indexNameExpressionResolver.concreteIndexNames(context, "foofoo-closed", "foofoobar"); fail(); } catch(IndexClosedException e) { assertThat(e.getMessage(), equalTo("closed")); @@ -469,7 +469,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { } IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.strictSingleIndexNoExpandForbidClosed()); - String[] results = indexNameExpressionResolver.concreteIndices(context, "foo", "barbaz"); + String[] results = indexNameExpressionResolver.concreteIndexNames(context, "foo", "barbaz"); assertEquals(2, results.length); assertThat(results, arrayContainingInAnyOrder("foo", "foofoo")); } @@ -479,18 +479,18 @@ public class IndexNameExpressionResolverTests extends ESTestCase { IndicesOptions options = IndicesOptions.strictExpandOpen(); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, options); - String[] results = indexNameExpressionResolver.concreteIndices(context, Strings.EMPTY_ARRAY); + String[] results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY); assertThat(results, emptyArray()); try { - indexNameExpressionResolver.concreteIndices(context, "foo"); + indexNameExpressionResolver.concreteIndexNames(context, "foo"); fail(); } catch (IndexNotFoundException e) { assertThat(e.getIndex().getName(), equalTo("foo")); } - results = indexNameExpressionResolver.concreteIndices(context, "foo*"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foo*"); assertThat(results, emptyArray()); try { - indexNameExpressionResolver.concreteIndices(context, "foo*", "bar"); + indexNameExpressionResolver.concreteIndexNames(context, "foo*", "bar"); fail(); } catch (IndexNotFoundException e) { assertThat(e.getIndex().getName(), equalTo("bar")); @@ 
-498,18 +498,18 @@ public class IndexNameExpressionResolverTests extends ESTestCase { context = new IndexNameExpressionResolver.Context(state, IndicesOptions.lenientExpandOpen()); - results = indexNameExpressionResolver.concreteIndices(context, Strings.EMPTY_ARRAY); + results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY); assertThat(results, emptyArray()); - results = indexNameExpressionResolver.concreteIndices(context, "foo"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foo"); assertThat(results, emptyArray()); - results = indexNameExpressionResolver.concreteIndices(context, "foo*"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foo*"); assertThat(results, emptyArray()); - results = indexNameExpressionResolver.concreteIndices(context, "foo*", "bar"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foo*", "bar"); assertThat(results, emptyArray()); context = new IndexNameExpressionResolver.Context(state, IndicesOptions.fromOptions(true, false, true, false)); try { - indexNameExpressionResolver.concreteIndices(context, Strings.EMPTY_ARRAY); + indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY); } catch (IndexNotFoundException e) { assertThat(e.getResourceId().toString(), equalTo("[_all]")); } @@ -527,7 +527,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.strictExpandOpen()); try { - indexNameExpressionResolver.concreteIndices(context, "testZZZ"); + indexNameExpressionResolver.concreteIndexNames(context, "testZZZ"); fail("Expected IndexNotFoundException"); } catch(IndexNotFoundException e) { assertThat(e.getMessage(), is("no such index")); @@ -541,7 +541,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { ClusterState state = ClusterState.builder(new 
ClusterName("_name")).metaData(mdBuilder).build(); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.lenientExpandOpen()); - assertThat(newHashSet(indexNameExpressionResolver.concreteIndices(context, "testXXX", "testZZZ")), equalTo(newHashSet("testXXX"))); + assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "testXXX", "testZZZ")), equalTo(newHashSet("testXXX"))); } public void testConcreteIndicesIgnoreIndicesAllMissing() { @@ -552,7 +552,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.strictExpandOpen()); try { - indexNameExpressionResolver.concreteIndices(context, "testMo", "testMahdy"); + indexNameExpressionResolver.concreteIndexNames(context, "testMo", "testMahdy"); fail("Expected IndexNotFoundException"); } catch(IndexNotFoundException e) { assertThat(e.getMessage(), is("no such index")); @@ -565,7 +565,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { .put(indexBuilder("kuku")); ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.lenientExpandOpen()); - assertThat(newHashSet(indexNameExpressionResolver.concreteIndices(context, new String[]{})), equalTo(newHashSet("kuku", "testXXX"))); + assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(context, new String[]{})), equalTo(newHashSet("kuku", "testXXX"))); } public void testConcreteIndicesWildcardExpansion() { @@ -578,13 +578,13 @@ public class IndexNameExpressionResolverTests extends ESTestCase { ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, 
IndicesOptions.fromOptions(true, true, false, false)); - assertThat(newHashSet(indexNameExpressionResolver.concreteIndices(context, "testX*")), equalTo(new HashSet())); + assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "testX*")), equalTo(new HashSet())); context = new IndexNameExpressionResolver.Context(state, IndicesOptions.fromOptions(true, true, true, false)); - assertThat(newHashSet(indexNameExpressionResolver.concreteIndices(context, "testX*")), equalTo(newHashSet("testXXX", "testXXY"))); + assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "testX*")), equalTo(newHashSet("testXXX", "testXXY"))); context = new IndexNameExpressionResolver.Context(state, IndicesOptions.fromOptions(true, true, false, true)); - assertThat(newHashSet(indexNameExpressionResolver.concreteIndices(context, "testX*")), equalTo(newHashSet("testXYY"))); + assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "testX*")), equalTo(newHashSet("testXYY"))); context = new IndexNameExpressionResolver.Context(state, IndicesOptions.fromOptions(true, true, true, true)); - assertThat(newHashSet(indexNameExpressionResolver.concreteIndices(context, "testX*")), equalTo(newHashSet("testXXX", "testXXY", "testXYY"))); + assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "testX*")), equalTo(newHashSet("testXXX", "testXXY", "testXYY"))); } /** @@ -610,7 +610,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { // with no indices, asking for all indices should return empty list or exception, depending on indices options if (indicesOptions.allowNoIndices()) { - String[] concreteIndices = indexNameExpressionResolver.concreteIndices(context, allIndices); + String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(context, allIndices); assertThat(concreteIndices, notNullValue()); assertThat(concreteIndices.length, equalTo(0)); } else { @@ -625,7 +625,7 @@ public class 
IndexNameExpressionResolverTests extends ESTestCase { state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); context = new IndexNameExpressionResolver.Context(state, indicesOptions); if (indicesOptions.expandWildcardsOpen() || indicesOptions.expandWildcardsClosed() || indicesOptions.allowNoIndices()) { - String[] concreteIndices = indexNameExpressionResolver.concreteIndices(context, allIndices); + String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(context, allIndices); assertThat(concreteIndices, notNullValue()); int expectedNumberOfIndices = 0; if (indicesOptions.expandWildcardsOpen()) { @@ -646,7 +646,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { */ private void checkCorrectException(IndexNameExpressionResolver indexNameExpressionResolver, IndexNameExpressionResolver.Context context, String[] allIndices) { try { - indexNameExpressionResolver.concreteIndices(context, allIndices); + indexNameExpressionResolver.concreteIndexNames(context, allIndices); fail("wildcard expansion on should trigger IndexMissingException"); } catch (IndexNotFoundException e) { // expected @@ -668,12 +668,12 @@ public class IndexNameExpressionResolverTests extends ESTestCase { // asking for non existing wildcard pattern should return empty list or exception if (indicesOptions.allowNoIndices()) { - String[] concreteIndices = indexNameExpressionResolver.concreteIndices(context, "Foo*"); + String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(context, "Foo*"); assertThat(concreteIndices, notNullValue()); assertThat(concreteIndices.length, equalTo(0)); } else { try { - indexNameExpressionResolver.concreteIndices(context, "Foo*"); + indexNameExpressionResolver.concreteIndexNames(context, "Foo*"); fail("expecting exception when result empty and allowNoIndicec=false"); } catch (IndexNotFoundException e) { // expected exception @@ -798,51 +798,51 @@ public class IndexNameExpressionResolverTests 
extends ESTestCase { IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.strictExpandOpenAndForbidClosed()); try { - indexNameExpressionResolver.concreteIndices(context, "foo1-closed"); + indexNameExpressionResolver.concreteIndexNames(context, "foo1-closed"); fail("foo1-closed should be closed, but it is open"); } catch (IndexClosedException e) { // expected } try { - indexNameExpressionResolver.concreteIndices(context, "foobar1-closed"); + indexNameExpressionResolver.concreteIndexNames(context, "foobar1-closed"); fail("foo1-closed should be closed, but it is open"); } catch (IndexClosedException e) { // expected } context = new IndexNameExpressionResolver.Context(state, IndicesOptions.fromOptions(true, context.getOptions().allowNoIndices(), context.getOptions().expandWildcardsOpen(), context.getOptions().expandWildcardsClosed(), context.getOptions())); - String[] results = indexNameExpressionResolver.concreteIndices(context, "foo1-closed"); + String[] results = indexNameExpressionResolver.concreteIndexNames(context, "foo1-closed"); assertThat(results, emptyArray()); - results = indexNameExpressionResolver.concreteIndices(context, "foobar1-closed"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foobar1-closed"); assertThat(results, emptyArray()); context = new IndexNameExpressionResolver.Context(state, IndicesOptions.lenientExpandOpen()); - results = indexNameExpressionResolver.concreteIndices(context, "foo1-closed"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foo1-closed"); assertThat(results, arrayWithSize(1)); assertThat(results, arrayContaining("foo1-closed")); - results = indexNameExpressionResolver.concreteIndices(context, "foobar1-closed"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foobar1-closed"); assertThat(results, arrayWithSize(1)); assertThat(results, arrayContaining("foo1-closed")); // testing an alias pointing to three 
indices: context = new IndexNameExpressionResolver.Context(state, IndicesOptions.strictExpandOpenAndForbidClosed()); try { - indexNameExpressionResolver.concreteIndices(context, "foobar2-closed"); + indexNameExpressionResolver.concreteIndexNames(context, "foobar2-closed"); fail("foo2-closed should be closed, but it is open"); } catch (IndexClosedException e) { // expected } context = new IndexNameExpressionResolver.Context(state, IndicesOptions.fromOptions(true, context.getOptions().allowNoIndices(), context.getOptions().expandWildcardsOpen(), context.getOptions().expandWildcardsClosed(), context.getOptions())); - results = indexNameExpressionResolver.concreteIndices(context, "foobar2-closed"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foobar2-closed"); assertThat(results, arrayWithSize(1)); assertThat(results, arrayContaining("foo3")); context = new IndexNameExpressionResolver.Context(state, IndicesOptions.lenientExpandOpen()); - results = indexNameExpressionResolver.concreteIndices(context, "foobar2-closed"); + results = indexNameExpressionResolver.concreteIndexNames(context, "foobar2-closed"); assertThat(results, arrayWithSize(3)); assertThat(results, arrayContainingInAnyOrder("foo1-closed", "foo2-closed", "foo3")); } @@ -855,7 +855,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { IndicesOptions.lenientExpandOpen(), IndicesOptions.strictExpandOpenAndForbidClosed()}; for (IndicesOptions options : indicesOptions) { IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, options); - String[] results = indexNameExpressionResolver.concreteIndices(context, "index1", "index1", "alias1"); + String[] results = indexNameExpressionResolver.concreteIndexNames(context, "index1", "index1", "alias1"); assertThat(results, equalTo(new String[]{"index1"})); } } @@ -875,11 +875,11 @@ public class IndexNameExpressionResolverTests extends ESTestCase { ClusterState state = ClusterState.builder(new 
ClusterName("_name")).metaData(mdBuilder).build(); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.lenientExpandOpen()); - String[] strings = indexNameExpressionResolver.concreteIndices(context, "alias-*"); + String[] strings = indexNameExpressionResolver.concreteIndexNames(context, "alias-*"); assertArrayEquals(new String[] {"test-0"}, strings); context = new IndexNameExpressionResolver.Context(state, IndicesOptions.strictExpandOpen()); - strings = indexNameExpressionResolver.concreteIndices(context, "alias-*"); + strings = indexNameExpressionResolver.concreteIndexNames(context, "alias-*"); assertArrayEquals(new String[] {"test-0"}, strings); } diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java index 422aea701348..4be0cf15afc0 100644 --- a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; +import org.elasticsearch.index.Index; import org.elasticsearch.test.ESAllocationTestCase; import java.util.HashMap; @@ -172,12 +173,12 @@ public class GatewayMetaStateTests extends ESAllocationTestCase { boolean stateInMemory, boolean expectMetaData) throws Exception { MetaData inMemoryMetaData = null; - Set oldIndicesList = emptySet(); + Set oldIndicesList = emptySet(); if (stateInMemory) { inMemoryMetaData = event.previousState().metaData(); oldIndicesList = GatewayMetaState.getRelevantIndices(event.previousState(), event.previousState(), oldIndicesList); } - Set newIndicesList = 
GatewayMetaState.getRelevantIndices(event.state(),event.previousState(), oldIndicesList); + Set newIndicesList = GatewayMetaState.getRelevantIndices(event.state(),event.previousState(), oldIndicesList); // third, get the actual write info Iterator indices = GatewayMetaState.resolveStatesToBeWritten(oldIndicesList, newIndicesList, inMemoryMetaData, event.state().metaData()).iterator(); diff --git a/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java b/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java index 6da00d822a22..bb5a6ff748ef 100644 --- a/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; @@ -37,7 +38,7 @@ import java.util.Map; public class PriorityComparatorTests extends ESTestCase { public void testPreferNewIndices() { - RoutingNodes.UnassignedShards shards = new RoutingNodes.UnassignedShards((RoutingNodes) null); + RoutingNodes.UnassignedShards shards = new RoutingNodes.UnassignedShards(null); List shardRoutings = Arrays.asList(TestShardRouting.newShardRouting("oldest", 0, null, null, null, randomBoolean(), ShardRoutingState.UNASSIGNED, new UnassignedInfo(randomFrom(UnassignedInfo.Reason.values()), "foobar")), TestShardRouting.newShardRouting("newest", 0, null, null, null, randomBoolean(), ShardRoutingState.UNASSIGNED, new UnassignedInfo(randomFrom(UnassignedInfo.Reason.values()), "foobar"))); @@ -47,11 +48,11 @@ public class PriorityComparatorTests extends ESTestCase { } shards.sort(new PriorityComparator() { @Override - protected Settings getIndexSettings(String 
index) { - if ("oldest".equals(index)) { + protected Settings getIndexSettings(Index index) { + if ("oldest".equals(index.getName())) { return Settings.builder().put(IndexMetaData.SETTING_CREATION_DATE, 10) .put(IndexMetaData.SETTING_PRIORITY, 1).build(); - } else if ("newest".equals(index)) { + } else if ("newest".equals(index.getName())) { return Settings.builder().put(IndexMetaData.SETTING_CREATION_DATE, 100) .put(IndexMetaData.SETTING_PRIORITY, 1).build(); } @@ -77,11 +78,11 @@ public class PriorityComparatorTests extends ESTestCase { } shards.sort(new PriorityComparator() { @Override - protected Settings getIndexSettings(String index) { - if ("oldest".equals(index)) { + protected Settings getIndexSettings(Index index) { + if ("oldest".equals(index.getName())) { return Settings.builder().put(IndexMetaData.SETTING_CREATION_DATE, 10) .put(IndexMetaData.SETTING_PRIORITY, 100).build(); - } else if ("newest".equals(index)) { + } else if ("newest".equals(index.getName())) { return Settings.builder().put(IndexMetaData.SETTING_CREATION_DATE, 100) .put(IndexMetaData.SETTING_PRIORITY, 1).build(); } @@ -118,8 +119,8 @@ public class PriorityComparatorTests extends ESTestCase { } shards.sort(new PriorityComparator() { @Override - protected Settings getIndexSettings(String index) { - IndexMeta indexMeta = map.get(index); + protected Settings getIndexSettings(Index index) { + IndexMeta indexMeta = map.get(index.getName()); return indexMeta.settings; } }); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index d5efd6dcfc36..0a8d75d42f0d 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -182,7 +182,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { IndexRequest request = new 
IndexRequest("test", "type", "1").source(doc); request.process(metaData, mappingMetaData, true, "test"); assertThat(request.timestamp(), notNullValue()); - assertThat(request.timestamp(), is(MappingMetaData.Timestamp.parseStringTimestamp("1970-01-01", Joda.forPattern("YYYY-MM-dd"), Version.CURRENT))); + assertThat(request.timestamp(), is(MappingMetaData.Timestamp.parseStringTimestamp("1970-01-01", Joda.forPattern("YYYY-MM-dd")))); } // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java index aaac9c0cca4b..234311984b90 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java @@ -65,6 +65,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitC import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.startsWith; public class IndicesOptionsIntegrationIT extends ESIntegTestCase { @@ -685,7 +686,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { try { verify(client().admin().indices().prepareUpdateSettings("barbaz").setSettings(Settings.builder().put("e", "f")), false); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("Can't update non dynamic settings[[index.e]] for open indices [[barbaz]]")); + assertThat(e.getMessage(), startsWith("Can't update non dynamic settings [[index.e]] for open indices [[barbaz")); } verify(client().admin().indices().prepareUpdateSettings("baz*").setSettings(Settings.builder().put("a", "b")), true); } diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java 
b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java index d85849570cf1..642d646fe9e5 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java @@ -137,7 +137,8 @@ public class UpdateSettingsIT extends ESIntegTestCase { .execute().actionGet(); fail("can't change number of replicas on a closed index"); } catch (IllegalArgumentException ex) { - assertEquals(ex.getMessage(), "Can't update [index.number_of_replicas] on closed indices [[test]] - can leave index in an unopenable state"); + assertTrue(ex.getMessage(), ex.getMessage().startsWith("Can't update [index.number_of_replicas] on closed indices [[test/")); + assertTrue(ex.getMessage(), ex.getMessage().endsWith("]] - can leave index in an unopenable state")); // expected } client().admin().indices().prepareUpdateSettings("test") diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 5dc6d59692bc..5a8d1daf5884 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -1880,7 +1880,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas client.admin().indices().prepareDelete("test-idx-1").get(); fail("Expected deleting index to fail during snapshot"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("Cannot delete indices that are being snapshotted: [test-idx-1]")); + assertThat(e.getMessage(), containsString("Cannot delete indices that are being snapshotted: [[test-idx-1/")); } } else { try { @@ -1888,7 +1888,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas client.admin().indices().prepareClose("test-idx-1").get(); 
fail("Expected closing index to fail during snapshot"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("Cannot close indices that are being snapshotted: [test-idx-1]")); + assertThat(e.getMessage(), containsString("Cannot close indices that are being snapshotted: [[test-idx-1/")); } } } @@ -1964,9 +1964,10 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas client.admin().indices().prepareClose("test-idx-1").get(); fail("Expected closing index to fail during restore"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("Cannot close indices that are being restored: [test-idx-1]")); + assertThat(e.getMessage(), containsString("Cannot close indices that are being restored: [[test-idx-1/")); } } finally { + // unblock even if the try block fails otherwise we will get bogus failures when we delete all indices in test teardown. logger.info("--> unblocking all data nodes"); unblockAllDataNodes("test-repo"); } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java index 60b9460bb12e..80274807b3b0 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java @@ -370,7 +370,7 @@ public class IndicesRequestTests extends ESIntegTestCase { internalCluster().clientNodeClient().admin().indices().flush(flushRequest).actionGet(); clearInterceptedActions(); - String[] indices = new IndexNameExpressionResolver(Settings.EMPTY).concreteIndices(client().admin().cluster().prepareState().get().getState(), flushRequest); + String[] indices = new IndexNameExpressionResolver(Settings.EMPTY).concreteIndexNames(client().admin().cluster().prepareState().get().getState(), flushRequest); 
assertIndicesSubset(Arrays.asList(indices), indexShardActions); } @@ -393,7 +393,7 @@ public class IndicesRequestTests extends ESIntegTestCase { internalCluster().clientNodeClient().admin().indices().refresh(refreshRequest).actionGet(); clearInterceptedActions(); - String[] indices = new IndexNameExpressionResolver(Settings.EMPTY).concreteIndices(client().admin().cluster().prepareState().get().getState(), refreshRequest); + String[] indices = new IndexNameExpressionResolver(Settings.EMPTY).concreteIndexNames(client().admin().cluster().prepareState().get().getState(), refreshRequest); assertIndicesSubset(Arrays.asList(indices), indexShardActions); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index dbe464e98b45..87fa15cd9ac7 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -96,9 +96,9 @@ public class TransportReindexAction extends HandledTransportAction Date: Mon, 14 Mar 2016 12:24:58 +0100 Subject: [PATCH 215/320] Updated link to Joda time zones --- .../aggregations/bucket/daterange-aggregation.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc b/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc index e649928810b9..a69b2b3cb113 100644 --- a/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc @@ -118,7 +118,7 @@ Any characters in the pattern that are not in the ranges of ['a'..'z'] and ['A'. Dates can be converted from another time zone to UTC by specifying the `time_zone` parameter. Time zones may either be specified as an ISO 8601 UTC offset (e.g. 
+01:00 or -08:00) or as one of -the the http://joda-time.sourceforge.net/timezones.html[time zone ids] from the TZ database. +the http://www.joda.org/joda-time/timezones.html[time zone ids] from the TZ database. The `time_zone` parameter is also applied to rounding in date math expressions. As an example, to round to the beginning of the day in the CET time zone, you can do the following: From 5bff6e421843dffe9a2a1a856a257398cc3a858e Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Mon, 7 Dec 2015 17:16:55 +0100 Subject: [PATCH 216/320] Refactor FieldSortBuilder * adds json parsing, * refactors json serialisation, * adds writable parsing and serialisation, * adds json and writable roundtrip test --- .../elasticsearch/common/unit/Fuzziness.java | 1 - .../search/sort/FieldSortBuilder.java | 229 +++++++++++++++++- .../search/sort/ParameterParser.java | 39 +++ .../AbstractSearchSourceItemTestCase.java | 162 +++++++++++++ .../search/sort/FieldSortBuilderTests.java | 90 +++++++ 5 files changed, 514 insertions(+), 7 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/search/sort/ParameterParser.java create mode 100644 core/src/test/java/org/elasticsearch/search/sort/AbstractSearchSourceItemTestCase.java create mode 100644 core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java diff --git a/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java b/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java index 24a727691ccc..831fbe505bb0 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java +++ b/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java @@ -67,7 +67,6 @@ public final class Fuzziness implements ToXContent, Writeable { /** * Creates a {@link Fuzziness} instance from an edit distance. The value must be one of [0, 1, 2] - * * Note: Using this method only makes sense if the field you are applying Fuzziness to is some sort of string. 
*/ public static Fuzziness fromEdits(int edits) { diff --git a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index e805e21eff53..1d72dd983574 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -19,16 +19,36 @@ package org.elasticsearch.search.sort; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; import java.io.IOException; +import java.util.Objects; /** * A sort builder to sort based on a document field. 
*/ public class FieldSortBuilder extends SortBuilder { + static final FieldSortBuilder PROTOTYPE = new FieldSortBuilder(""); + public static final String NAME = "field_sort"; + public static final ParseField NESTED_PATH = new ParseField("nested_path"); + public static final ParseField NESTED_FILTER = new ParseField("nested_filter"); + public static final ParseField MISSING = new ParseField("missing"); + public static final ParseField ORDER = new ParseField("order"); + public static final ParseField SORT_MODE = new ParseField("mode"); + public static final ParseField UNMAPPED_TYPE = new ParseField("unmapped_type"); + private final String fieldName; private Object missing; @@ -41,6 +61,16 @@ public class FieldSortBuilder extends SortBuilder { private String nestedPath; + /** Copy constructor. */ + public FieldSortBuilder(FieldSortBuilder template) { + this(template.fieldName); + this.order(template.order()); + this.missing(template.missing()); + this.unmappedType(template.unmappedType()); + this.sortMode(template.sortMode()); + this.setNestedFilter(template.getNestedFilter()); + this.setNestedPath(template.getNestedPath()); + } /** * Constructs a new sort based on a document field. * @@ -52,16 +82,30 @@ public class FieldSortBuilder extends SortBuilder { } this.fieldName = fieldName; } + + /** Returns the document field this sort should be based on. */ + public String getFieldName() { + return this.fieldName; + } /** * Sets the value when a field is missing in a doc. Can also be set to _last or * _first to sort missing last or first respectively. */ public FieldSortBuilder missing(Object missing) { - this.missing = missing; + if (missing instanceof String) { + this.missing = BytesRefs.toBytesRef(missing); + } else { + this.missing = missing; + } return this; } + /** Returns the value used when a field is missing in a doc. */ + public Object missing() { + return this.missing; + } + /** * Set the type to use in case the current field is not mapped in an index. 
* Specifying a type tells Elasticsearch what type the sort values should have, which is important @@ -74,9 +118,16 @@ public class FieldSortBuilder extends SortBuilder { return this; } + /** Returns the type to use in case the current field is not mapped in an index. */ + public String unmappedType() { + return this.unmappedType; + } + /** * Defines what values to pick in the case a document contains multiple values for the targeted sort field. * Possible values: min, max, sum and avg + * + * TODO would love to see an enum here *

      * The last two values are only applicable for number based fields. */ @@ -85,15 +136,26 @@ public class FieldSortBuilder extends SortBuilder { return this; } + /** Returns what values to pick in the case a document contains multiple values for the targeted sort field. */ + public String sortMode() { + return this.sortMode; + } /** * Sets the nested filter that the nested objects should match with in order to be taken into account * for sorting. + * + * TODO should the above getters and setters be deprecated/ changed in favour of real getters and setters? */ public FieldSortBuilder setNestedFilter(QueryBuilder nestedFilter) { this.nestedFilter = nestedFilter; return this; } + /** Returns the nested filter that the nested objects should match with in order to be taken into account + * for sorting. */ + public QueryBuilder getNestedFilter() { + return this.nestedFilter; + } /** * Sets the nested path if sorting occurs on a field that is inside a nested object. By default when sorting on a @@ -104,26 +166,181 @@ public class FieldSortBuilder extends SortBuilder { return this; } + /** Returns the nested path if sorting occurs in a field that is inside a nested object. 
*/ + public String getNestedPath() { + return this.nestedPath; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(fieldName); builder.field(ORDER_FIELD.getPreferredName(), order); if (missing != null) { - builder.field("missing", missing); + if (missing instanceof BytesRef) { + builder.field(MISSING.getPreferredName(), ((BytesRef) missing).utf8ToString()); + } else { + builder.field(MISSING.getPreferredName(), missing); + } } if (unmappedType != null) { - builder.field(SortParseElement.UNMAPPED_TYPE.getPreferredName(), unmappedType); + builder.field(UNMAPPED_TYPE.getPreferredName(), unmappedType); } if (sortMode != null) { - builder.field("mode", sortMode); + builder.field(SORT_MODE.getPreferredName(), sortMode); } if (nestedFilter != null) { - builder.field("nested_filter", nestedFilter, params); + builder.field(NESTED_FILTER.getPreferredName(), nestedFilter, params); } if (nestedPath != null) { - builder.field("nested_path", nestedPath); + builder.field(NESTED_PATH.getPreferredName(), nestedPath); } builder.endObject(); return builder; } + + @Override + public boolean equals(Object other) { + if (! 
(other instanceof FieldSortBuilder)) { + return false; + } + FieldSortBuilder builder = (FieldSortBuilder) other; + return (Objects.equals(this.fieldName, builder.fieldName) && + Objects.equals(this.nestedFilter, builder.nestedFilter) && + Objects.equals(this.nestedPath, builder.nestedPath) && + Objects.equals(this.missing, builder.missing) && + Objects.equals(this.order, builder.order) && + Objects.equals(this.sortMode, builder.sortMode) && + Objects.equals(this.unmappedType, builder.unmappedType)); + } + + @Override + public int hashCode() { + return Objects.hash(this.fieldName, this.nestedFilter, this.nestedPath, + this.missing, this.order, this.sortMode, this.unmappedType); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(this.fieldName); + if (this.nestedFilter != null) { + out.writeBoolean(true); + out.writeQuery(this.nestedFilter); + } else { + out.writeBoolean(false); + } + out.writeOptionalString(this.nestedPath); + if (this.missing != null) { + out.writeBoolean(true); + out.writeGenericValue(this.missing); + } else { + out.writeBoolean(false); + } + + if (this.order != null) { + out.writeBoolean(true); + this.order.writeTo(out); + } else { + out.writeBoolean(false); + } + + out.writeOptionalString(this.sortMode); + out.writeOptionalString(this.unmappedType); + } + + @Override + public FieldSortBuilder readFrom(StreamInput in) throws IOException { + String fieldName = in.readString(); + FieldSortBuilder result = new FieldSortBuilder(fieldName); + if (in.readBoolean()) { + QueryBuilder query = in.readQuery(); + result.setNestedFilter(query); + } + result.setNestedPath(in.readOptionalString()); + if (in.readBoolean()) { + result.missing(in.readGenericValue()); + } + if (in.readBoolean()) { + result.order(SortOrder.readOrderFrom(in)); + } + result.sortMode(in.readOptionalString()); + result.unmappedType(in.readOptionalString()); + return 
result; + } + + @Override + public FieldSortBuilder fromXContent(QueryParseContext context, String elementName) throws IOException { + XContentParser parser = context.parser(); + + String fieldName = null; + QueryBuilder nestedFilter = null; + String nestedPath = null; + Object missing = null; + SortOrder order = null; + String sortMode = null; + String unmappedType = null; + + String currentFieldName = null; + XContentParser.Token token; + fieldName = elementName; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + if (context.parseFieldMatcher().match(currentFieldName, NESTED_FILTER)) { + nestedFilter = context.parseInnerQueryBuilder(); + } + } else if (token.isValue()) { + if (context.parseFieldMatcher().match(currentFieldName, NESTED_PATH)) { + nestedPath = parser.text(); + } else if (context.parseFieldMatcher().match(currentFieldName, MISSING)) { + missing = parser.objectBytes(); + } else if (context.parseFieldMatcher().match(currentFieldName, ORDER)) { + String sortOrder = parser.text(); + if ("asc".equals(sortOrder)) { + order = SortOrder.ASC; + } else if ("desc".equals(sortOrder)) { + order = SortOrder.DESC; + } else { + throw new IllegalStateException("Sort order " + sortOrder + " not supported."); + } + } else if (context.parseFieldMatcher().match(currentFieldName, SORT_MODE)) { + sortMode = parser.text(); + } else if (context.parseFieldMatcher().match(currentFieldName, UNMAPPED_TYPE)) { + unmappedType = parser.text(); + } + } + } + } + } + + FieldSortBuilder builder = new FieldSortBuilder(fieldName); + if (nestedFilter != null) { + builder.setNestedFilter(nestedFilter); + } + if (nestedPath != null) { + 
builder.setNestedPath(nestedPath); + } + if (missing != null) { + builder.missing(missing); + } + if (order != null) { + builder.order(order); + } + if (sortMode != null) { + builder.sortMode(sortMode); + } + if (unmappedType != null) { + builder.unmappedType(unmappedType); + } + return builder; + } + } diff --git a/core/src/main/java/org/elasticsearch/search/sort/ParameterParser.java b/core/src/main/java/org/elasticsearch/search/sort/ParameterParser.java new file mode 100644 index 000000000000..74f0628fcc6b --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/sort/ParameterParser.java @@ -0,0 +1,39 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.sort; + +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.index.query.QueryParseContext; + +import java.io.IOException; + +public interface ParameterParser { + /** + * Creates a new item from the json held by the {@link ParameterParser} + * in {@link org.elasticsearch.common.xcontent.XContent} format + * + * @param context + * the input parse context. 
The state on the parser contained in + * this context will be changed as a side effect of this method + * call + * @return the new item + */ + T fromXContent(QueryParseContext context, String elementName) throws IOException; +} diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSearchSourceItemTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSearchSourceItemTestCase.java new file mode 100644 index 000000000000..896bce1843f4 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSearchSourceItemTestCase.java @@ -0,0 +1,162 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.sort; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.test.ESTestCase; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.io.IOException; + +import static org.hamcrest.Matchers.*; + +//TODO maybe merge with AbstractsortBuilderTestCase once #14933 is in? 
+public abstract class AbstractSearchSourceItemTestCase & ToXContent & ParameterParser> extends ESTestCase { + + protected static NamedWriteableRegistry namedWriteableRegistry; + + private static final int NUMBER_OF_TESTBUILDERS = 20; + static IndicesQueriesRegistry indicesQueriesRegistry; + + @BeforeClass + public static void init() { + namedWriteableRegistry = new NamedWriteableRegistry(); + namedWriteableRegistry.registerPrototype(FieldSortBuilder.class, FieldSortBuilder.PROTOTYPE); + indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).buildQueryParserRegistry(); + } + + @AfterClass + public static void afterClass() throws Exception { + namedWriteableRegistry = null; + } + + /** Returns random sort that is put under test */ + protected abstract T createTestItem(); + + /** Returns mutated version of original so the returned sort is different in terms of equals/hashcode */ + protected abstract T mutate(T original) throws IOException; + + /** + * Test that creates new sort from a random test sort and checks both for equality + */ + public void testFromXContent() throws IOException { + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + T testItem = createTestItem(); + + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + if (randomBoolean()) { + builder.prettyPrint(); + } + builder.startObject(); + testItem.toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + + XContentParser itemParser = XContentHelper.createParser(builder.bytes()); + itemParser.nextToken(); + + /* + * filter out name of sort, or field name to sort on for element fieldSort + */ + itemParser.nextToken(); + String elementName = itemParser.currentName(); + itemParser.nextToken(); + + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); + context.reset(itemParser); + NamedWriteable parsedItem = testItem.fromXContent(context, elementName); + assertNotSame(testItem, parsedItem); + 
assertEquals(testItem, parsedItem); + assertEquals(testItem.hashCode(), parsedItem.hashCode()); + } + } + + /** + * Test serialization and deserialization of the test sort. + */ + public void testSerialization() throws IOException { + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + T testsort = createTestItem(); + T deserializedsort = copyItem(testsort); + assertEquals(testsort, deserializedsort); + assertEquals(testsort.hashCode(), deserializedsort.hashCode()); + assertNotSame(testsort, deserializedsort); + } + } + + /** + * Test equality and hashCode properties + */ + public void testEqualsAndHashcode() throws IOException { + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + T firstsort = createTestItem(); + assertFalse("sort is equal to null", firstsort.equals(null)); + assertFalse("sort is equal to incompatible type", firstsort.equals("")); + assertTrue("sort is not equal to self", firstsort.equals(firstsort)); + assertThat("same sort's hashcode returns different values if called multiple times", firstsort.hashCode(), + equalTo(firstsort.hashCode())); + assertThat("different sorts should not be equal", mutate(firstsort), not(equalTo(firstsort))); + assertThat("different sorts should have different hashcode", mutate(firstsort).hashCode(), not(equalTo(firstsort.hashCode()))); + + T secondsort = copyItem(firstsort); + assertTrue("sort is not equal to self", secondsort.equals(secondsort)); + assertTrue("sort is not equal to its copy", firstsort.equals(secondsort)); + assertTrue("equals is not symmetric", secondsort.equals(firstsort)); + assertThat("sort copy's hashcode is different from original hashcode", secondsort.hashCode(), equalTo(firstsort.hashCode())); + + T thirdsort = copyItem(secondsort); + assertTrue("sort is not equal to self", thirdsort.equals(thirdsort)); + assertTrue("sort is not equal to its copy", secondsort.equals(thirdsort)); + assertThat("sort copy's hashcode is different from original hashcode", secondsort.hashCode(), 
equalTo(thirdsort.hashCode())); + assertTrue("equals is not transitive", firstsort.equals(thirdsort)); + assertThat("sort copy's hashcode is different from original hashcode", firstsort.hashCode(), equalTo(thirdsort.hashCode())); + assertTrue("equals is not symmetric", thirdsort.equals(secondsort)); + assertTrue("equals is not symmetric", thirdsort.equals(firstsort)); + } + } + + protected T copyItem(T original) throws IOException { + try (BytesStreamOutput output = new BytesStreamOutput()) { + original.writeTo(output); + try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + @SuppressWarnings("unchecked") + T prototype = (T) namedWriteableRegistry.getPrototype(getPrototype(), original.getWriteableName()); + T copy = (T) prototype.readFrom(in); + return copy; + } + } + } + + protected abstract Class getPrototype(); +} diff --git a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java new file mode 100644 index 000000000000..6353231408fd --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -0,0 +1,90 @@ +/* +x * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.sort; + +import java.io.IOException; + +public class FieldSortBuilderTests extends AbstractSearchSourceItemTestCase { + + @SuppressWarnings("unchecked") + public Class getPrototype() { + return (Class) FieldSortBuilder.PROTOTYPE.getClass(); + } + + @Override + protected FieldSortBuilder createTestItem() { + String fieldName = randomAsciiOfLengthBetween(1, 10); + FieldSortBuilder builder = new FieldSortBuilder(fieldName); + if (randomBoolean()) { + builder.order(RandomSortDataGenerator.order(builder.order())); + } + + if (randomBoolean()) { + builder.missing(RandomSortDataGenerator.missing(builder.missing())); + } + + if (randomBoolean()) { + builder.unmappedType(RandomSortDataGenerator.randomAscii(builder.unmappedType())); + } + + if (randomBoolean()) { + builder.sortMode(RandomSortDataGenerator.mode(builder.sortMode())); + } + + if (randomBoolean()) { + builder.setNestedFilter(RandomSortDataGenerator.nestedFilter(builder.getNestedFilter())); + } + + if (randomBoolean()) { + builder.setNestedPath(RandomSortDataGenerator.randomAscii(builder.getNestedPath())); + } + + return builder; + } + + @Override + protected FieldSortBuilder mutate(FieldSortBuilder original) throws IOException { + FieldSortBuilder mutated = new FieldSortBuilder(original); + int parameter = randomIntBetween(0, 5); + switch (parameter) { + case 0: + mutated.setNestedPath(RandomSortDataGenerator.randomAscii(mutated.getNestedPath())); + break; + case 1: + mutated.setNestedFilter(RandomSortDataGenerator.nestedFilter(mutated.getNestedFilter())); + break; + case 2: + mutated.sortMode(RandomSortDataGenerator.mode(mutated.sortMode())); + break; + case 3: + mutated.unmappedType(RandomSortDataGenerator.randomAscii(mutated.unmappedType())); + break; + case 4: + mutated.missing(RandomSortDataGenerator.missing(mutated.missing())); + break; + case 5: + 
mutated.order(RandomSortDataGenerator.order(mutated.order())); + break; + default: + throw new IllegalStateException("Unsupported mutation."); + } + return mutated; + } +} From dd6e835e3039102b75d4fba81a694a1b3689b12e Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Wed, 27 Jan 2016 15:16:49 +0100 Subject: [PATCH 217/320] First comments. --- .../search/sort/FieldSortBuilder.java | 24 ++++++++++--------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index 1d72dd983574..ff9192c039f9 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -103,7 +103,10 @@ public class FieldSortBuilder extends SortBuilder { /** Returns the value used when a field is missing in a doc. */ public Object missing() { - return this.missing; + if (missing instanceof BytesRef) { + return ((BytesRef) missing).utf8ToString(); + } + return missing; } /** @@ -200,9 +203,14 @@ public class FieldSortBuilder extends SortBuilder { @Override public boolean equals(Object other) { - if (! 
(other instanceof FieldSortBuilder)) { + if (this == other) { + return true; + } + + if (other== null || getClass() != other.getClass()) { return false; } + FieldSortBuilder builder = (FieldSortBuilder) other; return (Objects.equals(this.fieldName, builder.fieldName) && Objects.equals(this.nestedFilter, builder.nestedFilter) && @@ -234,12 +242,7 @@ public class FieldSortBuilder extends SortBuilder { out.writeBoolean(false); } out.writeOptionalString(this.nestedPath); - if (this.missing != null) { - out.writeBoolean(true); - out.writeGenericValue(this.missing); - } else { - out.writeBoolean(false); - } + out.writeGenericValue(this.missing); if (this.order != null) { out.writeBoolean(true); @@ -261,9 +264,8 @@ public class FieldSortBuilder extends SortBuilder { result.setNestedFilter(query); } result.setNestedPath(in.readOptionalString()); - if (in.readBoolean()) { - result.missing(in.readGenericValue()); - } + result.missing(in.readGenericValue()); + if (in.readBoolean()) { result.order(SortOrder.readOrderFrom(in)); } From 720f47e87f065dba680b5eb71abb67c56f8283c0 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Mon, 8 Feb 2016 15:26:08 +0100 Subject: [PATCH 218/320] Second round of comments --- .../indices/query/IndicesQueriesRegistry.java | 9 ++ .../search/sort/FieldSortBuilder.java | 97 +++++++++++-------- ...ameterParser.java => SortBuilderTemp.java} | 11 ++- .../elasticsearch/search/sort/SortOrder.java | 2 +- .../AbstractSearchSourceItemTestCase.java | 19 ++-- 5 files changed, 87 insertions(+), 51 deletions(-) rename core/src/main/java/org/elasticsearch/search/sort/{ParameterParser.java => SortBuilderTemp.java} (75%) diff --git a/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java b/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java index a9e90884a686..c72810212eb6 100644 --- a/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java +++ 
b/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java @@ -24,9 +24,11 @@ import java.util.Map; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryParser; +import org.elasticsearch.search.sort.SortBuilderTemp; public class IndicesQueriesRegistry extends AbstractComponent { private Map> queryParsers; + private Map> sortParsers; public IndicesQueriesRegistry(Settings settings, Map> queryParsers) { super(settings); @@ -39,4 +41,11 @@ public class IndicesQueriesRegistry extends AbstractComponent { public Map> queryParsers() { return queryParsers; } + + /** + * Returns all registered sort parsers + */ + public Map> sortParsers() { + return sortParsers; + } } diff --git a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index ff9192c039f9..5440e996b508 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -21,12 +21,9 @@ package org.elasticsearch.search.sort; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.BytesRefs; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; @@ -38,17 +35,17 @@ import java.util.Objects; /** * A sort builder to sort based on a document field. 
*/ -public class FieldSortBuilder extends SortBuilder { +public class FieldSortBuilder extends SortBuilder implements SortBuilderTemp { + public static final String NAME = "field_sort"; static final FieldSortBuilder PROTOTYPE = new FieldSortBuilder(""); public static final String NAME = "field_sort"; public static final ParseField NESTED_PATH = new ParseField("nested_path"); public static final ParseField NESTED_FILTER = new ParseField("nested_filter"); public static final ParseField MISSING = new ParseField("missing"); public static final ParseField ORDER = new ParseField("order"); - public static final ParseField SORT_MODE = new ParseField("mode"); + public static final ParseField SORT_MODE = new ParseField("mode"); public static final ParseField UNMAPPED_TYPE = new ParseField("unmapped_type"); - private final String fieldName; private Object missing; @@ -71,10 +68,12 @@ public class FieldSortBuilder extends SortBuilder { this.setNestedFilter(template.getNestedFilter()); this.setNestedPath(template.getNestedPath()); } + /** * Constructs a new sort based on a document field. * - * @param fieldName The field name. + * @param fieldName + * The field name. */ public FieldSortBuilder(String fieldName) { if (fieldName == null) { @@ -82,7 +81,7 @@ public class FieldSortBuilder extends SortBuilder { } this.fieldName = fieldName; } - + /** Returns the document field this sort should be based on. */ public String getFieldName() { return this.fieldName; @@ -111,24 +110,29 @@ public class FieldSortBuilder extends SortBuilder { /** * Set the type to use in case the current field is not mapped in an index. - * Specifying a type tells Elasticsearch what type the sort values should have, which is important - * for cross-index search, if there are sort fields that exist on some indices only. - * If the unmapped type is null then query execution will fail if one or more indices - * don't have a mapping for the current field. 
+ * Specifying a type tells Elasticsearch what type the sort values should + * have, which is important for cross-index search, if there are sort fields + * that exist on some indices only. If the unmapped type is null + * then query execution will fail if one or more indices don't have a + * mapping for the current field. */ public FieldSortBuilder unmappedType(String type) { this.unmappedType = type; return this; } - /** Returns the type to use in case the current field is not mapped in an index. */ + /** + * Returns the type to use in case the current field is not mapped in an + * index. + */ public String unmappedType() { return this.unmappedType; } /** - * Defines what values to pick in the case a document contains multiple values for the targeted sort field. - * Possible values: min, max, sum and avg + * Defines what values to pick in the case a document contains multiple + * values for the targeted sort field. Possible values: min, max, sum and + * avg * * TODO would love to see an enum here *

      @@ -139,37 +143,48 @@ public class FieldSortBuilder extends SortBuilder { return this; } - /** Returns what values to pick in the case a document contains multiple values for the targeted sort field. */ + /** + * Returns what values to pick in the case a document contains multiple + * values for the targeted sort field. + */ public String sortMode() { return this.sortMode; } + /** - * Sets the nested filter that the nested objects should match with in order to be taken into account - * for sorting. + * Sets the nested filter that the nested objects should match with in order + * to be taken into account for sorting. * - * TODO should the above getters and setters be deprecated/ changed in favour of real getters and setters? + * TODO should the above getters and setters be deprecated/ changed in + * favour of real getters and setters? */ public FieldSortBuilder setNestedFilter(QueryBuilder nestedFilter) { this.nestedFilter = nestedFilter; return this; } - /** Returns the nested filter that the nested objects should match with in order to be taken into account - * for sorting. */ + /** + * Returns the nested filter that the nested objects should match with in + * order to be taken into account for sorting. + */ public QueryBuilder getNestedFilter() { return this.nestedFilter; } /** - * Sets the nested path if sorting occurs on a field that is inside a nested object. By default when sorting on a - * field inside a nested object, the nearest upper nested object is selected as nested path. + * Sets the nested path if sorting occurs on a field that is inside a nested + * object. By default when sorting on a field inside a nested object, the + * nearest upper nested object is selected as nested path. */ public FieldSortBuilder setNestedPath(String nestedPath) { this.nestedPath = nestedPath; return this; } - /** Returns the nested path if sorting occurs in a field that is inside a nested object. 
*/ + /** + * Returns the nested path if sorting occurs in a field that is inside a + * nested object. + */ public String getNestedPath() { return this.nestedPath; } @@ -207,24 +222,20 @@ public class FieldSortBuilder extends SortBuilder { return true; } - if (other== null || getClass() != other.getClass()) { + if (other == null || getClass() != other.getClass()) { return false; } FieldSortBuilder builder = (FieldSortBuilder) other; - return (Objects.equals(this.fieldName, builder.fieldName) && - Objects.equals(this.nestedFilter, builder.nestedFilter) && - Objects.equals(this.nestedPath, builder.nestedPath) && - Objects.equals(this.missing, builder.missing) && - Objects.equals(this.order, builder.order) && - Objects.equals(this.sortMode, builder.sortMode) && - Objects.equals(this.unmappedType, builder.unmappedType)); + return (Objects.equals(this.fieldName, builder.fieldName) && Objects.equals(this.nestedFilter, builder.nestedFilter) + && Objects.equals(this.nestedPath, builder.nestedPath) && Objects.equals(this.missing, builder.missing) + && Objects.equals(this.order, builder.order) && Objects.equals(this.sortMode, builder.sortMode) + && Objects.equals(this.unmappedType, builder.unmappedType)); } - + @Override public int hashCode() { - return Objects.hash(this.fieldName, this.nestedFilter, this.nestedPath, - this.missing, this.order, this.sortMode, this.unmappedType); + return Objects.hash(this.fieldName, this.nestedFilter, this.nestedPath, this.missing, this.order, this.sortMode, this.unmappedType); } @Override @@ -243,14 +254,14 @@ public class FieldSortBuilder extends SortBuilder { } out.writeOptionalString(this.nestedPath); out.writeGenericValue(this.missing); - + if (this.order != null) { out.writeBoolean(true); this.order.writeTo(out); } else { out.writeBoolean(false); } - + out.writeOptionalString(this.sortMode); out.writeOptionalString(this.unmappedType); } @@ -285,7 +296,7 @@ public class FieldSortBuilder extends SortBuilder { SortOrder order = null; 
String sortMode = null; String unmappedType = null; - + String currentFieldName = null; XContentParser.Token token; fieldName = elementName; @@ -345,4 +356,14 @@ public class FieldSortBuilder extends SortBuilder { return builder; } + @Override + public String getName() { + return "field_sort_builder"; + } + + @Override + public SortBuilderTemp getBuilderPrototype() { + return PROTOTYPE; + } + } diff --git a/core/src/main/java/org/elasticsearch/search/sort/ParameterParser.java b/core/src/main/java/org/elasticsearch/search/sort/SortBuilderTemp.java similarity index 75% rename from core/src/main/java/org/elasticsearch/search/sort/ParameterParser.java rename to core/src/main/java/org/elasticsearch/search/sort/SortBuilderTemp.java index 74f0628fcc6b..15a12ec90f2e 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ParameterParser.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortBuilderTemp.java @@ -19,14 +19,15 @@ package org.elasticsearch.search.sort; +import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.index.query.QueryParseContext; import java.io.IOException; -public interface ParameterParser { +public interface SortBuilderTemp extends NamedWriteable, ToXContent { /** - * Creates a new item from the json held by the {@link ParameterParser} + * Creates a new item from the json held by the {@link SortBuilderTemp} * in {@link org.elasticsearch.common.xcontent.XContent} format * * @param context @@ -35,5 +36,9 @@ public interface ParameterParser { * call * @return the new item */ - T fromXContent(QueryParseContext context, String elementName) throws IOException; + NamedWriteable fromXContent(QueryParseContext context, String elementName) throws IOException; + + String getName(); + + SortBuilderTemp getBuilderPrototype(); } diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java b/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java 
index 73e5ac552473..9c3518802937 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java @@ -51,7 +51,7 @@ public enum SortOrder implements Writeable { } }; - private static final SortOrder PROTOTYPE = ASC; + private static final SortOrder PROTOTYPE = DESC; @Override public SortOrder readFrom(StreamInput in) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSearchSourceItemTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSearchSourceItemTestCase.java index 896bce1843f4..3e86a2279863 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSearchSourceItemTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSearchSourceItemTestCase.java @@ -42,8 +42,7 @@ import java.io.IOException; import static org.hamcrest.Matchers.*; -//TODO maybe merge with AbstractsortBuilderTestCase once #14933 is in? -public abstract class AbstractSearchSourceItemTestCase & ToXContent & ParameterParser> extends ESTestCase { +public abstract class AbstractSearchSourceItemTestCase> extends ESTestCase { protected static NamedWriteableRegistry namedWriteableRegistry; @@ -146,17 +145,19 @@ public abstract class AbstractSearchSourceItemTestCase sort) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { - original.writeTo(output); + sort.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + SortBuilderTemp prototype = sortParser(sort.getName()).getBuilderPrototype(); @SuppressWarnings("unchecked") - T prototype = (T) namedWriteableRegistry.getPrototype(getPrototype(), original.getWriteableName()); - T copy = (T) prototype.readFrom(in); - return copy; + T secondQuery = (T) prototype.readFrom(in); + return secondQuery; } } } - - protected abstract Class getPrototype(); + + private SortBuilderTemp 
sortParser(String queryId) { + return indicesQueriesRegistry.sortParsers().get(queryId); + } } From 02e698bc43d164add42ef9fb6012cb9d351bf5c6 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Wed, 10 Feb 2016 11:26:30 +0100 Subject: [PATCH 219/320] Third round of comments --- .../elasticsearch/common/unit/Fuzziness.java | 1 + .../indices/query/IndicesQueriesRegistry.java | 9 - .../search/sort/FieldSortBuilder.java | 62 +++---- .../search/sort/GeoDistanceSortBuilder.java | 5 +- .../search/sort/SortBuilderTemp.java | 8 +- .../elasticsearch/search/sort/SortOrder.java | 2 +- .../AbstractSearchSourceItemTestCase.java | 163 ------------------ .../search/sort/AbstractSortTestCase.java | 6 +- .../search/sort/FieldSortBuilderTests.java | 7 +- 9 files changed, 31 insertions(+), 232 deletions(-) delete mode 100644 core/src/test/java/org/elasticsearch/search/sort/AbstractSearchSourceItemTestCase.java diff --git a/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java b/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java index 831fbe505bb0..24a727691ccc 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java +++ b/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java @@ -67,6 +67,7 @@ public final class Fuzziness implements ToXContent, Writeable { /** * Creates a {@link Fuzziness} instance from an edit distance. The value must be one of [0, 1, 2] + * * Note: Using this method only makes sense if the field you are applying Fuzziness to is some sort of string. 
*/ public static Fuzziness fromEdits(int edits) { diff --git a/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java b/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java index c72810212eb6..a9e90884a686 100644 --- a/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java +++ b/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java @@ -24,11 +24,9 @@ import java.util.Map; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryParser; -import org.elasticsearch.search.sort.SortBuilderTemp; public class IndicesQueriesRegistry extends AbstractComponent { private Map> queryParsers; - private Map> sortParsers; public IndicesQueriesRegistry(Settings settings, Map> queryParsers) { super(settings); @@ -41,11 +39,4 @@ public class IndicesQueriesRegistry extends AbstractComponent { public Map> queryParsers() { return queryParsers; } - - /** - * Returns all registered sort parsers - */ - public Map> sortParsers() { - return sortParsers; - } } diff --git a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index 5440e996b508..a11eefc1f668 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -286,10 +286,9 @@ public class FieldSortBuilder extends SortBuilder implements SortBuilderTemp getBuilderPrototype() { - return PROTOTYPE; - } - } diff --git a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index b5a10e238b71..8cc138bf62c6 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -23,11 +23,9 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; @@ -44,8 +42,7 @@ import java.util.Objects; /** * A geo distance based sorting on a geo point like field. */ -public class GeoDistanceSortBuilder extends SortBuilder - implements ToXContent, NamedWriteable, SortElementParserTemp { +public class GeoDistanceSortBuilder extends SortBuilder implements SortBuilderTemp { public static final String NAME = "_geo_distance"; public static final boolean DEFAULT_COERCE = false; public static final boolean DEFAULT_IGNORE_MALFORMED = false; diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortBuilderTemp.java b/core/src/main/java/org/elasticsearch/search/sort/SortBuilderTemp.java index 15a12ec90f2e..e3e571c4410e 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortBuilderTemp.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortBuilderTemp.java @@ -25,7 +25,7 @@ import org.elasticsearch.index.query.QueryParseContext; import java.io.IOException; -public interface SortBuilderTemp extends NamedWriteable, ToXContent { +public interface SortBuilderTemp extends NamedWriteable, ToXContent { /** * Creates a new item from the json held by the {@link SortBuilderTemp} * in {@link org.elasticsearch.common.xcontent.XContent} format @@ -36,9 +36,5 @@ public interface SortBuilderTemp 
extends NamedWriteable fromXContent(QueryParseContext context, String elementName) throws IOException; - - String getName(); - - SortBuilderTemp getBuilderPrototype(); + SortBuilderTemp fromXContent(QueryParseContext context, String elementName) throws IOException; } diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java b/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java index 9c3518802937..73e5ac552473 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java @@ -51,7 +51,7 @@ public enum SortOrder implements Writeable { } }; - private static final SortOrder PROTOTYPE = DESC; + private static final SortOrder PROTOTYPE = ASC; @Override public SortOrder readFrom(StreamInput in) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSearchSourceItemTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSearchSourceItemTestCase.java deleted file mode 100644 index 3e86a2279863..000000000000 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSearchSourceItemTestCase.java +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.sort; - -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.NamedWriteable; -import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.search.SearchModule; -import org.elasticsearch.test.ESTestCase; -import org.junit.AfterClass; -import org.junit.BeforeClass; - -import java.io.IOException; - -import static org.hamcrest.Matchers.*; - -public abstract class AbstractSearchSourceItemTestCase> extends ESTestCase { - - protected static NamedWriteableRegistry namedWriteableRegistry; - - private static final int NUMBER_OF_TESTBUILDERS = 20; - static IndicesQueriesRegistry indicesQueriesRegistry; - - @BeforeClass - public static void init() { - namedWriteableRegistry = new NamedWriteableRegistry(); - namedWriteableRegistry.registerPrototype(FieldSortBuilder.class, FieldSortBuilder.PROTOTYPE); - indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).buildQueryParserRegistry(); - } - - @AfterClass - public static void afterClass() throws Exception { - namedWriteableRegistry = null; - } - - /** Returns random sort that is put under test */ - protected abstract T createTestItem(); - - 
/** Returns mutated version of original so the returned sort is different in terms of equals/hashcode */ - protected abstract T mutate(T original) throws IOException; - - /** - * Test that creates new sort from a random test sort and checks both for equality - */ - public void testFromXContent() throws IOException { - for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { - T testItem = createTestItem(); - - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); - if (randomBoolean()) { - builder.prettyPrint(); - } - builder.startObject(); - testItem.toXContent(builder, ToXContent.EMPTY_PARAMS); - builder.endObject(); - - XContentParser itemParser = XContentHelper.createParser(builder.bytes()); - itemParser.nextToken(); - - /* - * filter out name of sort, or field name to sort on for element fieldSort - */ - itemParser.nextToken(); - String elementName = itemParser.currentName(); - itemParser.nextToken(); - - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); - context.reset(itemParser); - NamedWriteable parsedItem = testItem.fromXContent(context, elementName); - assertNotSame(testItem, parsedItem); - assertEquals(testItem, parsedItem); - assertEquals(testItem.hashCode(), parsedItem.hashCode()); - } - } - - /** - * Test serialization and deserialization of the test sort. 
- */ - public void testSerialization() throws IOException { - for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { - T testsort = createTestItem(); - T deserializedsort = copyItem(testsort); - assertEquals(testsort, deserializedsort); - assertEquals(testsort.hashCode(), deserializedsort.hashCode()); - assertNotSame(testsort, deserializedsort); - } - } - - /** - * Test equality and hashCode properties - */ - public void testEqualsAndHashcode() throws IOException { - for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { - T firstsort = createTestItem(); - assertFalse("sort is equal to null", firstsort.equals(null)); - assertFalse("sort is equal to incompatible type", firstsort.equals("")); - assertTrue("sort is not equal to self", firstsort.equals(firstsort)); - assertThat("same sort's hashcode returns different values if called multiple times", firstsort.hashCode(), - equalTo(firstsort.hashCode())); - assertThat("different sorts should not be equal", mutate(firstsort), not(equalTo(firstsort))); - assertThat("different sorts should have different hashcode", mutate(firstsort).hashCode(), not(equalTo(firstsort.hashCode()))); - - T secondsort = copyItem(firstsort); - assertTrue("sort is not equal to self", secondsort.equals(secondsort)); - assertTrue("sort is not equal to its copy", firstsort.equals(secondsort)); - assertTrue("equals is not symmetric", secondsort.equals(firstsort)); - assertThat("sort copy's hashcode is different from original hashcode", secondsort.hashCode(), equalTo(firstsort.hashCode())); - - T thirdsort = copyItem(secondsort); - assertTrue("sort is not equal to self", thirdsort.equals(thirdsort)); - assertTrue("sort is not equal to its copy", secondsort.equals(thirdsort)); - assertThat("sort copy's hashcode is different from original hashcode", secondsort.hashCode(), equalTo(thirdsort.hashCode())); - assertTrue("equals is not transitive", firstsort.equals(thirdsort)); - assertThat("sort copy's hashcode is different from original 
hashcode", firstsort.hashCode(), equalTo(thirdsort.hashCode())); - assertTrue("equals is not symmetric", thirdsort.equals(secondsort)); - assertTrue("equals is not symmetric", thirdsort.equals(firstsort)); - } - } - - protected T copyItem(SortBuilderTemp sort) throws IOException { - try (BytesStreamOutput output = new BytesStreamOutput()) { - sort.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { - SortBuilderTemp prototype = sortParser(sort.getName()).getBuilderPrototype(); - @SuppressWarnings("unchecked") - T secondQuery = (T) prototype.readFrom(in); - return secondQuery; - } - } - } - - private SortBuilderTemp sortParser(String queryId) { - return indicesQueriesRegistry.sortParsers().get(queryId); - } -} diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index dc61f0ef34c4..8cadfb244080 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.sort; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -43,7 +42,7 @@ import java.io.IOException; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; -public abstract class AbstractSortTestCase & SortElementParserTemp> extends ESTestCase { +public abstract class AbstractSortTestCase> extends ESTestCase { protected static NamedWriteableRegistry namedWriteableRegistry; @@ -154,7 +153,8 @@ public abstract class AbstractSortTestCase { - - @SuppressWarnings("unchecked") - 
public Class getPrototype() { - return (Class) FieldSortBuilder.PROTOTYPE.getClass(); - } +public class FieldSortBuilderTests extends AbstractSortTestCase { @Override protected FieldSortBuilder createTestItem() { From fb647e9bf41c411c65b14a3987d9710965a5866a Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Thu, 10 Mar 2016 12:50:09 +0100 Subject: [PATCH 220/320] Add sanity checks and support for reverse in FieldSortBuilder.parse(...) After another round of input from @cbuescher this adds a few more sanity checks to request parsing. In addition adds (back) support for the reverse option. --- .../elasticsearch/search/sort/FieldSortBuilder.java | 13 +++++++++++-- .../search/sort/GeoDistanceSortBuilder.java | 2 +- .../elasticsearch/search/sort/ScoreSortBuilder.java | 3 +-- ...{SortBuilderTemp.java => SortBuilderParser.java} | 6 +++--- .../search/sort/AbstractSortTestCase.java | 3 ++- 5 files changed, 18 insertions(+), 9 deletions(-) rename core/src/main/java/org/elasticsearch/search/sort/{SortBuilderTemp.java => SortBuilderParser.java} (87%) diff --git a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index a11eefc1f668..4ead35fd96b6 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -35,14 +35,14 @@ import java.util.Objects; /** * A sort builder to sort based on a document field. 
*/ -public class FieldSortBuilder extends SortBuilder implements SortBuilderTemp { - public static final String NAME = "field_sort"; +public class FieldSortBuilder extends SortBuilder implements SortBuilderParser { static final FieldSortBuilder PROTOTYPE = new FieldSortBuilder(""); public static final String NAME = "field_sort"; public static final ParseField NESTED_PATH = new ParseField("nested_path"); public static final ParseField NESTED_FILTER = new ParseField("nested_filter"); public static final ParseField MISSING = new ParseField("missing"); public static final ParseField ORDER = new ParseField("order"); + public static final ParseField REVERSE = new ParseField("reverse"); public static final ParseField SORT_MODE = new ParseField("mode"); public static final ParseField UNMAPPED_TYPE = new ParseField("unmapped_type"); @@ -304,12 +304,19 @@ public class FieldSortBuilder extends SortBuilder implements SortBuilderTemp { +public class GeoDistanceSortBuilder extends SortBuilder implements SortBuilderParser { public static final String NAME = "_geo_distance"; public static final boolean DEFAULT_COERCE = false; public static final boolean DEFAULT_IGNORE_MALFORMED = false; diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java index 6b1bc054ee77..c416965f38a9 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.sort; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -35,7 +34,7 @@ import 
java.util.Objects; /** * A sort builder allowing to sort by score. */ -public class ScoreSortBuilder extends SortBuilder implements NamedWriteable, +public class ScoreSortBuilder extends SortBuilder implements SortBuilderParser, SortElementParserTemp { private static final String NAME = "_score"; diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortBuilderTemp.java b/core/src/main/java/org/elasticsearch/search/sort/SortBuilderParser.java similarity index 87% rename from core/src/main/java/org/elasticsearch/search/sort/SortBuilderTemp.java rename to core/src/main/java/org/elasticsearch/search/sort/SortBuilderParser.java index e3e571c4410e..90d54a501215 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortBuilderTemp.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortBuilderParser.java @@ -25,9 +25,9 @@ import org.elasticsearch.index.query.QueryParseContext; import java.io.IOException; -public interface SortBuilderTemp extends NamedWriteable, ToXContent { +public interface SortBuilderParser extends NamedWriteable, ToXContent { /** - * Creates a new item from the json held by the {@link SortBuilderTemp} + * Creates a new item from the json held by the {@link SortBuilderParser} * in {@link org.elasticsearch.common.xcontent.XContent} format * * @param context @@ -36,5 +36,5 @@ public interface SortBuilderTemp extends NamedWriteable * call * @return the new item */ - SortBuilderTemp fromXContent(QueryParseContext context, String elementName) throws IOException; + SortBuilder fromXContent(QueryParseContext context, String elementName) throws IOException; } diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index 8cadfb244080..def78acf5afc 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -42,7 +42,7 @@ 
import java.io.IOException; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; -public abstract class AbstractSortTestCase> extends ESTestCase { +public abstract class AbstractSortTestCase> extends ESTestCase { protected static NamedWriteableRegistry namedWriteableRegistry; @@ -54,6 +54,7 @@ public abstract class AbstractSortTestCase> extends namedWriteableRegistry = new NamedWriteableRegistry(); namedWriteableRegistry.registerPrototype(SortBuilder.class, GeoDistanceSortBuilder.PROTOTYPE); namedWriteableRegistry.registerPrototype(SortBuilder.class, ScoreSortBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(SortBuilder.class, FieldSortBuilder.PROTOTYPE); indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).buildQueryParserRegistry(); } From 744de1f6cbd36a2e1e98273b9d602802c8dcd89a Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Mon, 14 Mar 2016 14:00:58 +0100 Subject: [PATCH 221/320] Throw ParsingExceptions instead of IllegalArgument ... keeps track of the position of the error. 
--- .../java/org/elasticsearch/search/sort/FieldSortBuilder.java | 3 ++- .../org/elasticsearch/search/sort/AbstractSortTestCase.java | 3 +++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index 4ead35fd96b6..1157457afb9f 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.sort; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.BytesRefs; @@ -305,7 +306,7 @@ public class FieldSortBuilder extends SortBuilder implements S if (context.parseFieldMatcher().match(currentFieldName, NESTED_FILTER)) { nestedFilter = context.parseInnerQueryBuilder(); } else { - throw new IllegalArgumentException("Expected " + NESTED_FILTER.getPreferredName() + " element."); + throw new ParsingException(parser.getTokenLocation(), "Expected " + NESTED_FILTER.getPreferredName() + " element."); } } else if (token.isValue()) { if (context.parseFieldMatcher().match(currentFieldName, NESTED_PATH)) { diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index def78acf5afc..6c5800c97cb0 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.sort; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.BytesStreamOutput; import 
org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -85,6 +86,8 @@ public abstract class AbstractSortTestCase Date: Mon, 14 Mar 2016 14:18:18 +0100 Subject: [PATCH 222/320] Adding enum for SortMode and use it in ScriptSortBuilder --- .../elasticsearch/search/sort/SortMode.java | 91 +++++++++++++++++++ .../search/sort/SortModeTest.java | 55 +++++++++++ 2 files changed, 146 insertions(+) create mode 100644 core/src/main/java/org/elasticsearch/search/sort/SortMode.java create mode 100644 core/src/test/java/org/elasticsearch/search/sort/SortModeTest.java diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortMode.java b/core/src/main/java/org/elasticsearch/search/sort/SortMode.java new file mode 100644 index 000000000000..2f6ce9401d49 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/sort/SortMode.java @@ -0,0 +1,91 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.sort; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; +import java.util.Locale; +import java.util.Objects; + +/** + * Elasticsearch supports sorting by array or multi-valued fields. The SortMode option controls what array value is picked + * for sorting the document it belongs to. The mode option can have the following values: + *

        + *
      • min - Pick the lowest value.
      • + *
      • max - Pick the highest value.
      • + *
      • sum - Use the sum of all values as sort value. Only applicable for number based array fields.
      • + *
      • avg - Use the average of all values as sort value. Only applicable for number based array fields.
      • + *
      • median - Use the median of all values as sort value. Only applicable for number based array fields.
      • + *
      + */ +public enum SortMode implements Writeable { + /** pick the lowest value **/ + MIN, + /** pick the highest value **/ + MAX, + /** Use the sum of all values as sort value. Only applicable for number based array fields. **/ + SUM, + /** Use the average of all values as sort value. Only applicable for number based array fields. **/ + AVG, + /** Use the median of all values as sort value. Only applicable for number based array fields. **/ + MEDIAN; + + static SortMode PROTOTYPE = MIN; + + @Override + public void writeTo(final StreamOutput out) throws IOException { + out.writeVInt(ordinal()); + } + + @Override + public SortMode readFrom(final StreamInput in) throws IOException { + int ordinal = in.readVInt(); + if (ordinal < 0 || ordinal >= values().length) { + throw new IOException("Unknown SortMode ordinal [" + ordinal + "]"); + } + return values()[ordinal]; + } + + public static SortMode fromString(final String str) { + Objects.requireNonNull(str, "input string is null"); + switch (str.toLowerCase(Locale.ROOT)) { + case ("min"): + return MIN; + case ("max"): + return MAX; + case ("sum"): + return SUM; + case ("avg"): + return AVG; + case ("median"): + return MEDIAN; + default: + throw new IllegalArgumentException("Unknown SortMode [" + str + "]"); + } + } + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } +} \ No newline at end of file diff --git a/core/src/test/java/org/elasticsearch/search/sort/SortModeTest.java b/core/src/test/java/org/elasticsearch/search/sort/SortModeTest.java new file mode 100644 index 000000000000..f7318ad0a593 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/sort/SortModeTest.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.sort; + +import org.elasticsearch.test.ESTestCase; +import org.junit.Rule; +import org.junit.rules.ExpectedException; + +public class SortModeTest extends ESTestCase { + + @Rule + public ExpectedException exceptionRule = ExpectedException.none(); + + public void testSortMode() { + // we rely on these ordinals in serialization, so changing them breaks bwc. + assertEquals(0, SortMode.MIN.ordinal()); + assertEquals(1, SortMode.MAX.ordinal()); + assertEquals(2, SortMode.SUM.ordinal()); + assertEquals(3, SortMode.AVG.ordinal()); + assertEquals(4, SortMode.MEDIAN.ordinal()); + + assertEquals("min", SortMode.MIN.toString()); + assertEquals("max", SortMode.MAX.toString()); + assertEquals("sum", SortMode.SUM.toString()); + assertEquals("avg", SortMode.AVG.toString()); + assertEquals("median", SortMode.MEDIAN.toString()); + + for (SortMode mode : SortMode.values()) { + assertEquals(mode, SortMode.fromString(mode.toString())); + assertEquals(mode, SortMode.fromString(mode.toString().toUpperCase())); + } + + exceptionRule.expect(IllegalArgumentException.class); + exceptionRule.expectMessage("Unknown SortMode [xyz]"); + SortMode.fromString("xyz"); + } + +} From 9f382da5d3fc5ad3b44819dfdb0d712965919e49 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 14 Mar 2016 14:27:35 +0100 Subject: [PATCH 223/320] Add better validation error message and a 
dedicated test --- .../action/admin/indices/mapping/put/PutMappingRequest.java | 2 +- .../admin/indices/mapping/put/PutMappingRequestTests.java | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java index 9444e322a68a..9e33103b371d 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -94,7 +94,7 @@ public class PutMappingRequest extends AcknowledgedRequest im validationException = addValidationError("mapping source is empty", validationException); } if (concreteIndex != null && (indices != null && indices.length > 0)) { - validationException = addValidationError("either concreteIndices or unresolved indices can be set", validationException); + validationException = addValidationError("either concreteIndices or unresolved indices can be set concrete: [" + concreteIndex + "] and indices: " + indices , validationException); } return validationException; } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java index 967a90264693..65b9ff0dd22a 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.admin.indices.mapping.put; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.index.Index; import org.elasticsearch.test.ESTestCase; public class PutMappingRequestTests extends ESTestCase { @@ -48,5 +49,10 @@ public class 
PutMappingRequestTests extends ESTestCase { r.source("somevalidmapping"); ex = r.validate(); assertNull("validation should succeed", ex); + + r.setConcreteIndex(new Index("foo", "bar")); + ex = r.validate(); + assertNotNull("source validation should fail", ex); + assertTrue(ex.getMessage().contains("either concreteIndices or unresolved indices can be set")); } } From 0ebcef0bb47154cf5582c9722490ea43d1d3345a Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 14 Mar 2016 14:51:43 +0100 Subject: [PATCH 224/320] wrap line after 140 chars --- .../action/admin/indices/mapping/put/PutMappingRequest.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java index 9e33103b371d..17fdd6e5f00d 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -94,7 +94,8 @@ public class PutMappingRequest extends AcknowledgedRequest im validationException = addValidationError("mapping source is empty", validationException); } if (concreteIndex != null && (indices != null && indices.length > 0)) { - validationException = addValidationError("either concreteIndices or unresolved indices can be set concrete: [" + concreteIndex + "] and indices: " + indices , validationException); + validationException = addValidationError("either concreteIndices or unresolved indices can be set concrete: [" + concreteIndex + + "] and indices: " + indices , validationException); } return validationException; } From e4bed0c97e7e0acb293624f9601027f8be38c874 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 14 Mar 2016 14:57:43 +0100 Subject: [PATCH 225/320] Improve validation error message on PutMappingRequest --- 
.../action/admin/indices/mapping/put/PutMappingRequest.java | 5 +++-- .../admin/indices/mapping/put/PutMappingRequestTests.java | 4 +++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java index 17fdd6e5f00d..7b389dba25a9 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.Index; import java.io.IOException; +import java.util.Arrays; import java.util.Map; import java.util.Objects; @@ -94,8 +95,8 @@ public class PutMappingRequest extends AcknowledgedRequest im validationException = addValidationError("mapping source is empty", validationException); } if (concreteIndex != null && (indices != null && indices.length > 0)) { - validationException = addValidationError("either concreteIndices or unresolved indices can be set concrete: [" + concreteIndex - + "] and indices: " + indices , validationException); + validationException = addValidationError("either concrete index or unresolved indices can be set, concrete index: [" + + concreteIndex + "] and indices: " + Arrays.asList(indices) , validationException); } return validationException; } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java index 65b9ff0dd22a..04892b82339f 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java @@ -53,6 +53,8 @@ public class 
PutMappingRequestTests extends ESTestCase { r.setConcreteIndex(new Index("foo", "bar")); ex = r.validate(); assertNotNull("source validation should fail", ex); - assertTrue(ex.getMessage().contains("either concreteIndices or unresolved indices can be set")); + assertEquals(ex.getMessage(), + "Validation Failed: 1: either concrete index or unresolved indices can be set," + + " concrete index: [[foo/bar]] and indices: [myindex];"); } } From 17a420e6aa7745c93bab7b0e2d6c08b203527508 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 14 Mar 2016 15:21:39 +0100 Subject: [PATCH 226/320] Adressing review comments, adding parsing tests --- .../search/sort/ScriptSortBuilder.java | 28 +++- .../search/sort/RandomSortDataGenerator.java | 13 +- .../search/sort/ScriptSortBuilderTests.java | 139 +++++++++++++++++- .../search/sort/SortModeTest.java | 8 + 4 files changed, 172 insertions(+), 16 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index 6254d5b1e414..9b51eeca31dd 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.sort; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -59,8 +60,7 @@ public class ScriptSortBuilder extends SortBuilder implements private ScriptSortType type; - // TODO make this an enum - private String sortMode; + private SortMode sortMode; private QueryBuilder nestedFilter; @@ -109,7 +109,8 @@ public class ScriptSortBuilder extends SortBuilder implements * Defines which distance to use 
for sorting in the case a document contains multiple geo points. * Possible values: min and max */ - public ScriptSortBuilder sortMode(String sortMode) { + public ScriptSortBuilder sortMode(SortMode sortMode) { + Objects.requireNonNull(sortMode, "sort mode cannot be null."); this.sortMode = sortMode; return this; } @@ -117,7 +118,7 @@ public class ScriptSortBuilder extends SortBuilder implements /** * Get the sort mode. */ - public String sortMode() { + public SortMode sortMode() { return this.sortMode; } @@ -179,7 +180,7 @@ public class ScriptSortBuilder extends SortBuilder implements ParseFieldMatcher parseField = context.parseFieldMatcher(); Script script = null; ScriptSortType type = null; - String sortMode = null; + SortMode sortMode = null; SortOrder order = null; QueryBuilder nestedFilter = null; String nestedPath = null; @@ -197,6 +198,8 @@ public class ScriptSortBuilder extends SortBuilder implements params = parser.map(); } else if (parseField.match(currentName, NESTED_FILTER_FIELD)) { nestedFilter = context.parseInnerQueryBuilder(); + } else { + throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] failed to parse field [" + currentName + "]"); } } else if (token.isValue()) { if (parseField.match(currentName, ORDER_FIELD)) { @@ -206,10 +209,14 @@ public class ScriptSortBuilder extends SortBuilder implements } else if (parseField.match(currentName, TYPE_FIELD)) { type = ScriptSortType.fromString(parser.text()); } else if (parseField.match(currentName, SORTMODE_FIELD)) { - sortMode = parser.text(); + sortMode = SortMode.fromString(parser.text()); } else if (parseField.match(currentName, NESTED_PATH_FIELD)) { nestedPath = parser.text(); + } else { + throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] failed to parse field [" + currentName + "]"); } + } else { + throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] unexpected token [" + token + "]"); } } @@ -266,7 +273,10 @@ public class ScriptSortBuilder 
extends SortBuilder implements script.writeTo(out); type.writeTo(out); order.writeTo(out); - out.writeOptionalString(sortMode); + out.writeBoolean(sortMode != null); + if (sortMode != null) { + sortMode.writeTo(out); + } out.writeOptionalString(nestedPath); boolean hasNestedFilter = nestedFilter != null; out.writeBoolean(hasNestedFilter); @@ -279,7 +289,9 @@ public class ScriptSortBuilder extends SortBuilder implements public ScriptSortBuilder readFrom(StreamInput in) throws IOException { ScriptSortBuilder builder = new ScriptSortBuilder(Script.readScript(in), ScriptSortType.PROTOTYPE.readFrom(in)); builder.order(SortOrder.readOrderFrom(in)); - builder.sortMode = in.readOptionalString(); + if (in.readBoolean()) { + builder.sortMode(SortMode.PROTOTYPE.readFrom(in)); + } builder.nestedPath = in.readOptionalString(); if (in.readBoolean()) { builder.nestedFilter = in.readQuery(); diff --git a/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java b/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java index fcd5284119cd..405c1c43e775 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java +++ b/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java @@ -60,15 +60,14 @@ public class RandomSortDataGenerator { return nestedPath; } - public static String mode(String original) { - String[] modes = {"min", "max", "avg", "sum"}; - String mode = ESTestCase.randomFrom(modes); + public static SortMode mode(SortMode original) { + SortMode mode = ESTestCase.randomFrom(SortMode.values()); while (mode.equals(original)) { - mode = ESTestCase.randomFrom(modes); + mode = ESTestCase.randomFrom(SortMode.values()); } return mode; } - + public static Object missing(Object original) { Object missing = null; Object otherMissing = null; @@ -95,12 +94,12 @@ public class RandomSortDataGenerator { break; default: throw new IllegalStateException("Unknown missing type."); - + } } return missing; 
} - + public static SortOrder order(SortOrder original) { SortOrder order = SortOrder.ASC; if (order.equals(original)) { diff --git a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index 40798cbcb882..091a6c3002a2 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -20,8 +20,17 @@ package org.elasticsearch.search.sort; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; +import org.junit.Rule; +import org.junit.rules.ExpectedException; import java.io.IOException; @@ -59,7 +68,9 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase Date: Mon, 14 Mar 2016 21:16:44 +0100 Subject: [PATCH 227/320] Move shared directory under build/cluster/shared to prevent granting r/w permission to the build/cluster directory --- .../gradle/test/ClusterFormationTasks.groovy | 17 ++++++++++------- .../elasticsearch/gradle/test/NodeInfo.groovy | 6 +++++- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 97073c67cfe6..a82fefdc510a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ 
-53,12 +53,15 @@ class ClusterFormationTasks { // no need to add cluster formation tasks if the task won't run! return } - // first we remove everything in the cluster directory to ensure there are no leftovers in repos or anything - // this also forces unpacking of nodes and wipes logfiles etc. to prevent leftovers along those lines + File sharedDir = new File(project.buildDir, "cluster/shared") + // first we remove everything in the shared cluster directory to ensure there are no leftovers in repos or anything // in theory this should not be necessary but repositories are only deleted in the cluster-state and not on-disk // such that snapshots survive failures / test runs and there is no simple way today to fix that. - Task cleanup = project.tasks.create(name: "${task.name}#prepareCluster.clean", type: Delete, dependsOn: task.dependsOn.collect()) { - delete new File(project.buildDir, "cluster"); + Task cleanup = project.tasks.create(name: "${task.name}#prepareCluster.cleanShared", type: Delete, dependsOn: task.dependsOn.collect()) { + delete sharedDir + doLast { + sharedDir.mkdirs() + } } List startTasks = [cleanup] List nodes = [] @@ -93,7 +96,7 @@ class ClusterFormationTasks { elasticsearchVersion = config.bwcVersion configuration = project.configurations.elasticsearchBwcDistro } - NodeInfo node = new NodeInfo(config, i, project, task, elasticsearchVersion) + NodeInfo node = new NodeInfo(config, i, project, task, elasticsearchVersion, sharedDir) if (i == 0) { if (config.seedNodePortsFile != null) { // we might allow this in the future to be set but for now we are the only authority to set this! 
@@ -252,8 +255,8 @@ class ClusterFormationTasks { Map esConfig = [ 'cluster.name' : node.clusterName, 'pidfile' : node.pidFile, - 'path.repo' : "${node.baseDir}/../repo", - 'path.shared_data' : "${node.baseDir}/../", + 'path.repo' : "${node.sharedDir}/repo", + 'path.shared_data' : "${node.sharedDir}/", // Define a node attribute so we can test that it exists 'node.testattr' : 'test', 'repositories.url.allowed_urls': 'http://snapshot.test*' diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index 5dcdcbed5f81..6f45cddf77ee 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -40,6 +40,9 @@ class NodeInfo { /** root directory all node files and operations happen under */ File baseDir + /** shared data directory all nodes share */ + File sharedDir + /** the pid file the node will use */ File pidFile @@ -89,9 +92,10 @@ class NodeInfo { ByteArrayOutputStream buffer = new ByteArrayOutputStream() /** Creates a node to run as part of a cluster for the given task */ - NodeInfo(ClusterConfiguration config, int nodeNum, Project project, Task task, String nodeVersion) { + NodeInfo(ClusterConfiguration config, int nodeNum, Project project, Task task, String nodeVersion, File sharedDir) { this.config = config this.nodeNum = nodeNum + this.sharedDir = sharedDir clusterName = "${task.path.replace(':', '_').substring(1)}" baseDir = new File(project.buildDir, "cluster/${task.name} node${nodeNum}") pidFile = new File(baseDir, 'es.pid') From dfec4547eaf01e41d39929996750abaeb2a6a59d Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Mon, 14 Mar 2016 13:19:52 -0700 Subject: [PATCH 228/320] Added one minor comment for expressions tests. 
--- .../elasticsearch/script/expression/MoreExpressionTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java index 1260919bfab6..a8856ea78b5f 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java @@ -385,7 +385,7 @@ public class MoreExpressionTests extends ESIntegTestCase { AggregationBuilders.stats("double_agg").field("y") .script(new Script("_value - 1.1", ScriptType.INLINE, ExpressionScriptEngineService.NAME, null))) .addAggregation( - AggregationBuilders.stats("const_agg").field("x") + AggregationBuilders.stats("const_agg").field("x") // specifically to test a script w/o _value .script(new Script("3.0", ScriptType.INLINE, ExpressionScriptEngineService.NAME, null)) ); From 6f28c173e2230738ad8279288910de6c9899d320 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 14 Mar 2016 21:42:37 +0100 Subject: [PATCH 229/320] [TEST] Test that all processors are available --- .../test/ingest_attachment/10_basic.yaml | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml index 67bb7340ce39..7c789b9c2ca0 100644 --- a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml +++ b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml @@ -8,4 +8,19 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: ingest-attachment } - - match: { nodes.$master.ingest.processors.11.type: attachment } + - 
match: { nodes.$master.ingest.processors.0.type: append } + - match: { nodes.$master.ingest.processors.1.type: attachment } + - match: { nodes.$master.ingest.processors.2.type: convert } + - match: { nodes.$master.ingest.processors.3.type: date } + - match: { nodes.$master.ingest.processors.4.type: fail } + - match: { nodes.$master.ingest.processors.5.type: foreach } + - match: { nodes.$master.ingest.processors.6.type: gsub } + - match: { nodes.$master.ingest.processors.7.type: join } + - match: { nodes.$master.ingest.processors.8.type: lowercase } + - match: { nodes.$master.ingest.processors.9.type: remove } + - match: { nodes.$master.ingest.processors.10.type: rename } + - match: { nodes.$master.ingest.processors.11.type: set } + - match: { nodes.$master.ingest.processors.12.type: split } + - match: { nodes.$master.ingest.processors.13.type: trim } + - match: { nodes.$master.ingest.processors.14.type: uppercase } + From 554bf2c282a71ab25f8958fcad5993c2b616ca20 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 14 Mar 2016 22:35:25 +0100 Subject: [PATCH 230/320] [TEST] Test that all processors are available --- .../test/ingest_geoip/10_basic.yaml | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml index b924484aa7dd..cf86f4c7f4c3 100644 --- a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml +++ b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml @@ -8,4 +8,18 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: ingest-geoip } - - match: { nodes.$master.ingest.processors.3.type: geoip } + - match: { nodes.$master.ingest.processors.0.type: append } + - match: { nodes.$master.ingest.processors.1.type: convert } + - match: { nodes.$master.ingest.processors.2.type: date } + - match: 
{ nodes.$master.ingest.processors.3.type: fail } + - match: { nodes.$master.ingest.processors.4.type: foreach } + - match: { nodes.$master.ingest.processors.5.type: geoip } + - match: { nodes.$master.ingest.processors.6.type: gsub } + - match: { nodes.$master.ingest.processors.7.type: join } + - match: { nodes.$master.ingest.processors.8.type: lowercase } + - match: { nodes.$master.ingest.processors.9.type: remove } + - match: { nodes.$master.ingest.processors.10.type: rename } + - match: { nodes.$master.ingest.processors.11.type: set } + - match: { nodes.$master.ingest.processors.12.type: split } + - match: { nodes.$master.ingest.processors.13.type: trim } + - match: { nodes.$master.ingest.processors.14.type: uppercase } From 35f7cfb6c0fb333b1eafcea65a4ee30f5e94f007 Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Mon, 14 Mar 2016 23:13:06 -0400 Subject: [PATCH 231/320] Add upgrader to upgrade old indices to new naming convention --- .../common/util/IndexFolderUpgrader.java | 154 ++++++++ .../common/util/IndexFolderUpgraderTests.java | 366 ++++++++++++++++++ docs/reference/migration/migrate_5_0.asciidoc | 11 + 3 files changed, 531 insertions(+) create mode 100644 core/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java create mode 100644 core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java diff --git a/core/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java b/core/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java new file mode 100644 index 000000000000..54dac7447ebb --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java @@ -0,0 +1,154 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.util; + +import org.apache.lucene.util.IOUtils; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.gateway.MetaDataStateFormat; +import org.elasticsearch.gateway.MetaStateService; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexSettings; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.NoSuchFileException; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; + +/** + * Renames index folders from {index.name} to {index.uuid} + */ +public class IndexFolderUpgrader { + private final NodeEnvironment nodeEnv; + private final Settings settings; + private final ESLogger logger = Loggers.getLogger(IndexFolderUpgrader.class); + private final MetaDataStateFormat indexStateFormat = readOnlyIndexMetaDataStateFormat(); + + /** + * Creates a new upgrader instance + * @param settings node settings + * @param nodeEnv the node env to operate 
on + */ + IndexFolderUpgrader(Settings settings, NodeEnvironment nodeEnv) { + this.settings = settings; + this.nodeEnv = nodeEnv; + } + + /** + * Moves the index folder found in source to target + */ + void upgrade(final Index index, final Path source, final Path target) throws IOException { + boolean success = false; + try { + Files.move(source, target, StandardCopyOption.ATOMIC_MOVE); + success = true; + } catch (NoSuchFileException | FileNotFoundException exception) { + // thrown when the source is non-existent because the folder was renamed + // by another node (shared FS) after we checked if the target exists + logger.error("multiple nodes trying to upgrade [{}] in parallel, retry upgrading with single node", + exception, target); + throw exception; + } finally { + if (success) { + logger.info("{} moved from [{}] to [{}]", index, source, target); + logger.trace("{} syncing directory [{}]", index, target); + IOUtils.fsync(target, true); + } + } + } + + /** + * Renames indexFolderName index folders found in node paths and custom path + * iff {@link #needsUpgrade(Index, String)} is true. + * Index folder in custom paths are renamed first followed by index folders in each node path. 
+ */ + void upgrade(final String indexFolderName) throws IOException { + for (NodeEnvironment.NodePath nodePath : nodeEnv.nodePaths()) { + final Path indexFolderPath = nodePath.indicesPath.resolve(indexFolderName); + final IndexMetaData indexMetaData = indexStateFormat.loadLatestState(logger, indexFolderPath); + if (indexMetaData != null) { + final Index index = indexMetaData.getIndex(); + if (needsUpgrade(index, indexFolderName)) { + logger.info("{} upgrading [{}] to new naming convention", index, indexFolderPath); + final IndexSettings indexSettings = new IndexSettings(indexMetaData, settings); + if (indexSettings.hasCustomDataPath()) { + // we rename index folder in custom path before renaming them in any node path + // to have the index state under a not-yet-upgraded index folder, which we use to + // continue renaming after an incomplete upgrade. + final Path customLocationSource = nodeEnv.resolveBaseCustomLocation(indexSettings) + .resolve(indexFolderName); + final Path customLocationTarget = customLocationSource.resolveSibling(index.getUUID()); + // we rename the folder in custom path only the first time we encounter a state + // in a node path, which needs upgrading, it is a no-op for subsequent node paths + if (Files.exists(customLocationSource) // might not exist if no data was written for this index + && Files.exists(customLocationTarget) == false) { + upgrade(index, customLocationSource, customLocationTarget); + } else { + logger.info("[{}] no upgrade needed - already upgraded", customLocationTarget); + } + } + upgrade(index, indexFolderPath, indexFolderPath.resolveSibling(index.getUUID())); + } else { + logger.debug("[{}] no upgrade needed - already upgraded", indexFolderPath); + } + } else { + logger.warn("[{}] no index state found - ignoring", indexFolderPath); + } + } + } + + /** + * Upgrades all indices found under nodeEnv. Already upgraded indices are ignored.
+ */ + public static void upgradeIndicesIfNeeded(final Settings settings, final NodeEnvironment nodeEnv) throws IOException { + final IndexFolderUpgrader upgrader = new IndexFolderUpgrader(settings, nodeEnv); + for (String indexFolderName : nodeEnv.availableIndexFolders()) { + upgrader.upgrade(indexFolderName); + } + } + + static boolean needsUpgrade(Index index, String indexFolderName) { + return indexFolderName.equals(index.getUUID()) == false; + } + + static MetaDataStateFormat readOnlyIndexMetaDataStateFormat() { + // NOTE: XContentType param is not used as we use the format read from the serialized index state + return new MetaDataStateFormat(XContentType.SMILE, MetaStateService.INDEX_STATE_FILE_PREFIX) { + + @Override + public void toXContent(XContentBuilder builder, IndexMetaData state) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public IndexMetaData fromXContent(XContentParser parser) throws IOException { + return IndexMetaData.Builder.fromXContent(parser); + } + }; + } +} diff --git a/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java b/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java new file mode 100644 index 000000000000..01c6ec89c7c7 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java @@ -0,0 +1,366 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.util; + +import org.apache.lucene.util.CollectionUtil; +import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.util.TestUtil; +import org.elasticsearch.Version; +import org.elasticsearch.bwcompat.OldIndexBackwardsCompatibilityIT; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.routing.AllocationId; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.io.FileSystemUtils; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.gateway.MetaDataStateFormat; +import org.elasticsearch.gateway.MetaStateService; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardPath; +import org.elasticsearch.index.shard.ShardStateMetaData; +import org.elasticsearch.test.ESTestCase; + +import java.io.BufferedWriter; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.nio.file.DirectoryStream; +import 
java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; + +import static org.hamcrest.core.Is.is; + +@LuceneTestCase.SuppressFileSystems("ExtrasFS") +public class IndexFolderUpgraderTests extends ESTestCase { + + private static MetaDataStateFormat indexMetaDataStateFormat = + new MetaDataStateFormat(XContentType.SMILE, MetaStateService.INDEX_STATE_FILE_PREFIX) { + + @Override + public void toXContent(XContentBuilder builder, IndexMetaData state) throws IOException { + IndexMetaData.Builder.toXContent(state, builder, ToXContent.EMPTY_PARAMS); + } + + @Override + public IndexMetaData fromXContent(XContentParser parser) throws IOException { + return IndexMetaData.Builder.fromXContent(parser); + } + }; + + /** + * tests custom data paths are upgraded + */ + public void testUpgradeCustomDataPath() throws IOException { + Path customPath = createTempDir(); + final Settings nodeSettings = Settings.builder() + .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()) + .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), customPath.toAbsolutePath().toString()).build(); + try (NodeEnvironment nodeEnv = newNodeEnvironment(nodeSettings)) { + final Index index = new Index(randomAsciiOfLength(10), Strings.randomBase64UUID()); + Settings settings = Settings.builder() + .put(nodeSettings) + .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID()) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0) + .put(IndexMetaData.SETTING_DATA_PATH, customPath.toAbsolutePath().toString()) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5)) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .build(); + IndexMetaData indexState = IndexMetaData.builder(index.getName()).settings(settings).build(); + int numIdxFiles = randomIntBetween(1, 5); + int 
numTranslogFiles = randomIntBetween(1, 5); + IndexSettings indexSettings = new IndexSettings(indexState, nodeSettings); + writeIndex(nodeEnv, indexSettings, numIdxFiles, numTranslogFiles); + IndexFolderUpgrader helper = new IndexFolderUpgrader(settings, nodeEnv); + helper.upgrade(indexSettings.getIndex().getName()); + checkIndex(nodeEnv, indexSettings, numIdxFiles, numTranslogFiles); + } + } + + /** + * tests upgrade on partially upgraded index, when we crash while upgrading + */ + public void testPartialUpgradeCustomDataPath() throws IOException { + Path customPath = createTempDir(); + final Settings nodeSettings = Settings.builder() + .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()) + .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), customPath.toAbsolutePath().toString()).build(); + try (NodeEnvironment nodeEnv = newNodeEnvironment(nodeSettings)) { + final Index index = new Index(randomAsciiOfLength(10), Strings.randomBase64UUID()); + Settings settings = Settings.builder() + .put(nodeSettings) + .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID()) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0) + .put(IndexMetaData.SETTING_DATA_PATH, customPath.toAbsolutePath().toString()) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5)) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .build(); + IndexMetaData indexState = IndexMetaData.builder(index.getName()).settings(settings).build(); + int numIdxFiles = randomIntBetween(1, 5); + int numTranslogFiles = randomIntBetween(1, 5); + IndexSettings indexSettings = new IndexSettings(indexState, nodeSettings); + writeIndex(nodeEnv, indexSettings, numIdxFiles, numTranslogFiles); + IndexFolderUpgrader helper = new IndexFolderUpgrader(settings, nodeEnv) { + @Override + void upgrade(Index index, Path source, Path target) throws IOException { + if(randomBoolean()) { + throw new FileNotFoundException("simulated"); + } + } + }; + // only upgrade some paths + try { + 
helper.upgrade(index.getName()); + } catch (IOException e) { + assertTrue(e instanceof FileNotFoundException); + } + helper = new IndexFolderUpgrader(settings, nodeEnv); + // try to upgrade again + helper.upgrade(indexSettings.getIndex().getName()); + checkIndex(nodeEnv, indexSettings, numIdxFiles, numTranslogFiles); + } + } + + public void testUpgrade() throws IOException { + final Settings nodeSettings = Settings.builder() + .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()).build(); + try (NodeEnvironment nodeEnv = newNodeEnvironment(nodeSettings)) { + final Index index = new Index(randomAsciiOfLength(10), Strings.randomBase64UUID()); + Settings settings = Settings.builder() + .put(nodeSettings) + .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID()) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5)) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .build(); + IndexMetaData indexState = IndexMetaData.builder(index.getName()).settings(settings).build(); + int numIdxFiles = randomIntBetween(1, 5); + int numTranslogFiles = randomIntBetween(1, 5); + IndexSettings indexSettings = new IndexSettings(indexState, nodeSettings); + writeIndex(nodeEnv, indexSettings, numIdxFiles, numTranslogFiles); + IndexFolderUpgrader helper = new IndexFolderUpgrader(settings, nodeEnv); + helper.upgrade(indexSettings.getIndex().getName()); + checkIndex(nodeEnv, indexSettings, numIdxFiles, numTranslogFiles); + } + } + + public void testUpgradeIndices() throws IOException { + final Settings nodeSettings = Settings.builder() + .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()).build(); + try (NodeEnvironment nodeEnv = newNodeEnvironment(nodeSettings)) { + Map> indexSettingsMap = new HashMap<>(); + for (int i = 0; i < randomIntBetween(2, 5); i++) { + final Index index = new Index(randomAsciiOfLength(10), Strings.randomBase64UUID()); + Settings settings = 
Settings.builder() + .put(nodeSettings) + .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID()) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5)) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .build(); + IndexMetaData indexState = IndexMetaData.builder(index.getName()).settings(settings).build(); + Tuple fileCounts = new Tuple<>(randomIntBetween(1, 5), randomIntBetween(1, 5)); + IndexSettings indexSettings = new IndexSettings(indexState, nodeSettings); + indexSettingsMap.put(indexSettings, fileCounts); + writeIndex(nodeEnv, indexSettings, fileCounts.v1(), fileCounts.v2()); + } + IndexFolderUpgrader.upgradeIndicesIfNeeded(nodeSettings, nodeEnv); + for (Map.Entry> entry : indexSettingsMap.entrySet()) { + checkIndex(nodeEnv, entry.getKey(), entry.getValue().v1(), entry.getValue().v2()); + } + } + } + + /** + * Run upgrade on a real bwc index + */ + public void testUpgradeRealIndex() throws IOException, URISyntaxException { + List indexes = new ArrayList<>(); + try (DirectoryStream stream = Files.newDirectoryStream(getBwcIndicesPath(), "index-*.zip")) { + for (Path path : stream) { + indexes.add(path); + } + } + CollectionUtil.introSort(indexes, (o1, o2) -> o1.getFileName().compareTo(o2.getFileName())); + final Path path = randomFrom(indexes); + final String indexName = path.getFileName().toString().replace(".zip", "").toLowerCase(Locale.ROOT); + try (NodeEnvironment nodeEnvironment = newNodeEnvironment()) { + Path unzipDir = createTempDir(); + Path unzipDataDir = unzipDir.resolve("data"); + // decompress the index + try (InputStream stream = Files.newInputStream(path)) { + TestUtil.unzip(stream, unzipDir); + } + // check it is unique + assertTrue(Files.exists(unzipDataDir)); + Path[] list = FileSystemUtils.files(unzipDataDir); + if (list.length != 1) { + throw new IllegalStateException("Backwards index must contain exactly one cluster but was " + list.length); + } + // the 
bwc scripts packs the indices under this path + Path src = list[0].resolve("nodes/0/indices/" + indexName); + assertTrue("[" + path + "] missing index dir: " + src.toString(), Files.exists(src)); + final Path indicesPath = randomFrom(nodeEnvironment.nodePaths()).indicesPath; + logger.info("--> injecting index [{}] into [{}]", indexName, indicesPath); + OldIndexBackwardsCompatibilityIT.copyIndex(logger, src, indexName, indicesPath); + IndexFolderUpgrader.upgradeIndicesIfNeeded(Settings.EMPTY, nodeEnvironment); + + // ensure old index folder is deleted + Set indexFolders = nodeEnvironment.availableIndexFolders(); + assertEquals(indexFolders.size(), 1); + + // ensure index metadata is moved + IndexMetaData indexMetaData = indexMetaDataStateFormat.loadLatestState(logger, + nodeEnvironment.resolveIndexFolder(indexFolders.iterator().next())); + assertNotNull(indexMetaData); + Index index = indexMetaData.getIndex(); + assertEquals(index.getName(), indexName); + + Set shardIds = nodeEnvironment.findAllShardIds(index); + // ensure all shards are moved + assertEquals(shardIds.size(), indexMetaData.getNumberOfShards()); + for (ShardId shardId : shardIds) { + final ShardPath shardPath = ShardPath.loadShardPath(logger, nodeEnvironment, shardId, + new IndexSettings(indexMetaData, Settings.EMPTY)); + final Path translog = shardPath.resolveTranslog(); + final Path idx = shardPath.resolveIndex(); + final Path state = shardPath.getShardStatePath().resolve(MetaDataStateFormat.STATE_DIR_NAME); + assertTrue(shardPath.exists()); + assertTrue(Files.exists(translog)); + assertTrue(Files.exists(idx)); + assertTrue(Files.exists(state)); + } + } + } + + public void testNeedsUpgrade() throws IOException { + final Index index = new Index("foo", Strings.randomBase64UUID()); + IndexMetaData indexState = IndexMetaData.builder(index.getName()) + .settings(Settings.builder() + .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID()) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + 
.numberOfShards(1) + .numberOfReplicas(0) + .build(); + try (NodeEnvironment nodeEnvironment = newNodeEnvironment()) { + indexMetaDataStateFormat.write(indexState, 1, nodeEnvironment.indexPaths(index)); + assertFalse(IndexFolderUpgrader.needsUpgrade(index, index.getUUID())); + } + } + + private void checkIndex(NodeEnvironment nodeEnv, IndexSettings indexSettings, + int numIdxFiles, int numTranslogFiles) throws IOException { + final Index index = indexSettings.getIndex(); + // ensure index state can be loaded + IndexMetaData loadLatestState = indexMetaDataStateFormat.loadLatestState(logger, nodeEnv.indexPaths(index)); + assertNotNull(loadLatestState); + assertEquals(loadLatestState.getIndex(), index); + for (int shardId = 0; shardId < indexSettings.getNumberOfShards(); shardId++) { + // ensure shard path can be loaded + ShardPath targetShardPath = ShardPath.loadShardPath(logger, nodeEnv, new ShardId(index, shardId), indexSettings); + assertNotNull(targetShardPath); + // ensure shard contents are copied over + final Path translog = targetShardPath.resolveTranslog(); + final Path idx = targetShardPath.resolveIndex(); + + // ensure index and translog files are copied over + assertEquals(numTranslogFiles, FileSystemUtils.files(translog).length); + assertEquals(numIdxFiles, FileSystemUtils.files(idx).length); + Path[] files = FileSystemUtils.files(translog); + final HashSet translogFiles = new HashSet<>(Arrays.asList(files)); + for (int i = 0; i < numTranslogFiles; i++) { + final String name = Integer.toString(i); + translogFiles.contains(translog.resolve(name + ".translog")); + byte[] content = Files.readAllBytes(translog.resolve(name + ".translog")); + assertEquals(name , new String(content, StandardCharsets.UTF_8)); + } + Path[] indexFileList = FileSystemUtils.files(idx); + final HashSet idxFiles = new HashSet<>(Arrays.asList(indexFileList)); + for (int i = 0; i < numIdxFiles; i++) { + final String name = Integer.toString(i); + idxFiles.contains(idx.resolve(name + 
".tst")); + byte[] content = Files.readAllBytes(idx.resolve(name + ".tst")); + assertEquals(name, new String(content, StandardCharsets.UTF_8)); + } + } + } + + private void writeIndex(NodeEnvironment nodeEnv, IndexSettings indexSettings, + int numIdxFiles, int numTranslogFiles) throws IOException { + NodeEnvironment.NodePath[] nodePaths = nodeEnv.nodePaths(); + Path[] oldIndexPaths = new Path[nodePaths.length]; + for (int i = 0; i < nodePaths.length; i++) { + oldIndexPaths[i] = nodePaths[i].indicesPath.resolve(indexSettings.getIndex().getName()); + } + indexMetaDataStateFormat.write(indexSettings.getIndexMetaData(), 1, oldIndexPaths); + for (int id = 0; id < indexSettings.getNumberOfShards(); id++) { + Path oldIndexPath = randomFrom(oldIndexPaths); + ShardId shardId = new ShardId(indexSettings.getIndex(), id); + if (indexSettings.hasCustomDataPath()) { + Path customIndexPath = nodeEnv.resolveBaseCustomLocation(indexSettings).resolve(indexSettings.getIndex().getName()); + writeShard(shardId, customIndexPath, numIdxFiles, numTranslogFiles); + } else { + writeShard(shardId, oldIndexPath, numIdxFiles, numTranslogFiles); + } + ShardStateMetaData state = new ShardStateMetaData(true, indexSettings.getUUID(), AllocationId.newInitializing()); + ShardStateMetaData.FORMAT.write(state, 1, oldIndexPath.resolve(String.valueOf(shardId.getId()))); + } + } + + private void writeShard(ShardId shardId, Path indexLocation, + final int numIdxFiles, final int numTranslogFiles) throws IOException { + Path oldShardDataPath = indexLocation.resolve(String.valueOf(shardId.getId())); + final Path translogPath = oldShardDataPath.resolve(ShardPath.TRANSLOG_FOLDER_NAME); + final Path idxPath = oldShardDataPath.resolve(ShardPath.INDEX_FOLDER_NAME); + Files.createDirectories(translogPath); + Files.createDirectories(idxPath); + for (int i = 0; i < numIdxFiles; i++) { + String filename = Integer.toString(i); + try (BufferedWriter w = Files.newBufferedWriter(idxPath.resolve(filename + ".tst"), + 
StandardCharsets.UTF_8)) { + w.write(filename); + } + } + for (int i = 0; i < numTranslogFiles; i++) { + String filename = Integer.toString(i); + try (BufferedWriter w = Files.newBufferedWriter(translogPath.resolve(filename + ".translog"), + StandardCharsets.UTF_8)) { + w.write(filename); + } + } + } +} diff --git a/docs/reference/migration/migrate_5_0.asciidoc b/docs/reference/migration/migrate_5_0.asciidoc index 23cadbbd9edc..f4aaacee35e0 100644 --- a/docs/reference/migration/migrate_5_0.asciidoc +++ b/docs/reference/migration/migrate_5_0.asciidoc @@ -4,6 +4,17 @@ This section discusses the changes that you need to be aware of when migrating your application to Elasticsearch 5.0. +[float] +=== Indices created before 5.0 + +Elasticsearch 5.0 can read indices created in version 2.0 and above. If any +of your indices were created before 2.0 you will need to upgrade to the +latest 2.x version of Elasticsearch first, in order to upgrade your indices or +to delete the old indices. Elasticsearch will not start in the presence of old +indices. To upgrade 2.x indices, first start a node which has access to all +the data folders and let it upgrade all the indices before starting up the rest of +the cluster.
+ [IMPORTANT] .Reindex indices from Elasticseach 1.x or before ========================================= From 3daa83b2d2cd0d374b36d9f6c76f498abff9c92c Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Mon, 14 Mar 2016 23:18:17 -0400 Subject: [PATCH 232/320] remove redundant getters in MetaData --- .../cluster/health/ClusterStateHealth.java | 2 +- .../metadata/IndexNameExpressionResolver.java | 10 +++++----- .../cluster/metadata/MetaData.java | 20 ++++--------------- 3 files changed, 10 insertions(+), 22 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java b/core/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java index d66a2437ef2f..42ab496fe336 100644 --- a/core/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java +++ b/core/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java @@ -79,7 +79,7 @@ public final class ClusterStateHealth implements Iterable, S * @param clusterState The current cluster state. Must not be null. 
*/ public ClusterStateHealth(ClusterState clusterState) { - this(clusterState, clusterState.metaData().concreteAllIndices()); + this(clusterState, clusterState.metaData().getConcreteAllIndices()); } /** diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 49c057f104db..2abbea04d51d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -432,7 +432,7 @@ public class IndexNameExpressionResolver extends AbstractComponent { if (routing != null) { Set r = Strings.splitStringByCommaToSet(routing); Map> routings = new HashMap<>(); - String[] concreteIndices = metaData.concreteAllIndices(); + String[] concreteIndices = metaData.getConcreteAllIndices(); for (String index : concreteIndices) { routings.put(index, r); } @@ -472,7 +472,7 @@ public class IndexNameExpressionResolver extends AbstractComponent { */ boolean isPatternMatchingAllIndices(MetaData metaData, String[] indicesOrAliases, String[] concreteIndices) { // if we end up matching on all indices, check, if its a wildcard parameter, or a "-something" structure - if (concreteIndices.length == metaData.concreteAllIndices().length && indicesOrAliases.length > 0) { + if (concreteIndices.length == metaData.getConcreteAllIndices().length && indicesOrAliases.length > 0) { //we might have something like /-test1,+test1 that would identify all indices //or something like /-test1 with test1 index missing and IndicesOptions.lenient() @@ -728,11 +728,11 @@ public class IndexNameExpressionResolver extends AbstractComponent { private List resolveEmptyOrTrivialWildcard(IndicesOptions options, MetaData metaData, boolean assertEmpty) { if (options.expandWildcardsOpen() && options.expandWildcardsClosed()) { - return 
Arrays.asList(metaData.concreteAllIndices()); + return Arrays.asList(metaData.getConcreteAllIndices()); } else if (options.expandWildcardsOpen()) { - return Arrays.asList(metaData.concreteAllOpenIndices()); + return Arrays.asList(metaData.getConcreteAllOpenIndices()); } else if (options.expandWildcardsClosed()) { - return Arrays.asList(metaData.concreteAllClosedIndices()); + return Arrays.asList(metaData.getConcreteAllClosedIndices()); } else { assert assertEmpty : "Shouldn't end up here"; return Collections.emptyList(); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index f802637f22fa..0beae6a77e8c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -370,26 +370,14 @@ public class MetaData implements Iterable, Diffable, Fr /** * Returns all the concrete indices. */ - public String[] concreteAllIndices() { - return allIndices; - } - public String[] getConcreteAllIndices() { - return concreteAllIndices(); - } - - public String[] concreteAllOpenIndices() { - return allOpenIndices; + return allIndices; } public String[] getConcreteAllOpenIndices() { return allOpenIndices; } - public String[] concreteAllClosedIndices() { - return allClosedIndices; - } - public String[] getConcreteAllClosedIndices() { return allClosedIndices; } @@ -795,9 +783,9 @@ public class MetaData implements Iterable, Diffable, Fr metaData.getIndices(), metaData.getTemplates(), metaData.getCustoms(), - metaData.concreteAllIndices(), - metaData.concreteAllOpenIndices(), - metaData.concreteAllClosedIndices(), + metaData.getConcreteAllIndices(), + metaData.getConcreteAllOpenIndices(), + metaData.getConcreteAllClosedIndices(), metaData.getAliasAndIndexLookup()); } else { // No changes: From 2b18a3ce1dcc53313fc447344c9c6642fc46aee9 Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Mon, 14 Mar 2016 
23:20:44 -0400 Subject: [PATCH 233/320] use index uuid as folder name to decouple index folder name from index name --- .../elasticsearch/env/NodeEnvironment.java | 173 ++++++++---------- .../gateway/DanglingIndicesState.java | 70 +++---- .../gateway/GatewayMetaState.java | 10 +- .../gateway/MetaStateService.java | 65 +++++-- .../elasticsearch/index/shard/ShardPath.java | 53 +----- .../elasticsearch/indices/IndicesService.java | 2 +- .../env/NodeEnvironmentTests.java | 107 +++++++---- .../gateway/DanglingIndicesStateTests.java | 77 +++++--- 8 files changed, 297 insertions(+), 260 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 0b1e3ebf9509..c6eec09b1c89 100644 --- a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -70,7 +70,6 @@ import java.util.Set; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.stream.Collectors; import static java.util.Collections.unmodifiableSet; @@ -89,7 +88,7 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl * not running on Linux, or we hit an exception trying), True means the device possibly spins and False means it does not. 
*/ public final Boolean spins; - public NodePath(Path path, Environment environment) throws IOException { + public NodePath(Path path) throws IOException { this.path = path; this.indicesPath = path.resolve(INDICES_FOLDER); this.fileStore = Environment.getFileStore(path); @@ -102,16 +101,18 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl /** * Resolves the given shards directory against this NodePath + * ${data.paths}/nodes/{node.id}/indices/{index.uuid}/{shard.id} */ public Path resolve(ShardId shardId) { return resolve(shardId.getIndex()).resolve(Integer.toString(shardId.id())); } /** - * Resolves the given indexes directory against this NodePath + * Resolves index directory against this NodePath + * ${data.paths}/nodes/{node.id}/indices/{index.uuid} */ public Path resolve(Index index) { - return indicesPath.resolve(index.getName()); + return indicesPath.resolve(index.getUUID()); } @Override @@ -131,7 +132,7 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl private final int localNodeId; private final AtomicBoolean closed = new AtomicBoolean(false); - private final Map shardLocks = new HashMap<>(); + private final Map shardLocks = new HashMap<>(); /** * Maximum number of data nodes that should run in an environment. 
@@ -186,7 +187,7 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl logger.trace("obtaining node lock on {} ...", dir.toAbsolutePath()); try { locks[dirIndex] = luceneDir.obtainLock(NODE_LOCK_FILENAME); - nodePaths[dirIndex] = new NodePath(dir, environment); + nodePaths[dirIndex] = new NodePath(dir); localNodeId = possibleLockId; } catch (LockObtainFailedException ex) { logger.trace("failed to obtain node lock on {}", dir.toAbsolutePath()); @@ -445,11 +446,11 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl * @param indexSettings settings for the index being deleted */ public void deleteIndexDirectoryUnderLock(Index index, IndexSettings indexSettings) throws IOException { - final Path[] indexPaths = indexPaths(index.getName()); + final Path[] indexPaths = indexPaths(index); logger.trace("deleting index {} directory, paths({}): [{}]", index, indexPaths.length, indexPaths); IOUtils.rm(indexPaths); if (indexSettings.hasCustomDataPath()) { - Path customLocation = resolveCustomLocation(indexSettings, index.getName()); + Path customLocation = resolveIndexCustomLocation(indexSettings); logger.trace("deleting custom index {} directory [{}]", index, customLocation); IOUtils.rm(customLocation); } @@ -517,17 +518,16 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl */ public ShardLock shardLock(final ShardId shardId, long lockTimeoutMS) throws IOException { logger.trace("acquiring node shardlock on [{}], timeout [{}]", shardId, lockTimeoutMS); - final ShardLockKey shardLockKey = new ShardLockKey(shardId); final InternalShardLock shardLock; final boolean acquired; synchronized (shardLocks) { - if (shardLocks.containsKey(shardLockKey)) { - shardLock = shardLocks.get(shardLockKey); + if (shardLocks.containsKey(shardId)) { + shardLock = shardLocks.get(shardId); shardLock.incWaitCount(); acquired = false; } else { - shardLock = new InternalShardLock(shardLockKey); - 
shardLocks.put(shardLockKey, shardLock); + shardLock = new InternalShardLock(shardId); + shardLocks.put(shardId, shardLock); acquired = true; } } @@ -547,7 +547,7 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl @Override protected void closeInternal() { shardLock.release(); - logger.trace("released shard lock for [{}]", shardLockKey); + logger.trace("released shard lock for [{}]", shardId); } }; } @@ -559,51 +559,7 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl */ public Set lockedShards() { synchronized (shardLocks) { - Set lockedShards = shardLocks.keySet().stream() - .map(shardLockKey -> new ShardId(new Index(shardLockKey.indexName, "_na_"), shardLockKey.shardId)).collect(Collectors.toSet()); - return unmodifiableSet(lockedShards); - } - } - - // a key for the shard lock. we can't use shardIds, because the contain - // the index uuid, but we want the lock semantics to the same as we map indices to disk folders, i.e., without the uuid (for now). 
- private final class ShardLockKey { - final String indexName; - final int shardId; - - public ShardLockKey(final ShardId shardId) { - this.indexName = shardId.getIndexName(); - this.shardId = shardId.id(); - } - - @Override - public String toString() { - return "[" + indexName + "][" + shardId + "]"; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - ShardLockKey that = (ShardLockKey) o; - - if (shardId != that.shardId) { - return false; - } - return indexName.equals(that.indexName); - - } - - @Override - public int hashCode() { - int result = indexName.hashCode(); - result = 31 * result + shardId; - return result; + return unmodifiableSet(new HashSet<>(shardLocks.keySet())); } } @@ -616,10 +572,10 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl */ private final Semaphore mutex = new Semaphore(1); private int waitCount = 1; // guarded by shardLocks - private final ShardLockKey lockKey; + private final ShardId shardId; - InternalShardLock(ShardLockKey id) { - lockKey = id; + InternalShardLock(ShardId shardId) { + this.shardId = shardId; mutex.acquireUninterruptibly(); } @@ -639,10 +595,10 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl synchronized (shardLocks) { assert waitCount > 0 : "waitCount is " + waitCount + " but should be > 0"; --waitCount; - logger.trace("shard lock wait count for [{}] is now [{}]", lockKey, waitCount); + logger.trace("shard lock wait count for {} is now [{}]", shardId, waitCount); if (waitCount == 0) { - logger.trace("last shard lock wait decremented, removing lock for [{}]", lockKey); - InternalShardLock remove = shardLocks.remove(lockKey); + logger.trace("last shard lock wait decremented, removing lock for {}", shardId); + InternalShardLock remove = shardLocks.remove(shardId); assert remove != null : "Removed lock was null"; } } @@ -651,11 +607,11 @@ 
public final class NodeEnvironment extends AbstractComponent implements Closeabl void acquire(long timeoutInMillis) throws LockObtainFailedException{ try { if (mutex.tryAcquire(timeoutInMillis, TimeUnit.MILLISECONDS) == false) { - throw new LockObtainFailedException("Can't lock shard " + lockKey + ", timed out after " + timeoutInMillis + "ms"); + throw new LockObtainFailedException("Can't lock shard " + shardId + ", timed out after " + timeoutInMillis + "ms"); } } catch (InterruptedException e) { Thread.currentThread().interrupt(); - throw new LockObtainFailedException("Can't lock shard " + lockKey + ", interrupted", e); + throw new LockObtainFailedException("Can't lock shard " + shardId + ", interrupted", e); } } } @@ -698,11 +654,11 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl /** * Returns all index paths. */ - public Path[] indexPaths(String indexName) { + public Path[] indexPaths(Index index) { assert assertEnvIsLocked(); Path[] indexPaths = new Path[nodePaths.length]; for (int i = 0; i < nodePaths.length; i++) { - indexPaths[i] = nodePaths[i].indicesPath.resolve(indexName); + indexPaths[i] = nodePaths[i].resolve(index); } return indexPaths; } @@ -725,25 +681,47 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl return shardLocations; } - public Set findAllIndices() throws IOException { + /** + * Returns all folder names in ${data.paths}/nodes/{node.id}/indices folder + */ + public Set availableIndexFolders() throws IOException { if (nodePaths == null || locks == null) { throw new IllegalStateException("node is not configured to store local location"); } assert assertEnvIsLocked(); - Set indices = new HashSet<>(); + Set indexFolders = new HashSet<>(); for (NodePath nodePath : nodePaths) { Path indicesLocation = nodePath.indicesPath; if (Files.isDirectory(indicesLocation)) { try (DirectoryStream stream = Files.newDirectoryStream(indicesLocation)) { for (Path index : stream) { if 
(Files.isDirectory(index)) { - indices.add(index.getFileName().toString()); + indexFolders.add(index.getFileName().toString()); } } } } } - return indices; + return indexFolders; + + } + + /** + * Resolves all existing paths to indexFolderName in ${data.paths}/nodes/{node.id}/indices + */ + public Path[] resolveIndexFolder(String indexFolderName) throws IOException { + if (nodePaths == null || locks == null) { + throw new IllegalStateException("node is not configured to store local location"); + } + assert assertEnvIsLocked(); + List paths = new ArrayList<>(nodePaths.length); + for (NodePath nodePath : nodePaths) { + Path indexFolder = nodePath.indicesPath.resolve(indexFolderName); + if (Files.exists(indexFolder)) { + paths.add(indexFolder); + } + } + return paths.toArray(new Path[paths.size()]); } /** @@ -761,13 +739,13 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl } assert assertEnvIsLocked(); final Set shardIds = new HashSet<>(); - String indexName = index.getName(); + final String indexUniquePathId = index.getUUID(); for (final NodePath nodePath : nodePaths) { Path location = nodePath.indicesPath; if (Files.isDirectory(location)) { try (DirectoryStream indexStream = Files.newDirectoryStream(location)) { for (Path indexPath : indexStream) { - if (indexName.equals(indexPath.getFileName().toString())) { + if (indexUniquePathId.equals(indexPath.getFileName().toString())) { shardIds.addAll(findAllShardsForIndex(indexPath, index)); } } @@ -778,7 +756,7 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl } private static Set findAllShardsForIndex(Path indexPath, Index index) throws IOException { - assert indexPath.getFileName().toString().equals(index.getName()); + assert indexPath.getFileName().toString().equals(index.getUUID()); Set shardIds = new HashSet<>(); if (Files.isDirectory(indexPath)) { try (DirectoryStream stream = Files.newDirectoryStream(indexPath)) { @@ -861,7 +839,7 @@ public final 
class NodeEnvironment extends AbstractComponent implements Closeabl * * @param indexSettings settings for the index */ - private Path resolveCustomLocation(IndexSettings indexSettings) { + public Path resolveBaseCustomLocation(IndexSettings indexSettings) { String customDataDir = indexSettings.customDataPath(); if (customDataDir != null) { // This assert is because this should be caught by MetaDataCreateIndexService @@ -882,10 +860,9 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl * the root path for the index. * * @param indexSettings settings for the index - * @param indexName index to resolve the path for */ - private Path resolveCustomLocation(IndexSettings indexSettings, final String indexName) { - return resolveCustomLocation(indexSettings).resolve(indexName); + private Path resolveIndexCustomLocation(IndexSettings indexSettings) { + return resolveBaseCustomLocation(indexSettings).resolve(indexSettings.getUUID()); } /** @@ -897,7 +874,7 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl * @param shardId shard to resolve the path to */ public Path resolveCustomLocation(IndexSettings indexSettings, final ShardId shardId) { - return resolveCustomLocation(indexSettings, shardId.getIndexName()).resolve(Integer.toString(shardId.id())); + return resolveIndexCustomLocation(indexSettings).resolve(Integer.toString(shardId.id())); } /** @@ -921,22 +898,24 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl for (Path path : nodeDataPaths()) { // check node-paths are writable tryWriteTempFile(path); } - for (String index : this.findAllIndices()) { - for (Path path : this.indexPaths(index)) { // check index paths are writable - Path statePath = path.resolve(MetaDataStateFormat.STATE_DIR_NAME); - tryWriteTempFile(statePath); - tryWriteTempFile(path); - } - for (ShardId shardID : this.findAllShardIds(new Index(index, IndexMetaData.INDEX_UUID_NA_VALUE))) { - Path[] paths = 
this.availableShardPaths(shardID); - for (Path path : paths) { // check shard paths are writable - Path indexDir = path.resolve(ShardPath.INDEX_FOLDER_NAME); - Path statePath = path.resolve(MetaDataStateFormat.STATE_DIR_NAME); - Path translogDir = path.resolve(ShardPath.TRANSLOG_FOLDER_NAME); - tryWriteTempFile(indexDir); - tryWriteTempFile(translogDir); - tryWriteTempFile(statePath); - tryWriteTempFile(path); + for (String indexFolderName : this.availableIndexFolders()) { + for (Path indexPath : this.resolveIndexFolder(indexFolderName)) { // check index paths are writable + Path indexStatePath = indexPath.resolve(MetaDataStateFormat.STATE_DIR_NAME); + tryWriteTempFile(indexStatePath); + tryWriteTempFile(indexPath); + try (DirectoryStream stream = Files.newDirectoryStream(indexPath)) { + for (Path shardPath : stream) { + String fileName = shardPath.getFileName().toString(); + if (Files.isDirectory(shardPath) && fileName.chars().allMatch(Character::isDigit)) { + Path indexDir = shardPath.resolve(ShardPath.INDEX_FOLDER_NAME); + Path statePath = shardPath.resolve(MetaDataStateFormat.STATE_DIR_NAME); + Path translogDir = shardPath.resolve(ShardPath.TRANSLOG_FOLDER_NAME); + tryWriteTempFile(indexDir); + tryWriteTempFile(translogDir); + tryWriteTempFile(statePath); + tryWriteTempFile(shardPath); + } + } } } } diff --git a/core/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java b/core/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java index e2fcb56b1e1b..b4d8eeae5321 100644 --- a/core/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java +++ b/core/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java @@ -19,6 +19,7 @@ package org.elasticsearch.gateway; +import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.component.AbstractComponent; @@ -26,12 +27,17 @@ import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.index.Index; +import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; @@ -47,7 +53,7 @@ public class DanglingIndicesState extends AbstractComponent { private final MetaStateService metaStateService; private final LocalAllocateDangledIndices allocateDangledIndices; - private final Map danglingIndices = ConcurrentCollections.newConcurrentMap(); + private final Map danglingIndices = ConcurrentCollections.newConcurrentMap(); @Inject public DanglingIndicesState(Settings settings, NodeEnvironment nodeEnv, MetaStateService metaStateService, @@ -74,7 +80,7 @@ public class DanglingIndicesState extends AbstractComponent { /** * The current set of dangling indices. */ - Map getDanglingIndices() { + Map getDanglingIndices() { // This might be a good use case for CopyOnWriteHashMap return unmodifiableMap(new HashMap<>(danglingIndices)); } @@ -83,10 +89,16 @@ public class DanglingIndicesState extends AbstractComponent { * Cleans dangling indices if they are already allocated on the provided meta data. 
*/ void cleanupAllocatedDangledIndices(MetaData metaData) { - for (String danglingIndex : danglingIndices.keySet()) { - if (metaData.hasIndex(danglingIndex)) { - logger.debug("[{}] no longer dangling (created), removing from dangling list", danglingIndex); - danglingIndices.remove(danglingIndex); + for (Index index : danglingIndices.keySet()) { + final IndexMetaData indexMetaData = metaData.index(index); + if (indexMetaData != null && indexMetaData.getIndex().getName().equals(index.getName())) { + if (indexMetaData.getIndex().getUUID().equals(index.getUUID()) == false) { + logger.warn("[{}] can not be imported as a dangling index, as there is already another index " + + "with the same name but a different uuid. local index will be ignored (but not deleted)", index); + } else { + logger.debug("[{}] no longer dangling (created), removing from dangling list", index); + } + danglingIndices.remove(index); } } } @@ -104,36 +116,30 @@ public class DanglingIndicesState extends AbstractComponent { * that have state on disk, but are not part of the provided meta data, or not detected * as dangled already. 
*/ - Map findNewDanglingIndices(MetaData metaData) { - final Set indices; + Map findNewDanglingIndices(MetaData metaData) { + final Set excludeIndexPathIds = new HashSet<>(metaData.indices().size() + danglingIndices.size()); + for (ObjectCursor cursor : metaData.indices().values()) { + excludeIndexPathIds.add(cursor.value.getIndex().getUUID()); + } + excludeIndexPathIds.addAll(danglingIndices.keySet().stream().map(Index::getUUID).collect(Collectors.toList())); try { - indices = nodeEnv.findAllIndices(); - } catch (Throwable e) { + final List indexMetaDataList = metaStateService.loadIndicesStates(excludeIndexPathIds::contains); + Map newIndices = new HashMap<>(indexMetaDataList.size()); + for (IndexMetaData indexMetaData : indexMetaDataList) { + if (metaData.hasIndex(indexMetaData.getIndex().getName())) { + logger.warn("[{}] can not be imported as a dangling index, as index with same name already exists in cluster metadata", + indexMetaData.getIndex()); + } else { + logger.info("[{}] dangling index, exists on local file system, but not in cluster metadata, auto import to cluster state", + indexMetaData.getIndex()); + newIndices.put(indexMetaData.getIndex(), indexMetaData); + } + } + return newIndices; + } catch (IOException e) { logger.warn("failed to list dangling indices", e); return emptyMap(); } - - Map newIndices = new HashMap<>(); - for (String indexName : indices) { - if (metaData.hasIndex(indexName) == false && danglingIndices.containsKey(indexName) == false) { - try { - IndexMetaData indexMetaData = metaStateService.loadIndexState(indexName); - if (indexMetaData != null) { - logger.info("[{}] dangling index, exists on local file system, but not in cluster metadata, auto import to cluster state", indexName); - if (!indexMetaData.getIndex().getName().equals(indexName)) { - logger.info("dangled index directory name is [{}], state name is [{}], renaming to directory name", indexName, indexMetaData.getIndex()); - indexMetaData = 
IndexMetaData.builder(indexMetaData).index(indexName).build(); - } - newIndices.put(indexName, indexMetaData); - } else { - logger.debug("[{}] dangling index directory detected, but no state found", indexName); - } - } catch (Throwable t) { - logger.warn("[{}] failed to load index state for detected dangled index", t, indexName); - } - } - } - return newIndices; } /** diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index 4f0a3bd714a9..950b4351e1d4 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.IndexFolderUpgrader; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; @@ -86,6 +87,7 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL try { ensureNoPre019State(); pre20Upgrade(); + IndexFolderUpgrader.upgradeIndicesIfNeeded(settings, nodeEnv); long startNS = System.nanoTime(); metaStateService.loadFullState(); logger.debug("took {} to load state", TimeValue.timeValueMillis(TimeValue.nsecToMSec(System.nanoTime() - startNS))); @@ -130,7 +132,7 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL for (IndexMetaData indexMetaData : newMetaData) { IndexMetaData indexMetaDataOnDisk = null; if (indexMetaData.getState().equals(IndexMetaData.State.CLOSE)) { - indexMetaDataOnDisk = metaStateService.loadIndexState(indexMetaData.getIndex().getName()); + indexMetaDataOnDisk = metaStateService.loadIndexState(indexMetaData.getIndex()); } if (indexMetaDataOnDisk != null) { 
newPreviouslyWrittenIndices.add(indexMetaDataOnDisk.getIndex()); @@ -158,7 +160,7 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL // check and write changes in indices for (IndexMetaWriteInfo indexMetaWrite : writeInfo) { try { - metaStateService.writeIndex(indexMetaWrite.reason, indexMetaWrite.newMetaData, indexMetaWrite.previousMetaData); + metaStateService.writeIndex(indexMetaWrite.reason, indexMetaWrite.newMetaData); } catch (Throwable e) { success = false; } @@ -166,7 +168,6 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL } danglingIndicesState.processDanglingIndices(newMetaData); - if (success) { previousMetaData = newMetaData; previouslyWrittenIndices = unmodifiableSet(relevantIndices); @@ -233,7 +234,8 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL // We successfully checked all indices for backward compatibility and found no non-upgradable indices, which // means the upgrade can continue. Now it's safe to overwrite index metadata with the new version. 
for (IndexMetaData indexMetaData : updateIndexMetaData) { - metaStateService.writeIndex("upgrade", indexMetaData, null); + // since we still haven't upgraded the index folders, we write index state in the old folder + metaStateService.writeIndex("upgrade", indexMetaData, nodeEnv.resolveIndexFolder(indexMetaData.getIndex().getName())); } } diff --git a/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java b/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java index 9ef09753c432..1f4cc310fdbd 100644 --- a/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java +++ b/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java @@ -33,9 +33,12 @@ import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import java.io.IOException; +import java.nio.file.Path; +import java.util.ArrayList; import java.util.HashMap; +import java.util.List; import java.util.Map; -import java.util.Set; +import java.util.function.Predicate; /** * Handles writing and loading both {@link MetaData} and {@link IndexMetaData} @@ -45,7 +48,7 @@ public class MetaStateService extends AbstractComponent { static final String FORMAT_SETTING = "gateway.format"; static final String GLOBAL_STATE_FILE_PREFIX = "global-"; - private static final String INDEX_STATE_FILE_PREFIX = "state-"; + public static final String INDEX_STATE_FILE_PREFIX = "state-"; private final NodeEnvironment nodeEnv; @@ -91,14 +94,12 @@ public class MetaStateService extends AbstractComponent { } else { metaDataBuilder = MetaData.builder(); } - - final Set indices = nodeEnv.findAllIndices(); - for (String index : indices) { - IndexMetaData indexMetaData = loadIndexState(index); - if (indexMetaData == null) { - logger.debug("[{}] failed to find metadata for existing index location", index); - } else { + for (String indexFolderName : nodeEnv.availableIndexFolders()) { + IndexMetaData indexMetaData = indexStateFormat.loadLatestState(logger, 
nodeEnv.resolveIndexFolder(indexFolderName)); + if (indexMetaData != null) { metaDataBuilder.put(indexMetaData, false); + } else { + logger.debug("[{}] failed to find metadata for existing index location", indexFolderName); } } return metaDataBuilder.build(); @@ -108,10 +109,35 @@ public class MetaStateService extends AbstractComponent { * Loads the index state for the provided index name, returning null if doesn't exists. */ @Nullable - IndexMetaData loadIndexState(String index) throws IOException { + IndexMetaData loadIndexState(Index index) throws IOException { return indexStateFormat.loadLatestState(logger, nodeEnv.indexPaths(index)); } + /** + * Loads all indices states available on disk + */ + List loadIndicesStates(Predicate excludeIndexPathIdsPredicate) throws IOException { + List indexMetaDataList = new ArrayList<>(); + for (String indexFolderName : nodeEnv.availableIndexFolders()) { + if (excludeIndexPathIdsPredicate.test(indexFolderName)) { + continue; + } + IndexMetaData indexMetaData = indexStateFormat.loadLatestState(logger, + nodeEnv.resolveIndexFolder(indexFolderName)); + if (indexMetaData != null) { + final String indexPathId = indexMetaData.getIndex().getUUID(); + if (indexFolderName.equals(indexPathId)) { + indexMetaDataList.add(indexMetaData); + } else { + throw new IllegalStateException("[" + indexFolderName+ "] invalid index folder name, rename to [" + indexPathId + "]"); + } + } else { + logger.debug("[{}] failed to find metadata for existing index location", indexFolderName); + } + } + return indexMetaDataList; + } + /** * Loads the global state, *without* index state, see {@link #loadFullState()} for that. */ @@ -129,13 +155,22 @@ public class MetaStateService extends AbstractComponent { /** * Writes the index state. 
*/ - void writeIndex(String reason, IndexMetaData indexMetaData, @Nullable IndexMetaData previousIndexMetaData) throws Exception { - logger.trace("[{}] writing state, reason [{}]", indexMetaData.getIndex(), reason); + void writeIndex(String reason, IndexMetaData indexMetaData) throws IOException { + writeIndex(reason, indexMetaData, nodeEnv.indexPaths(indexMetaData.getIndex())); + } + + /** + * Writes the index state in locations, use {@link #writeGlobalState(String, MetaData)} + * to write index state in index paths + */ + void writeIndex(String reason, IndexMetaData indexMetaData, Path[] locations) throws IOException { + final Index index = indexMetaData.getIndex(); + logger.trace("[{}] writing state, reason [{}]", index, reason); try { - indexStateFormat.write(indexMetaData, indexMetaData.getVersion(), nodeEnv.indexPaths(indexMetaData.getIndex().getName())); + indexStateFormat.write(indexMetaData, indexMetaData.getVersion(), locations); } catch (Throwable ex) { - logger.warn("[{}]: failed to write index state", ex, indexMetaData.getIndex()); - throw new IOException("failed to write state for [" + indexMetaData.getIndex() + "]", ex); + logger.warn("[{}]: failed to write index state", ex, index); + throw new IOException("failed to write state for [" + index + "]", ex); } } diff --git a/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java b/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java index 3d6fbf081025..be0d51bd2b6d 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java +++ b/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java @@ -29,7 +29,6 @@ import java.io.IOException; import java.nio.file.FileStore; import java.nio.file.Files; import java.nio.file.Path; -import java.util.HashMap; import java.util.Map; public final class ShardPath { @@ -37,22 +36,20 @@ public final class ShardPath { public static final String TRANSLOG_FOLDER_NAME = "translog"; private final Path path; - private final String 
indexUUID; private final ShardId shardId; private final Path shardStatePath; private final boolean isCustomDataPath; - public ShardPath(boolean isCustomDataPath, Path dataPath, Path shardStatePath, String indexUUID, ShardId shardId) { + public ShardPath(boolean isCustomDataPath, Path dataPath, Path shardStatePath, ShardId shardId) { assert dataPath.getFileName().toString().equals(Integer.toString(shardId.id())) : "dataPath must end with the shard ID but didn't: " + dataPath.toString(); assert shardStatePath.getFileName().toString().equals(Integer.toString(shardId.id())) : "shardStatePath must end with the shard ID but didn't: " + dataPath.toString(); - assert dataPath.getParent().getFileName().toString().equals(shardId.getIndexName()) : "dataPath must end with index/shardID but didn't: " + dataPath.toString(); - assert shardStatePath.getParent().getFileName().toString().equals(shardId.getIndexName()) : "shardStatePath must end with index/shardID but didn't: " + dataPath.toString(); + assert dataPath.getParent().getFileName().toString().equals(shardId.getIndex().getUUID()) : "dataPath must end with index path id but didn't: " + dataPath.toString(); + assert shardStatePath.getParent().getFileName().toString().equals(shardId.getIndex().getUUID()) : "shardStatePath must end with index path id but didn't: " + dataPath.toString(); if (isCustomDataPath && dataPath.equals(shardStatePath)) { throw new IllegalArgumentException("shard state path must be different to the data path when using custom data paths"); } this.isCustomDataPath = isCustomDataPath; this.path = dataPath; - this.indexUUID = indexUUID; this.shardId = shardId; this.shardStatePath = shardStatePath; } @@ -73,10 +70,6 @@ public final class ShardPath { return Files.exists(path); } - public String getIndexUUID() { - return indexUUID; - } - public ShardId getShardId() { return shardId; } @@ -144,7 +137,7 @@ public final class ShardPath { dataPath = statePath; } logger.debug("{} loaded data path [{}], state path 
[{}]", shardId, dataPath, statePath); - return new ShardPath(indexSettings.hasCustomDataPath(), dataPath, statePath, indexUUID, shardId); + return new ShardPath(indexSettings.hasCustomDataPath(), dataPath, statePath, shardId); } } @@ -168,34 +161,6 @@ public final class ShardPath { } } - /** Maps each path.data path to a "guess" of how many bytes the shards allocated to that path might additionally use over their - * lifetime; we do this so a bunch of newly allocated shards won't just all go the path with the most free space at this moment. */ - private static Map getEstimatedReservedBytes(NodeEnvironment env, long avgShardSizeInBytes, Iterable shards) throws IOException { - long totFreeSpace = 0; - for (NodeEnvironment.NodePath nodePath : env.nodePaths()) { - totFreeSpace += nodePath.fileStore.getUsableSpace(); - } - - // Very rough heuristic of how much disk space we expect the shard will use over its lifetime, the max of current average - // shard size across the cluster and 5% of the total available free space on this node: - long estShardSizeInBytes = Math.max(avgShardSizeInBytes, (long) (totFreeSpace/20.0)); - - // Collate predicted (guessed!) 
disk usage on each path.data: - Map reservedBytes = new HashMap<>(); - for (IndexShard shard : shards) { - Path dataPath = NodeEnvironment.shardStatePathToDataPath(shard.shardPath().getShardStatePath()); - - // Remove indices// subdirs from the statePath to get back to the path.data/: - Long curBytes = reservedBytes.get(dataPath); - if (curBytes == null) { - curBytes = 0L; - } - reservedBytes.put(dataPath, curBytes + estShardSizeInBytes); - } - - return reservedBytes; - } - public static ShardPath selectNewPathForShard(NodeEnvironment env, ShardId shardId, IndexSettings indexSettings, long avgShardSizeInBytes, Map dataPathToShardCount) throws IOException { @@ -206,7 +171,6 @@ public final class ShardPath { dataPath = env.resolveCustomLocation(indexSettings, shardId); statePath = env.nodePaths()[0].resolve(shardId); } else { - long totFreeSpace = 0; for (NodeEnvironment.NodePath nodePath : env.nodePaths()) { totFreeSpace += nodePath.fileStore.getUsableSpace(); @@ -241,9 +205,7 @@ public final class ShardPath { statePath = bestPath.resolve(shardId); dataPath = statePath; } - - final String indexUUID = indexSettings.getUUID(); - return new ShardPath(indexSettings.hasCustomDataPath(), dataPath, statePath, indexUUID, shardId); + return new ShardPath(indexSettings.hasCustomDataPath(), dataPath, statePath, shardId); } @Override @@ -258,9 +220,6 @@ public final class ShardPath { if (shardId != null ? !shardId.equals(shardPath.shardId) : shardPath.shardId != null) { return false; } - if (indexUUID != null ? !indexUUID.equals(shardPath.indexUUID) : shardPath.indexUUID != null) { - return false; - } if (path != null ? !path.equals(shardPath.path) : shardPath.path != null) { return false; } @@ -271,7 +230,6 @@ public final class ShardPath { @Override public int hashCode() { int result = path != null ? path.hashCode() : 0; - result = 31 * result + (indexUUID != null ? indexUUID.hashCode() : 0); result = 31 * result + (shardId != null ? 
shardId.hashCode() : 0); return result; } @@ -280,7 +238,6 @@ public final class ShardPath { public String toString() { return "ShardPath{" + "path=" + path + - ", indexUUID='" + indexUUID + '\'' + ", shard=" + shardId + '}'; } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index 0c79f7d701d3..06eb71724c87 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -531,7 +531,7 @@ public class IndicesService extends AbstractLifecycleComponent i } // this is a pure protection to make sure this index doesn't get re-imported as a dangling index. // we should in the future rather write a tombstone rather than wiping the metadata. - MetaDataStateFormat.deleteMetaState(nodeEnv.indexPaths(index.getName())); + MetaDataStateFormat.deleteMetaState(nodeEnv.indexPaths(index)); } } diff --git a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index d255a80fbb8b..3c13351a125b 100644 --- a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.gateway.MetaDataStateFormat; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.ShardId; @@ -36,7 +37,11 @@ import org.elasticsearch.test.IndexSettingsModule; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import 
java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicInteger; @@ -129,21 +134,22 @@ public class NodeEnvironmentTests extends ESTestCase { public void testShardLock() throws IOException { final NodeEnvironment env = newNodeEnvironment(); - ShardLock fooLock = env.shardLock(new ShardId("foo", "_na_", 0)); - assertEquals(new ShardId("foo", "_na_", 0), fooLock.getShardId()); + Index index = new Index("foo", "fooUUID"); + ShardLock fooLock = env.shardLock(new ShardId(index, 0)); + assertEquals(new ShardId(index, 0), fooLock.getShardId()); try { - env.shardLock(new ShardId("foo", "_na_", 0)); + env.shardLock(new ShardId(index, 0)); fail("shard is locked"); } catch (LockObtainFailedException ex) { // expected } - for (Path path : env.indexPaths("foo")) { + for (Path path : env.indexPaths(index)) { Files.createDirectories(path.resolve("0")); Files.createDirectories(path.resolve("1")); } try { - env.lockAllForIndex(new Index("foo", "_na_"), idxSettings, randomIntBetween(0, 10)); + env.lockAllForIndex(index, idxSettings, randomIntBetween(0, 10)); fail("shard 0 is locked"); } catch (LockObtainFailedException ex) { // expected @@ -151,11 +157,11 @@ public class NodeEnvironmentTests extends ESTestCase { fooLock.close(); // can lock again? 
- env.shardLock(new ShardId("foo", "_na_", 0)).close(); + env.shardLock(new ShardId(index, 0)).close(); - List locks = env.lockAllForIndex(new Index("foo", "_na_"), idxSettings, randomIntBetween(0, 10)); + List locks = env.lockAllForIndex(index, idxSettings, randomIntBetween(0, 10)); try { - env.shardLock(new ShardId("foo", "_na_", 0)); + env.shardLock(new ShardId(index, 0)); fail("shard is locked"); } catch (LockObtainFailedException ex) { // expected @@ -165,18 +171,45 @@ public class NodeEnvironmentTests extends ESTestCase { env.close(); } - public void testGetAllIndices() throws Exception { + public void testAvailableIndexFolders() throws Exception { final NodeEnvironment env = newNodeEnvironment(); final int numIndices = randomIntBetween(1, 10); + Set actualPaths = new HashSet<>(); for (int i = 0; i < numIndices; i++) { - for (Path path : env.indexPaths("foo" + i)) { - Files.createDirectories(path); + Index index = new Index("foo" + i, "fooUUID" + i); + for (Path path : env.indexPaths(index)) { + Files.createDirectories(path.resolve(MetaDataStateFormat.STATE_DIR_NAME)); + actualPaths.add(path.getFileName().toString()); } } - Set indices = env.findAllIndices(); - assertEquals(indices.size(), numIndices); + + assertThat(actualPaths, equalTo(env.availableIndexFolders())); + assertTrue("LockedShards: " + env.lockedShards(), env.lockedShards().isEmpty()); + env.close(); + } + + public void testResolveIndexFolders() throws Exception { + final NodeEnvironment env = newNodeEnvironment(); + final int numIndices = randomIntBetween(1, 10); + Map> actualIndexDataPaths = new HashMap<>(); for (int i = 0; i < numIndices; i++) { - assertTrue(indices.contains("foo" + i)); + Index index = new Index("foo" + i, "fooUUID" + i); + Path[] indexPaths = env.indexPaths(index); + for (Path path : indexPaths) { + Files.createDirectories(path); + String fileName = path.getFileName().toString(); + List paths = actualIndexDataPaths.get(fileName); + if (paths == null) { + paths = new 
ArrayList<>(); + } + paths.add(path); + actualIndexDataPaths.put(fileName, paths); + } + } + for (Map.Entry> actualIndexDataPathEntry : actualIndexDataPaths.entrySet()) { + List actual = actualIndexDataPathEntry.getValue(); + Path[] actualPaths = actual.toArray(new Path[actual.size()]); + assertThat(actualPaths, equalTo(env.resolveIndexFolder(actualIndexDataPathEntry.getKey()))); } assertTrue("LockedShards: " + env.lockedShards(), env.lockedShards().isEmpty()); env.close(); @@ -184,44 +217,45 @@ public class NodeEnvironmentTests extends ESTestCase { public void testDeleteSafe() throws IOException, InterruptedException { final NodeEnvironment env = newNodeEnvironment(); - ShardLock fooLock = env.shardLock(new ShardId("foo", "_na_", 0)); - assertEquals(new ShardId("foo", "_na_", 0), fooLock.getShardId()); + final Index index = new Index("foo", "fooUUID"); + ShardLock fooLock = env.shardLock(new ShardId(index, 0)); + assertEquals(new ShardId(index, 0), fooLock.getShardId()); - for (Path path : env.indexPaths("foo")) { + for (Path path : env.indexPaths(index)) { Files.createDirectories(path.resolve("0")); Files.createDirectories(path.resolve("1")); } try { - env.deleteShardDirectorySafe(new ShardId("foo", "_na_", 0), idxSettings); + env.deleteShardDirectorySafe(new ShardId(index, 0), idxSettings); fail("shard is locked"); } catch (LockObtainFailedException ex) { // expected } - for (Path path : env.indexPaths("foo")) { + for (Path path : env.indexPaths(index)) { assertTrue(Files.exists(path.resolve("0"))); assertTrue(Files.exists(path.resolve("1"))); } - env.deleteShardDirectorySafe(new ShardId("foo", "_na_", 1), idxSettings); + env.deleteShardDirectorySafe(new ShardId(index, 1), idxSettings); - for (Path path : env.indexPaths("foo")) { + for (Path path : env.indexPaths(index)) { assertTrue(Files.exists(path.resolve("0"))); assertFalse(Files.exists(path.resolve("1"))); } try { - env.deleteIndexDirectorySafe(new Index("foo", "_na_"), randomIntBetween(0, 10), 
idxSettings); + env.deleteIndexDirectorySafe(index, randomIntBetween(0, 10), idxSettings); fail("shard is locked"); } catch (LockObtainFailedException ex) { // expected } fooLock.close(); - for (Path path : env.indexPaths("foo")) { + for (Path path : env.indexPaths(index)) { assertTrue(Files.exists(path)); } @@ -242,7 +276,7 @@ public class NodeEnvironmentTests extends ESTestCase { @Override protected void doRun() throws Exception { start.await(); - try (ShardLock autoCloses = env.shardLock(new ShardId("foo", "_na_", 0))) { + try (ShardLock autoCloses = env.shardLock(new ShardId(index, 0))) { blockLatch.countDown(); Thread.sleep(randomIntBetween(1, 10)); } @@ -257,11 +291,11 @@ public class NodeEnvironmentTests extends ESTestCase { start.countDown(); blockLatch.await(); - env.deleteIndexDirectorySafe(new Index("foo", "_na_"), 5000, idxSettings); + env.deleteIndexDirectorySafe(index, 5000, idxSettings); assertNull(threadException.get()); - for (Path path : env.indexPaths("foo")) { + for (Path path : env.indexPaths(index)) { assertFalse(Files.exists(path)); } latch.await(); @@ -300,7 +334,7 @@ public class NodeEnvironmentTests extends ESTestCase { for (int i = 0; i < iters; i++) { int shard = randomIntBetween(0, counts.length - 1); try { - try (ShardLock autoCloses = env.shardLock(new ShardId("foo", "_na_", shard), scaledRandomIntBetween(0, 10))) { + try (ShardLock autoCloses = env.shardLock(new ShardId("foo", "fooUUID", shard), scaledRandomIntBetween(0, 10))) { counts[shard].value++; countsAtomic[shard].incrementAndGet(); assertEquals(flipFlop[shard].incrementAndGet(), 1); @@ -334,37 +368,38 @@ public class NodeEnvironmentTests extends ESTestCase { String[] dataPaths = tmpPaths(); NodeEnvironment env = newNodeEnvironment(dataPaths, "/tmp", Settings.EMPTY); - IndexSettings s1 = IndexSettingsModule.newIndexSettings("myindex", Settings.EMPTY); - IndexSettings s2 = IndexSettingsModule.newIndexSettings("myindex", Settings.builder().put(IndexMetaData.SETTING_DATA_PATH, 
"/tmp/foo").build()); - Index index = new Index("myindex", "_na_"); + final Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_INDEX_UUID, "myindexUUID").build(); + IndexSettings s1 = IndexSettingsModule.newIndexSettings("myindex", indexSettings); + IndexSettings s2 = IndexSettingsModule.newIndexSettings("myindex", Settings.builder().put(indexSettings).put(IndexMetaData.SETTING_DATA_PATH, "/tmp/foo").build()); + Index index = new Index("myindex", "myindexUUID"); ShardId sid = new ShardId(index, 0); assertFalse("no settings should mean no custom data path", s1.hasCustomDataPath()); assertTrue("settings with path_data should have a custom data path", s2.hasCustomDataPath()); assertThat(env.availableShardPaths(sid), equalTo(env.availableShardPaths(sid))); - assertThat(env.resolveCustomLocation(s2, sid), equalTo(PathUtils.get("/tmp/foo/0/myindex/0"))); + assertThat(env.resolveCustomLocation(s2, sid), equalTo(PathUtils.get("/tmp/foo/0/" + index.getUUID() + "/0"))); assertThat("shard paths with a custom data_path should contain only regular paths", env.availableShardPaths(sid), - equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/myindex/0"))); + equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/" + index.getUUID() + "/0"))); assertThat("index paths uses the regular template", - env.indexPaths(index.getName()), equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/myindex"))); + env.indexPaths(index), equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/" + index.getUUID()))); env.close(); NodeEnvironment env2 = newNodeEnvironment(dataPaths, "/tmp", Settings.builder().put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), false).build()); assertThat(env2.availableShardPaths(sid), equalTo(env2.availableShardPaths(sid))); - assertThat(env2.resolveCustomLocation(s2, sid), equalTo(PathUtils.get("/tmp/foo/myindex/0"))); + assertThat(env2.resolveCustomLocation(s2, sid), equalTo(PathUtils.get("/tmp/foo/" + 
index.getUUID() + "/0"))); assertThat("shard paths with a custom data_path should contain only regular paths", env2.availableShardPaths(sid), - equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/myindex/0"))); + equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/" + index.getUUID() + "/0"))); assertThat("index paths uses the regular template", - env2.indexPaths(index.getName()), equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/myindex"))); + env2.indexPaths(index), equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/" + index.getUUID()))); env2.close(); } diff --git a/core/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java b/core/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java index 95c52f89933b..51536375dca5 100644 --- a/core/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java @@ -29,6 +29,7 @@ import org.hamcrest.Matchers; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.StandardCopyOption; import java.util.Map; import static org.hamcrest.Matchers.equalTo; @@ -53,6 +54,47 @@ public class DanglingIndicesStateTests extends ESTestCase { assertTrue(danglingState.getDanglingIndices().isEmpty()); } } + public void testDanglingIndicesDiscovery() throws Exception { + try (NodeEnvironment env = newNodeEnvironment()) { + MetaStateService metaStateService = new MetaStateService(Settings.EMPTY, env); + DanglingIndicesState danglingState = new DanglingIndicesState(Settings.EMPTY, env, metaStateService, null); + + assertTrue(danglingState.getDanglingIndices().isEmpty()); + MetaData metaData = MetaData.builder().build(); + final Settings.Builder settings = Settings.builder().put(indexSettings).put(IndexMetaData.SETTING_INDEX_UUID, "test1UUID"); + IndexMetaData dangledIndex = IndexMetaData.builder("test1").settings(settings).build(); + 
metaStateService.writeIndex("test_write", dangledIndex); + Map newDanglingIndices = danglingState.findNewDanglingIndices(metaData); + assertTrue(newDanglingIndices.containsKey(dangledIndex.getIndex())); + metaData = MetaData.builder().put(dangledIndex, false).build(); + newDanglingIndices = danglingState.findNewDanglingIndices(metaData); + assertFalse(newDanglingIndices.containsKey(dangledIndex.getIndex())); + } + } + + public void testInvalidIndexFolder() throws Exception { + try (NodeEnvironment env = newNodeEnvironment()) { + MetaStateService metaStateService = new MetaStateService(Settings.EMPTY, env); + DanglingIndicesState danglingState = new DanglingIndicesState(Settings.EMPTY, env, metaStateService, null); + + MetaData metaData = MetaData.builder().build(); + final String uuid = "test1UUID"; + final Settings.Builder settings = Settings.builder().put(indexSettings).put(IndexMetaData.SETTING_INDEX_UUID, uuid); + IndexMetaData dangledIndex = IndexMetaData.builder("test1").settings(settings).build(); + metaStateService.writeIndex("test_write", dangledIndex); + for (Path path : env.resolveIndexFolder(uuid)) { + if (Files.exists(path)) { + Files.move(path, path.resolveSibling("invalidUUID"), StandardCopyOption.ATOMIC_MOVE); + } + } + try { + danglingState.findNewDanglingIndices(metaData); + fail("no exception thrown for invalid folder name"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), equalTo("[invalidUUID] invalid index folder name, rename to [test1UUID]")); + } + } + } public void testDanglingProcessing() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { @@ -61,15 +103,16 @@ public class DanglingIndicesStateTests extends ESTestCase { MetaData metaData = MetaData.builder().build(); - IndexMetaData dangledIndex = IndexMetaData.builder("test1").settings(indexSettings).build(); - metaStateService.writeIndex("test_write", dangledIndex, null); + final Settings.Builder settings = 
Settings.builder().put(indexSettings).put(IndexMetaData.SETTING_INDEX_UUID, "test1UUID"); + IndexMetaData dangledIndex = IndexMetaData.builder("test1").settings(settings).build(); + metaStateService.writeIndex("test_write", dangledIndex); // check that several runs when not in the metadata still keep the dangled index around int numberOfChecks = randomIntBetween(1, 10); for (int i = 0; i < numberOfChecks; i++) { - Map newDanglingIndices = danglingState.findNewDanglingIndices(metaData); + Map newDanglingIndices = danglingState.findNewDanglingIndices(metaData); assertThat(newDanglingIndices.size(), equalTo(1)); - assertThat(newDanglingIndices.keySet(), Matchers.hasItems("test1")); + assertThat(newDanglingIndices.keySet(), Matchers.hasItems(dangledIndex.getIndex())); assertTrue(danglingState.getDanglingIndices().isEmpty()); } @@ -77,7 +120,7 @@ public class DanglingIndicesStateTests extends ESTestCase { danglingState.findNewAndAddDanglingIndices(metaData); assertThat(danglingState.getDanglingIndices().size(), equalTo(1)); - assertThat(danglingState.getDanglingIndices().keySet(), Matchers.hasItems("test1")); + assertThat(danglingState.getDanglingIndices().keySet(), Matchers.hasItems(dangledIndex.getIndex())); } // simulate allocation to the metadata @@ -85,35 +128,15 @@ public class DanglingIndicesStateTests extends ESTestCase { // check that several runs when in the metadata, but not cleaned yet, still keeps dangled for (int i = 0; i < numberOfChecks; i++) { - Map newDanglingIndices = danglingState.findNewDanglingIndices(metaData); + Map newDanglingIndices = danglingState.findNewDanglingIndices(metaData); assertTrue(newDanglingIndices.isEmpty()); assertThat(danglingState.getDanglingIndices().size(), equalTo(1)); - assertThat(danglingState.getDanglingIndices().keySet(), Matchers.hasItems("test1")); + assertThat(danglingState.getDanglingIndices().keySet(), Matchers.hasItems(dangledIndex.getIndex())); } danglingState.cleanupAllocatedDangledIndices(metaData); 
assertTrue(danglingState.getDanglingIndices().isEmpty()); } } - - public void testRenameOfIndexState() throws Exception { - try (NodeEnvironment env = newNodeEnvironment()) { - MetaStateService metaStateService = new MetaStateService(Settings.EMPTY, env); - DanglingIndicesState danglingState = new DanglingIndicesState(Settings.EMPTY, env, metaStateService, null); - - MetaData metaData = MetaData.builder().build(); - - IndexMetaData dangledIndex = IndexMetaData.builder("test1").settings(indexSettings).build(); - metaStateService.writeIndex("test_write", dangledIndex, null); - - for (Path path : env.indexPaths("test1")) { - Files.move(path, path.getParent().resolve("test1_renamed")); - } - - Map newDanglingIndices = danglingState.findNewDanglingIndices(metaData); - assertThat(newDanglingIndices.size(), equalTo(1)); - assertThat(newDanglingIndices.keySet(), Matchers.hasItems("test1_renamed")); - } - } } From c3078f4d659156489ca35db8f5e92630572ac319 Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Mon, 14 Mar 2016 23:22:12 -0400 Subject: [PATCH 234/320] adapt tests to use index uuid as folder name --- .../OldIndexBackwardsCompatibilityIT.java | 22 ++++- .../elasticsearch/cluster/DiskUsageTests.java | 10 +- .../cluster/allocation/ClusterRerouteIT.java | 6 +- .../gateway/MetaDataWriteDataNodesIT.java | 14 +-- .../gateway/MetaStateServiceTests.java | 9 +- .../index/shard/IndexShardTests.java | 18 ++-- .../index/shard/ShardPathTests.java | 31 +++--- .../index/store/CorruptedFileIT.java | 16 +-- .../index/store/CorruptedTranslogIT.java | 4 +- .../index/store/FsDirectoryServiceTests.java | 8 +- .../index/store/IndexStoreTests.java | 10 +- .../store/IndicesStoreIntegrationIT.java | 98 ++++++++++--------- .../Murmur3FieldMapperUpgradeTests.java | 2 + .../size/SizeFieldMapperUpgradeTests.java | 2 + 14 files changed, 150 insertions(+), 100 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java 
b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index 1b0988f21bae..784813c3b4ef 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.IndexFolderUpgrader; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; @@ -105,6 +106,8 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { List indexes; List unsupportedIndexes; + static String singleDataPathNodeName; + static String multiDataPathNodeName; static Path singleDataPath; static Path[] multiDataPath; @@ -127,6 +130,8 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { @AfterClass public static void tearDownStatics() { + singleDataPathNodeName = null; + multiDataPathNodeName = null; singleDataPath = null; multiDataPath = null; } @@ -157,7 +162,8 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { InternalTestCluster.Async multiDataPathNode = internalCluster().startNodeAsync(nodeSettings.build()); // find single data path dir - Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, singleDataPathNode.get()).nodeDataPaths(); + singleDataPathNodeName = singleDataPathNode.get(); + Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, singleDataPathNodeName).nodeDataPaths(); assertEquals(1, nodePaths.length); singleDataPath = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER); assertFalse(Files.exists(singleDataPath)); @@ -165,7 +171,8 @@ public class 
OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { logger.info("--> Single data path: {}", singleDataPath); // find multi data path dirs - nodePaths = internalCluster().getInstance(NodeEnvironment.class, multiDataPathNode.get()).nodeDataPaths(); + multiDataPathNodeName = multiDataPathNode.get(); + nodePaths = internalCluster().getInstance(NodeEnvironment.class, multiDataPathNodeName).nodeDataPaths(); assertEquals(2, nodePaths.length); multiDataPath = new Path[] {nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER), nodePaths[1].resolve(NodeEnvironment.INDICES_FOLDER)}; @@ -178,6 +185,13 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { replicas.get(); // wait for replicas } + void upgradeIndexFolder() throws Exception { + final NodeEnvironment nodeEnvironment = internalCluster().getInstance(NodeEnvironment.class, singleDataPathNodeName); + IndexFolderUpgrader.upgradeIndicesIfNeeded(Settings.EMPTY, nodeEnvironment); + final NodeEnvironment nodeEnv = internalCluster().getInstance(NodeEnvironment.class, multiDataPathNodeName); + IndexFolderUpgrader.upgradeIndicesIfNeeded(Settings.EMPTY, nodeEnv); + } + String loadIndex(String indexFile) throws Exception { Path unzipDir = createTempDir(); Path unzipDataDir = unzipDir.resolve("data"); @@ -296,6 +310,10 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { void assertOldIndexWorks(String index) throws Exception { Version version = extractVersion(index); String indexName = loadIndex(index); + // we explicitly upgrade the index folders as these indices + // are imported as dangling indices and not available on + // node startup + upgradeIndexFolder(); importIndex(indexName); assertIndexSanity(indexName, version); assertBasicSearchWorks(indexName); diff --git a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java index c18a36e01429..424565f13bf3 100644 --- 
a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java @@ -92,22 +92,22 @@ public class DiskUsageTests extends ESTestCase { } public void testFillShardLevelInfo() { - final Index index = new Index("test", "_na_"); + final Index index = new Index("test", "0xdeadbeef"); ShardRouting test_0 = ShardRouting.newUnassigned(index, 0, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ShardRoutingHelper.initialize(test_0, "node1"); ShardRoutingHelper.moveToStarted(test_0); - Path test0Path = createTempDir().resolve("indices").resolve("test").resolve("0"); + Path test0Path = createTempDir().resolve("indices").resolve(index.getUUID()).resolve("0"); CommonStats commonStats0 = new CommonStats(); commonStats0.store = new StoreStats(100, 1); ShardRouting test_1 = ShardRouting.newUnassigned(index, 1, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ShardRoutingHelper.initialize(test_1, "node2"); ShardRoutingHelper.moveToStarted(test_1); - Path test1Path = createTempDir().resolve("indices").resolve("test").resolve("1"); + Path test1Path = createTempDir().resolve("indices").resolve(index.getUUID()).resolve("1"); CommonStats commonStats1 = new CommonStats(); commonStats1.store = new StoreStats(1000, 1); ShardStats[] stats = new ShardStats[] { - new ShardStats(test_0, new ShardPath(false, test0Path, test0Path, "0xdeadbeef", test_0.shardId()), commonStats0 , null), - new ShardStats(test_1, new ShardPath(false, test1Path, test1Path, "0xdeadbeef", test_1.shardId()), commonStats1 , null) + new ShardStats(test_0, new ShardPath(false, test0Path, test0Path, test_0.shardId()), commonStats0 , null), + new ShardStats(test_1, new ShardPath(false, test1Path, test1Path, test_1.shardId()), commonStats1 , null) }; ImmutableOpenMap.Builder shardSizes = ImmutableOpenMap.builder(); ImmutableOpenMap.Builder routingToPath = ImmutableOpenMap.builder(); diff --git 
a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java index cc5ce05aca6a..2016175a49cc 100644 --- a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java @@ -22,8 +22,10 @@ package org.elasticsearch.cluster.allocation; import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; +import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; @@ -42,6 +44,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -226,9 +229,10 @@ public class ClusterRerouteIT extends ESIntegTestCase { assertThat(state.getRoutingNodes().node(state.nodes().resolveNode(node_1).id()).get(0).state(), equalTo(ShardRoutingState.STARTED)); client().prepareIndex("test", "type", "1").setSource("field", "value").setRefresh(true).execute().actionGet(); + final Index index = resolveIndex("test"); logger.info("--> closing all nodes"); - Path[] shardLocation = internalCluster().getInstance(NodeEnvironment.class, node_1).availableShardPaths(new ShardId("test", "_na_", 0)); + Path[] 
shardLocation = internalCluster().getInstance(NodeEnvironment.class, node_1).availableShardPaths(new ShardId(index, 0)); assertThat(FileSystemUtils.exists(shardLocation), equalTo(true)); // make sure the data is there! internalCluster().closeNonSharedNodes(false); // don't wipe data directories the index needs to be there! diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java index bada7faa8c83..cecaef6c1e75 100644 --- a/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java @@ -21,6 +21,7 @@ package org.elasticsearch.gateway; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; +import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; @@ -68,14 +69,15 @@ public class MetaDataWriteDataNodesIT extends ESIntegTestCase { index(index, "doc", "1", jsonBuilder().startObject().field("text", "some text").endObject()); ensureGreen(); assertIndexInMetaState(node1, index); - assertIndexDirectoryDeleted(node2, index); + Index resolveIndex = resolveIndex(index); + assertIndexDirectoryDeleted(node2, resolveIndex); assertIndexInMetaState(masterNode, index); logger.debug("relocating index..."); client().admin().indices().prepareUpdateSettings(index).setSettings(Settings.builder().put(IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "_name", node2)).get(); client().admin().cluster().prepareHealth().setWaitForRelocatingShards(0).get(); ensureGreen(); - assertIndexDirectoryDeleted(node1, index); + assertIndexDirectoryDeleted(node1, resolveIndex); assertIndexInMetaState(node2, index); 
assertIndexInMetaState(masterNode, index); } @@ -146,10 +148,10 @@ public class MetaDataWriteDataNodesIT extends ESIntegTestCase { assertThat(indicesMetaData.get(index).getState(), equalTo(IndexMetaData.State.OPEN)); } - protected void assertIndexDirectoryDeleted(final String nodeName, final String indexName) throws Exception { + protected void assertIndexDirectoryDeleted(final String nodeName, final Index index) throws Exception { assertBusy(() -> { logger.info("checking if index directory exists..."); - assertFalse("Expecting index directory of " + indexName + " to be deleted from node " + nodeName, indexDirectoryExists(nodeName, indexName)); + assertFalse("Expecting index directory of " + index + " to be deleted from node " + nodeName, indexDirectoryExists(nodeName, index)); } ); } @@ -168,9 +170,9 @@ public class MetaDataWriteDataNodesIT extends ESIntegTestCase { } - private boolean indexDirectoryExists(String nodeName, String indexName) { + private boolean indexDirectoryExists(String nodeName, Index index) { NodeEnvironment nodeEnv = ((InternalTestCluster) cluster()).getInstance(NodeEnvironment.class, nodeName); - for (Path path : nodeEnv.indexPaths(indexName)) { + for (Path path : nodeEnv.indexPaths(index)) { if (Files.exists(path)) { return true; } diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaStateServiceTests.java b/core/src/test/java/org/elasticsearch/gateway/MetaStateServiceTests.java index 8bcb9c45402a..5f38456d2d11 100644 --- a/core/src/test/java/org/elasticsearch/gateway/MetaStateServiceTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/MetaStateServiceTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.index.Index; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.equalTo; @@ -43,15 +44,15 @@ 
public class MetaStateServiceTests extends ESTestCase { MetaStateService metaStateService = new MetaStateService(randomSettings(), env); IndexMetaData index = IndexMetaData.builder("test1").settings(indexSettings).build(); - metaStateService.writeIndex("test_write", index, null); - assertThat(metaStateService.loadIndexState("test1"), equalTo(index)); + metaStateService.writeIndex("test_write", index); + assertThat(metaStateService.loadIndexState(index.getIndex()), equalTo(index)); } } public void testLoadMissingIndex() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { MetaStateService metaStateService = new MetaStateService(randomSettings(), env); - assertThat(metaStateService.loadIndexState("test1"), nullValue()); + assertThat(metaStateService.loadIndexState(new Index("test1", "test1UUID")), nullValue()); } } @@ -94,7 +95,7 @@ public class MetaStateServiceTests extends ESTestCase { .build(); metaStateService.writeGlobalState("test_write", metaData); - metaStateService.writeIndex("test_write", index, null); + metaStateService.writeIndex("test_write", index); MetaData loadedState = metaStateService.loadFullState(); assertThat(loadedState.persistentSettings(), equalTo(metaData.persistentSettings())); diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 1acf4e3fa1b4..2954558e73d4 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -70,6 +70,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.ShardLock; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.NodeServicesProvider; @@ -97,6 +98,7 @@ import 
org.elasticsearch.test.FieldMaskingReader; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.test.junit.annotations.TestLogging; import java.io.IOException; import java.nio.file.Files; @@ -141,25 +143,25 @@ public class IndexShardTests extends ESSingleNodeTestCase { public void testWriteShardState() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { - ShardId id = new ShardId("foo", "_na_", 1); + ShardId id = new ShardId("foo", "fooUUID", 1); long version = between(1, Integer.MAX_VALUE / 2); boolean primary = randomBoolean(); AllocationId allocationId = randomBoolean() ? null : randomAllocationId(); - ShardStateMetaData state1 = new ShardStateMetaData(version, primary, "foo", allocationId); + ShardStateMetaData state1 = new ShardStateMetaData(version, primary, "fooUUID", allocationId); write(state1, env.availableShardPaths(id)); ShardStateMetaData shardStateMetaData = load(logger, env.availableShardPaths(id)); assertEquals(shardStateMetaData, state1); - ShardStateMetaData state2 = new ShardStateMetaData(version, primary, "foo", allocationId); + ShardStateMetaData state2 = new ShardStateMetaData(version, primary, "fooUUID", allocationId); write(state2, env.availableShardPaths(id)); shardStateMetaData = load(logger, env.availableShardPaths(id)); assertEquals(shardStateMetaData, state1); - ShardStateMetaData state3 = new ShardStateMetaData(version + 1, primary, "foo", allocationId); + ShardStateMetaData state3 = new ShardStateMetaData(version + 1, primary, "fooUUID", allocationId); write(state3, env.availableShardPaths(id)); shardStateMetaData = load(logger, env.availableShardPaths(id)); assertEquals(shardStateMetaData, state3); - assertEquals("foo", state3.indexUUID); + assertEquals("fooUUID", state3.indexUUID); } } @@ -167,7 +169,9 @@ public class IndexShardTests extends ESSingleNodeTestCase { createIndex("test"); 
ensureGreen(); NodeEnvironment env = getInstanceFromNode(NodeEnvironment.class); - Path[] shardPaths = env.availableShardPaths(new ShardId("test", "_na_", 0)); + ClusterService cs = getInstanceFromNode(ClusterService.class); + final Index index = cs.state().metaData().index("test").getIndex(); + Path[] shardPaths = env.availableShardPaths(new ShardId(index, 0)); logger.info("--> paths: [{}]", (Object)shardPaths); // Should not be able to acquire the lock because it's already open try { @@ -179,7 +183,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { // Test without the regular shard lock to assume we can acquire it // (worst case, meaning that the shard lock could be acquired and // we're green to delete the shard's directory) - ShardLock sLock = new DummyShardLock(new ShardId("test", "_na_", 0)); + ShardLock sLock = new DummyShardLock(new ShardId(index, 0)); try { env.deleteShardDirectoryUnderLock(sLock, IndexSettingsModule.newIndexSettings("test", Settings.EMPTY)); fail("should not have been able to delete the directory"); diff --git a/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java b/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java index 2a52e8c557c3..537ce83d3d26 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.routing.AllocationId; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.index.Index; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -42,13 +43,13 @@ public class ShardPathTests extends ESTestCase { Settings.Builder builder = settingsBuilder().put(IndexMetaData.SETTING_INDEX_UUID, "0xDEADBEEF") .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT); Settings settings = 
builder.build(); - ShardId shardId = new ShardId("foo", "_na_", 0); + ShardId shardId = new ShardId("foo", "0xDEADBEEF", 0); Path[] paths = env.availableShardPaths(shardId); Path path = randomFrom(paths); ShardStateMetaData.FORMAT.write(new ShardStateMetaData(2, true, "0xDEADBEEF", AllocationId.newInitializing()), 2, path); ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings)); assertEquals(path, shardPath.getDataPath()); - assertEquals("0xDEADBEEF", shardPath.getIndexUUID()); + assertEquals("0xDEADBEEF", shardPath.getShardId().getIndex().getUUID()); assertEquals("foo", shardPath.getShardId().getIndexName()); assertEquals(path.resolve("translog"), shardPath.resolveTranslog()); assertEquals(path.resolve("index"), shardPath.resolveIndex()); @@ -57,14 +58,15 @@ public class ShardPathTests extends ESTestCase { public void testFailLoadShardPathOnMultiState() throws IOException { try (final NodeEnvironment env = newNodeEnvironment(settingsBuilder().build())) { - Settings.Builder builder = settingsBuilder().put(IndexMetaData.SETTING_INDEX_UUID, "0xDEADBEEF") + final String indexUUID = "0xDEADBEEF"; + Settings.Builder builder = settingsBuilder().put(IndexMetaData.SETTING_INDEX_UUID, indexUUID) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT); Settings settings = builder.build(); - ShardId shardId = new ShardId("foo", "_na_", 0); + ShardId shardId = new ShardId("foo", indexUUID, 0); Path[] paths = env.availableShardPaths(shardId); assumeTrue("This test tests multi data.path but we only got one", paths.length > 1); int id = randomIntBetween(1, 10); - ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, "0xDEADBEEF", AllocationId.newInitializing()), id, paths); + ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, indexUUID, AllocationId.newInitializing()), id, paths); ShardPath.loadShardPath(logger, env, shardId, 
IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings)); fail("Expected IllegalStateException"); } catch (IllegalStateException e) { @@ -77,7 +79,7 @@ public class ShardPathTests extends ESTestCase { Settings.Builder builder = settingsBuilder().put(IndexMetaData.SETTING_INDEX_UUID, "foobar") .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT); Settings settings = builder.build(); - ShardId shardId = new ShardId("foo", "_na_", 0); + ShardId shardId = new ShardId("foo", "foobar", 0); Path[] paths = env.availableShardPaths(shardId); Path path = randomFrom(paths); int id = randomIntBetween(1, 10); @@ -90,9 +92,10 @@ public class ShardPathTests extends ESTestCase { } public void testIllegalCustomDataPath() { - final Path path = createTempDir().resolve("foo").resolve("0"); + Index index = new Index("foo", "foo"); + final Path path = createTempDir().resolve(index.getUUID()).resolve("0"); try { - new ShardPath(true, path, path, "foo", new ShardId("foo", "_na_", 0)); + new ShardPath(true, path, path, new ShardId(index, 0)); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), is("shard state path must be different to the data path when using custom data paths")); @@ -100,8 +103,9 @@ public class ShardPathTests extends ESTestCase { } public void testValidCtor() { - final Path path = createTempDir().resolve("foo").resolve("0"); - ShardPath shardPath = new ShardPath(false, path, path, "foo", new ShardId("foo", "_na_", 0)); + Index index = new Index("foo", "foo"); + final Path path = createTempDir().resolve(index.getUUID()).resolve("0"); + ShardPath shardPath = new ShardPath(false, path, path, new ShardId(index, 0)); assertFalse(shardPath.isCustomDataPath()); assertEquals(shardPath.getDataPath(), path); assertEquals(shardPath.getShardStatePath(), path); @@ -111,8 +115,9 @@ public class ShardPathTests extends ESTestCase { boolean useCustomDataPath = randomBoolean(); final Settings indexSettings; final 
Settings nodeSettings; + final String indexUUID = "0xDEADBEEF"; Settings.Builder indexSettingsBuilder = settingsBuilder() - .put(IndexMetaData.SETTING_INDEX_UUID, "0xDEADBEEF") + .put(IndexMetaData.SETTING_INDEX_UUID, indexUUID) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT); final Path customPath; if (useCustomDataPath) { @@ -132,10 +137,10 @@ public class ShardPathTests extends ESTestCase { nodeSettings = Settings.EMPTY; } try (final NodeEnvironment env = newNodeEnvironment(nodeSettings)) { - ShardId shardId = new ShardId("foo", "_na_", 0); + ShardId shardId = new ShardId("foo", indexUUID, 0); Path[] paths = env.availableShardPaths(shardId); Path path = randomFrom(paths); - ShardStateMetaData.FORMAT.write(new ShardStateMetaData(2, true, "0xDEADBEEF", AllocationId.newInitializing()), 2, path); + ShardStateMetaData.FORMAT.write(new ShardStateMetaData(2, true, indexUUID, AllocationId.newInitializing()), 2, path); ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), indexSettings)); boolean found = false; for (Path p : env.nodeDataPaths()) { diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index 4031aa5da25f..aaa1671f84be 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -49,6 +49,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.gateway.PrimaryShardAllocator; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.index.shard.IndexEventListener; @@ -571,8 +572,9 @@ public class CorruptedFileIT extends ESIntegTestCase { private Map> 
findFilesToCorruptForReplica() throws IOException { Map> filesToNodes = new HashMap<>(); ClusterState state = client().admin().cluster().prepareState().get().getState(); + Index test = state.metaData().index("test").getIndex(); for (ShardRouting shardRouting : state.getRoutingTable().allShards("test")) { - if (shardRouting.primary() == true) { + if (shardRouting.primary()) { continue; } assertTrue(shardRouting.assignedToNode()); @@ -582,8 +584,7 @@ public class CorruptedFileIT extends ESIntegTestCase { filesToNodes.put(nodeStats.getNode().getName(), files); for (FsInfo.Path info : nodeStats.getFs()) { String path = info.getPath(); - final String relativeDataLocationPath = "indices/test/" + Integer.toString(shardRouting.getId()) + "/index"; - Path file = PathUtils.get(path).resolve(relativeDataLocationPath); + Path file = PathUtils.get(path).resolve("indices").resolve(test.getUUID()).resolve(Integer.toString(shardRouting.getId())).resolve("index"); if (Files.exists(file)) { // multi data path might only have one path in use try (DirectoryStream stream = Files.newDirectoryStream(file)) { for (Path item : stream) { @@ -604,6 +605,7 @@ public class CorruptedFileIT extends ESIntegTestCase { private ShardRouting corruptRandomPrimaryFile(final boolean includePerCommitFiles) throws IOException { ClusterState state = client().admin().cluster().prepareState().get().getState(); + Index test = state.metaData().index("test").getIndex(); GroupShardsIterator shardIterators = state.getRoutingNodes().getRoutingTable().activePrimaryShardsGrouped(new String[]{"test"}, false); List iterators = iterableAsArrayList(shardIterators); ShardIterator shardIterator = RandomPicks.randomFrom(getRandom(), iterators); @@ -616,8 +618,7 @@ public class CorruptedFileIT extends ESIntegTestCase { Set files = new TreeSet<>(); // treeset makes sure iteration order is deterministic for (FsInfo.Path info : nodeStatses.getNodes()[0].getFs()) { String path = info.getPath(); - final String 
relativeDataLocationPath = "indices/test/" + Integer.toString(shardRouting.getId()) + "/index"; - Path file = PathUtils.get(path).resolve(relativeDataLocationPath); + Path file = PathUtils.get(path).resolve("indices").resolve(test.getUUID()).resolve(Integer.toString(shardRouting.getId())).resolve("index"); if (Files.exists(file)) { // multi data path might only have one path in use try (DirectoryStream stream = Files.newDirectoryStream(file)) { for (Path item : stream) { @@ -676,12 +677,13 @@ public class CorruptedFileIT extends ESIntegTestCase { public List listShardFiles(ShardRouting routing) throws IOException { NodesStatsResponse nodeStatses = client().admin().cluster().prepareNodesStats(routing.currentNodeId()).setFs(true).get(); - + ClusterState state = client().admin().cluster().prepareState().get().getState(); + final Index test = state.metaData().index("test").getIndex(); assertThat(routing.toString(), nodeStatses.getNodes().length, equalTo(1)); List files = new ArrayList<>(); for (FsInfo.Path info : nodeStatses.getNodes()[0].getFs()) { String path = info.getPath(); - Path file = PathUtils.get(path).resolve("indices/test/" + Integer.toString(routing.getId()) + "/index"); + Path file = PathUtils.get(path).resolve("indices/" + test.getUUID() + "/" + Integer.toString(routing.getId()) + "/index"); if (Files.exists(file)) { // multi data path might only have one path in use try (DirectoryStream stream = Files.newDirectoryStream(file)) { for (Path item : stream) { diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java index ae158b87c592..0a8cd9a6fe03 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import 
org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.MockEngineFactoryPlugin; import org.elasticsearch.monitor.fs.FsInfo; @@ -110,6 +111,7 @@ public class CorruptedTranslogIT extends ESIntegTestCase { private void corruptRandomTranslogFiles() throws IOException { ClusterState state = client().admin().cluster().prepareState().get().getState(); GroupShardsIterator shardIterators = state.getRoutingNodes().getRoutingTable().activePrimaryShardsGrouped(new String[]{"test"}, false); + final Index test = state.metaData().index("test").getIndex(); List iterators = iterableAsArrayList(shardIterators); ShardIterator shardIterator = RandomPicks.randomFrom(getRandom(), iterators); ShardRouting shardRouting = shardIterator.nextOrNull(); @@ -121,7 +123,7 @@ public class CorruptedTranslogIT extends ESIntegTestCase { Set files = new TreeSet<>(); // treeset makes sure iteration order is deterministic for (FsInfo.Path fsPath : nodeStatses.getNodes()[0].getFs()) { String path = fsPath.getPath(); - final String relativeDataLocationPath = "indices/test/" + Integer.toString(shardRouting.getId()) + "/translog"; + final String relativeDataLocationPath = "indices/"+ test.getUUID() +"/" + Integer.toString(shardRouting.getId()) + "/translog"; Path file = PathUtils.get(path).resolve(relativeDataLocationPath); if (Files.exists(file)) { logger.info("--> path: {}", file); diff --git a/core/src/test/java/org/elasticsearch/index/store/FsDirectoryServiceTests.java b/core/src/test/java/org/elasticsearch/index/store/FsDirectoryServiceTests.java index 9da39b8da718..f7d793f03ed1 100644 --- a/core/src/test/java/org/elasticsearch/index/store/FsDirectoryServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/FsDirectoryServiceTests.java @@ -46,9 +46,9 @@ public class FsDirectoryServiceTests extends ESTestCase { IndexSettings settings = 
IndexSettingsModule.newIndexSettings("foo", build); IndexStoreConfig config = new IndexStoreConfig(build); IndexStore store = new IndexStore(settings, config); - Path tempDir = createTempDir().resolve("foo").resolve("0"); + Path tempDir = createTempDir().resolve(settings.getUUID()).resolve("0"); Files.createDirectories(tempDir); - ShardPath path = new ShardPath(false, tempDir, tempDir, settings.getUUID(), new ShardId(settings.getIndex(), 0)); + ShardPath path = new ShardPath(false, tempDir, tempDir, new ShardId(settings.getIndex(), 0)); FsDirectoryService fsDirectoryService = new FsDirectoryService(settings, store, path); Directory directory = fsDirectoryService.newDirectory(); assertTrue(directory instanceof RateLimitedFSDirectory); @@ -62,9 +62,9 @@ public class FsDirectoryServiceTests extends ESTestCase { IndexSettings settings = IndexSettingsModule.newIndexSettings("foo", build); IndexStoreConfig config = new IndexStoreConfig(build); IndexStore store = new IndexStore(settings, config); - Path tempDir = createTempDir().resolve("foo").resolve("0"); + Path tempDir = createTempDir().resolve(settings.getUUID()).resolve("0"); Files.createDirectories(tempDir); - ShardPath path = new ShardPath(false, tempDir, tempDir, settings.getUUID(), new ShardId(settings.getIndex(), 0)); + ShardPath path = new ShardPath(false, tempDir, tempDir, new ShardId(settings.getIndex(), 0)); FsDirectoryService fsDirectoryService = new FsDirectoryService(settings, store, path); Directory directory = fsDirectoryService.newDirectory(); assertTrue(directory instanceof RateLimitedFSDirectory); diff --git a/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java b/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java index d9000e23a61f..da5c1f3ecfa2 100644 --- a/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java @@ -47,13 +47,14 @@ import java.util.Locale; public class 
IndexStoreTests extends ESTestCase { public void testStoreDirectory() throws IOException { - final Path tempDir = createTempDir().resolve("foo").resolve("0"); + Index index = new Index("foo", "fooUUID"); + final Path tempDir = createTempDir().resolve(index.getUUID()).resolve("0"); final IndexModule.Type[] values = IndexModule.Type.values(); final IndexModule.Type type = RandomPicks.randomFrom(random(), values); Settings settings = Settings.settingsBuilder().put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), type.name().toLowerCase(Locale.ROOT)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings); - FsDirectoryService service = new FsDirectoryService(indexSettings, null, new ShardPath(false, tempDir, tempDir, "foo", new ShardId("foo", "_na_", 0))); + FsDirectoryService service = new FsDirectoryService(indexSettings, null, new ShardPath(false, tempDir, tempDir, new ShardId(index, 0))); try (final Directory directory = service.newFSDirectory(tempDir, NoLockFactory.INSTANCE)) { switch (type) { case NIOFS: @@ -84,8 +85,9 @@ public class IndexStoreTests extends ESTestCase { } public void testStoreDirectoryDefault() throws IOException { - final Path tempDir = createTempDir().resolve("foo").resolve("0"); - FsDirectoryService service = new FsDirectoryService(IndexSettingsModule.newIndexSettings("foo", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()), null, new ShardPath(false, tempDir, tempDir, "foo", new ShardId("foo", "_na_", 0))); + Index index = new Index("bar", "foo"); + final Path tempDir = createTempDir().resolve(index.getUUID()).resolve("0"); + FsDirectoryService service = new FsDirectoryService(IndexSettingsModule.newIndexSettings("bar", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()), null, new ShardPath(false, tempDir, tempDir, new ShardId(index, 0))); try (final 
Directory directory = service.newFSDirectory(tempDir, NoLockFactory.INSTANCE)) { if (Constants.WINDOWS) { assertTrue(directory.toString(), directory instanceof MMapDirectory || directory instanceof SimpleFSDirectory); diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index 26e2b7702c82..bdc53d0de306 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -112,12 +112,14 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)) ); ensureGreen("test"); + ClusterState state = client().admin().cluster().prepareState().get().getState(); + Index index = state.metaData().index("test").getIndex(); logger.info("--> making sure that shard and its replica are allocated on node_1 and node_2"); - assertThat(Files.exists(shardDirectory(node_1, "test", 0)), equalTo(true)); - assertThat(Files.exists(indexDirectory(node_1, "test")), equalTo(true)); - assertThat(Files.exists(shardDirectory(node_2, "test", 0)), equalTo(true)); - assertThat(Files.exists(indexDirectory(node_2, "test")), equalTo(true)); + assertThat(Files.exists(shardDirectory(node_1, index, 0)), equalTo(true)); + assertThat(Files.exists(indexDirectory(node_1, index)), equalTo(true)); + assertThat(Files.exists(shardDirectory(node_2, index, 0)), equalTo(true)); + assertThat(Files.exists(indexDirectory(node_2, index)), equalTo(true)); logger.info("--> starting node server3"); final String node_3 = internalCluster().startNode(Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false)); @@ -128,12 +130,12 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { .get(); assertThat(clusterHealth.isTimedOut(), equalTo(false)); - assertThat(Files.exists(shardDirectory(node_1, "test", 0)), 
equalTo(true)); - assertThat(Files.exists(indexDirectory(node_1, "test")), equalTo(true)); - assertThat(Files.exists(shardDirectory(node_2, "test", 0)), equalTo(true)); - assertThat(Files.exists(indexDirectory(node_2, "test")), equalTo(true)); - assertThat(Files.exists(shardDirectory(node_3, "test", 0)), equalTo(false)); - assertThat(Files.exists(indexDirectory(node_3, "test")), equalTo(false)); + assertThat(Files.exists(shardDirectory(node_1, index, 0)), equalTo(true)); + assertThat(Files.exists(indexDirectory(node_1, index)), equalTo(true)); + assertThat(Files.exists(shardDirectory(node_2, index, 0)), equalTo(true)); + assertThat(Files.exists(indexDirectory(node_2, index)), equalTo(true)); + assertThat(Files.exists(shardDirectory(node_3, index, 0)), equalTo(false)); + assertThat(Files.exists(indexDirectory(node_3, index)), equalTo(false)); logger.info("--> move shard from node_1 to node_3, and wait for relocation to finish"); @@ -161,12 +163,12 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { .get(); assertThat(clusterHealth.isTimedOut(), equalTo(false)); - assertThat(waitForShardDeletion(node_1, "test", 0), equalTo(false)); - assertThat(waitForIndexDeletion(node_1, "test"), equalTo(false)); - assertThat(Files.exists(shardDirectory(node_2, "test", 0)), equalTo(true)); - assertThat(Files.exists(indexDirectory(node_2, "test")), equalTo(true)); - assertThat(Files.exists(shardDirectory(node_3, "test", 0)), equalTo(true)); - assertThat(Files.exists(indexDirectory(node_3, "test")), equalTo(true)); + assertThat(waitForShardDeletion(node_1, index, 0), equalTo(false)); + assertThat(waitForIndexDeletion(node_1, index), equalTo(false)); + assertThat(Files.exists(shardDirectory(node_2, index, 0)), equalTo(true)); + assertThat(Files.exists(indexDirectory(node_2, index)), equalTo(true)); + assertThat(Files.exists(shardDirectory(node_3, index, 0)), equalTo(true)); + assertThat(Files.exists(indexDirectory(node_3, index)), equalTo(true)); } @@ -180,16 +182,18 
@@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)) ); ensureGreen("test"); - assertThat(Files.exists(shardDirectory(node_1, "test", 0)), equalTo(true)); - assertThat(Files.exists(indexDirectory(node_1, "test")), equalTo(true)); + ClusterState state = client().admin().cluster().prepareState().get().getState(); + Index index = state.metaData().index("test").getIndex(); + assertThat(Files.exists(shardDirectory(node_1, index, 0)), equalTo(true)); + assertThat(Files.exists(indexDirectory(node_1, index)), equalTo(true)); final String node_2 = internalCluster().startDataOnlyNode(Settings.builder().build()); assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes("2").get().isTimedOut()); - assertThat(Files.exists(shardDirectory(node_1, "test", 0)), equalTo(true)); - assertThat(Files.exists(indexDirectory(node_1, "test")), equalTo(true)); - assertThat(Files.exists(shardDirectory(node_2, "test", 0)), equalTo(false)); - assertThat(Files.exists(indexDirectory(node_2, "test")), equalTo(false)); + assertThat(Files.exists(shardDirectory(node_1, index, 0)), equalTo(true)); + assertThat(Files.exists(indexDirectory(node_1, index)), equalTo(true)); + assertThat(Files.exists(shardDirectory(node_2, index, 0)), equalTo(false)); + assertThat(Files.exists(indexDirectory(node_2, index)), equalTo(false)); // add a transport delegate that will prevent the shard active request to succeed the first time after relocation has finished. // node_1 will then wait for the next cluster state change before it tries a next attempt to delete the shard. 
@@ -220,14 +224,14 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { // it must still delete the shard, even if it cannot find it anymore in indicesservice client().admin().indices().prepareDelete("test").get(); - assertThat(waitForShardDeletion(node_1, "test", 0), equalTo(false)); - assertThat(waitForIndexDeletion(node_1, "test"), equalTo(false)); - assertThat(Files.exists(shardDirectory(node_1, "test", 0)), equalTo(false)); - assertThat(Files.exists(indexDirectory(node_1, "test")), equalTo(false)); - assertThat(waitForShardDeletion(node_2, "test", 0), equalTo(false)); - assertThat(waitForIndexDeletion(node_2, "test"), equalTo(false)); - assertThat(Files.exists(shardDirectory(node_2, "test", 0)), equalTo(false)); - assertThat(Files.exists(indexDirectory(node_2, "test")), equalTo(false)); + assertThat(waitForShardDeletion(node_1, index, 0), equalTo(false)); + assertThat(waitForIndexDeletion(node_1, index), equalTo(false)); + assertThat(Files.exists(shardDirectory(node_1, index, 0)), equalTo(false)); + assertThat(Files.exists(indexDirectory(node_1, index)), equalTo(false)); + assertThat(waitForShardDeletion(node_2, index, 0), equalTo(false)); + assertThat(waitForIndexDeletion(node_2, index), equalTo(false)); + assertThat(Files.exists(shardDirectory(node_2, index, 0)), equalTo(false)); + assertThat(Files.exists(indexDirectory(node_2, index)), equalTo(false)); } public void testShardsCleanup() throws Exception { @@ -241,9 +245,11 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { ); ensureGreen("test"); + ClusterState state = client().admin().cluster().prepareState().get().getState(); + Index index = state.metaData().index("test").getIndex(); logger.info("--> making sure that shard and its replica are allocated on node_1 and node_2"); - assertThat(Files.exists(shardDirectory(node_1, "test", 0)), equalTo(true)); - assertThat(Files.exists(shardDirectory(node_2, "test", 0)), equalTo(true)); + 
assertThat(Files.exists(shardDirectory(node_1, index, 0)), equalTo(true)); + assertThat(Files.exists(shardDirectory(node_2, index, 0)), equalTo(true)); logger.info("--> starting node server3"); String node_3 = internalCluster().startNode(); @@ -255,10 +261,10 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { assertThat(clusterHealth.isTimedOut(), equalTo(false)); logger.info("--> making sure that shard is not allocated on server3"); - assertThat(waitForShardDeletion(node_3, "test", 0), equalTo(false)); + assertThat(waitForShardDeletion(node_3, index, 0), equalTo(false)); - Path server2Shard = shardDirectory(node_2, "test", 0); - logger.info("--> stopping node {}", node_2); + Path server2Shard = shardDirectory(node_2, index, 0); + logger.info("--> stopping node " + node_2); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(node_2)); logger.info("--> running cluster_health"); @@ -273,9 +279,9 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { assertThat(Files.exists(server2Shard), equalTo(true)); logger.info("--> making sure that shard and its replica exist on server1, server2 and server3"); - assertThat(Files.exists(shardDirectory(node_1, "test", 0)), equalTo(true)); + assertThat(Files.exists(shardDirectory(node_1, index, 0)), equalTo(true)); assertThat(Files.exists(server2Shard), equalTo(true)); - assertThat(Files.exists(shardDirectory(node_3, "test", 0)), equalTo(true)); + assertThat(Files.exists(shardDirectory(node_3, index, 0)), equalTo(true)); logger.info("--> starting node node_4"); final String node_4 = internalCluster().startNode(); @@ -284,9 +290,9 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { ensureGreen(); logger.info("--> making sure that shard and its replica are allocated on server1 and server3 but not on server2"); - assertThat(Files.exists(shardDirectory(node_1, "test", 0)), equalTo(true)); - assertThat(Files.exists(shardDirectory(node_3, "test", 0)), equalTo(true)); - 
assertThat(waitForShardDeletion(node_4, "test", 0), equalTo(false)); + assertThat(Files.exists(shardDirectory(node_1, index, 0)), equalTo(true)); + assertThat(Files.exists(shardDirectory(node_3, index, 0)), equalTo(true)); + assertThat(waitForShardDeletion(node_4, index, 0), equalTo(false)); } public void testShardActiveElsewhereDoesNotDeleteAnother() throws Exception { @@ -426,30 +432,30 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { waitNoPendingTasksOnAll(); logger.info("Checking if shards aren't removed"); for (int shard : node2Shards) { - assertTrue(waitForShardDeletion(nonMasterNode, "test", shard)); + assertTrue(waitForShardDeletion(nonMasterNode, index, shard)); } } - private Path indexDirectory(String server, String index) { + private Path indexDirectory(String server, Index index) { NodeEnvironment env = internalCluster().getInstance(NodeEnvironment.class, server); final Path[] paths = env.indexPaths(index); assert paths.length == 1; return paths[0]; } - private Path shardDirectory(String server, String index, int shard) { + private Path shardDirectory(String server, Index index, int shard) { NodeEnvironment env = internalCluster().getInstance(NodeEnvironment.class, server); - final Path[] paths = env.availableShardPaths(new ShardId(index, "_na_", shard)); + final Path[] paths = env.availableShardPaths(new ShardId(index, shard)); assert paths.length == 1; return paths[0]; } - private boolean waitForShardDeletion(final String server, final String index, final int shard) throws InterruptedException { + private boolean waitForShardDeletion(final String server, final Index index, final int shard) throws InterruptedException { awaitBusy(() -> !Files.exists(shardDirectory(server, index, shard))); return Files.exists(shardDirectory(server, index, shard)); } - private boolean waitForIndexDeletion(final String server, final String index) throws InterruptedException { + private boolean waitForIndexDeletion(final String server, final Index 
index) throws InterruptedException { awaitBusy(() -> !Files.exists(indexDirectory(server, index))); return Files.exists(indexDirectory(server, index)); } diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java index fe12cb042d42..c632e139955e 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java @@ -52,6 +52,7 @@ public class Murmur3FieldMapperUpgradeTests extends ESIntegTestCase { public void testUpgradeOldMapping() throws IOException, ExecutionException, InterruptedException { final String indexName = "index-mapper-murmur3-2.0.0"; + final String indexUUID = "1VzJds59TTK7lRu17W0mcg"; InternalTestCluster.Async master = internalCluster().startNodeAsync(); Path unzipDir = createTempDir(); Path unzipDataDir = unzipDir.resolve("data"); @@ -72,6 +73,7 @@ public class Murmur3FieldMapperUpgradeTests extends ESIntegTestCase { assertFalse(Files.exists(dataPath)); Path src = unzipDataDir.resolve(indexName + "/nodes/0/indices"); Files.move(src, dataPath); + Files.move(dataPath.resolve(indexName), dataPath.resolve(indexUUID)); master.get(); // force reloading dangling indices with a cluster state republish diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java index a2af6df4e75a..761fb5fd144c 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java @@ -53,6 +53,7 @@ public class 
SizeFieldMapperUpgradeTests extends ESIntegTestCase { public void testUpgradeOldMapping() throws IOException, ExecutionException, InterruptedException { final String indexName = "index-mapper-size-2.0.0"; + final String indexUUID = "ENCw7sG0SWuTPcH60bHheg"; InternalTestCluster.Async master = internalCluster().startNodeAsync(); Path unzipDir = createTempDir(); Path unzipDataDir = unzipDir.resolve("data"); @@ -73,6 +74,7 @@ public class SizeFieldMapperUpgradeTests extends ESIntegTestCase { assertFalse(Files.exists(dataPath)); Path src = unzipDataDir.resolve(indexName + "/nodes/0/indices"); Files.move(src, dataPath); + Files.move(dataPath.resolve(indexName), dataPath.resolve(indexUUID)); master.get(); // force reloading dangling indices with a cluster state republish client().admin().cluster().prepareReroute().get(); From a3bf57d116c12df379fc24a69592ace67cf13cc8 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Tue, 15 Mar 2016 09:18:34 +0100 Subject: [PATCH 235/320] Upgrade azure SDK to 0.9.3 We are ATM using azure SDK 0.9.0. Azure latest release is now 0.9.3 (released in February 2016). the central repository search engine google chrome aujourd hui at 08 41 12 Artifacts are on [maven central](http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22com.microsoft.azure%22%20AND%20(a%3Aazure-serviceruntime%20OR%20a%3Aazure-servicebus%20OR%20a%3Aazure-svc-*)) Change log: ## 2016.2.18 Version 0.9.3 * Fix enum bugs in azure-svc-mgmt-websites ## 2016.1.26 Version 0.9.2 * Fix HTTP Proxy for Apache HTTP Client in Service Clients * Key Vault: Fix KeyVaultKey to not attempt to load RSA Private Key ## 2016.1.8 Version 0.9.1 * Support HTTP Proxy * Fix token expiration issue #557 * Service Bus: Add missing attributes: partitionKey, viaPartitionKey * Traffic Manager: Update API version, add MinChildEndpoints for NestedEndpoints * Media: Add support for Widevine (DRM) dynamic encryption Closes #17042. 
--- plugins/discovery-azure/build.gradle | 2 +- plugins/discovery-azure/licenses/azure-core-0.9.0.jar.sha1 | 1 - plugins/discovery-azure/licenses/azure-core-0.9.3.jar.sha1 | 1 + .../licenses/azure-svc-mgmt-compute-0.9.0.jar.sha1 | 1 - .../licenses/azure-svc-mgmt-compute-0.9.3.jar.sha1 | 1 + 5 files changed, 3 insertions(+), 3 deletions(-) delete mode 100644 plugins/discovery-azure/licenses/azure-core-0.9.0.jar.sha1 create mode 100644 plugins/discovery-azure/licenses/azure-core-0.9.3.jar.sha1 delete mode 100644 plugins/discovery-azure/licenses/azure-svc-mgmt-compute-0.9.0.jar.sha1 create mode 100644 plugins/discovery-azure/licenses/azure-svc-mgmt-compute-0.9.3.jar.sha1 diff --git a/plugins/discovery-azure/build.gradle b/plugins/discovery-azure/build.gradle index ec4ef7cb6254..1dd2aa26f239 100644 --- a/plugins/discovery-azure/build.gradle +++ b/plugins/discovery-azure/build.gradle @@ -23,7 +23,7 @@ esplugin { } versions << [ - 'azure': '0.9.0', + 'azure': '0.9.3', 'jersey': '1.13' ] diff --git a/plugins/discovery-azure/licenses/azure-core-0.9.0.jar.sha1 b/plugins/discovery-azure/licenses/azure-core-0.9.0.jar.sha1 deleted file mode 100644 index f9696307afe7..000000000000 --- a/plugins/discovery-azure/licenses/azure-core-0.9.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -050719f91deceed1be1aaf87e85099a861295fa2 \ No newline at end of file diff --git a/plugins/discovery-azure/licenses/azure-core-0.9.3.jar.sha1 b/plugins/discovery-azure/licenses/azure-core-0.9.3.jar.sha1 new file mode 100644 index 000000000000..5947972663e4 --- /dev/null +++ b/plugins/discovery-azure/licenses/azure-core-0.9.3.jar.sha1 @@ -0,0 +1 @@ +7fe32241b738aad0f700f4277fa998230c144ae7 \ No newline at end of file diff --git a/plugins/discovery-azure/licenses/azure-svc-mgmt-compute-0.9.0.jar.sha1 b/plugins/discovery-azure/licenses/azure-svc-mgmt-compute-0.9.0.jar.sha1 deleted file mode 100644 index c971d7c57242..000000000000 --- a/plugins/discovery-azure/licenses/azure-svc-mgmt-compute-0.9.0.jar.sha1 +++ 
/dev/null @@ -1 +0,0 @@ -887ca8ee5564e8ba2351e6b5db2a1293a8d04674 \ No newline at end of file diff --git a/plugins/discovery-azure/licenses/azure-svc-mgmt-compute-0.9.3.jar.sha1 b/plugins/discovery-azure/licenses/azure-svc-mgmt-compute-0.9.3.jar.sha1 new file mode 100644 index 000000000000..d427170d5781 --- /dev/null +++ b/plugins/discovery-azure/licenses/azure-svc-mgmt-compute-0.9.3.jar.sha1 @@ -0,0 +1 @@ +602d3e6f5a9f058c2439e8fdf1270cddc811b440 \ No newline at end of file From 29268f100b0dd89e7e0ec69f6e957660ab2dbf83 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 15 Mar 2016 09:31:13 +0100 Subject: [PATCH 236/320] Reenable CreateIndexIT#testCreateAndDeleteIndexConcurrently Since #16442 is merged we should be able to reenable this test as a followup of #15853 - all issues blocking it have been resolved I guess. --- .../cluster/service/InternalClusterService.java | 8 ++++++-- .../action/admin/indices/create/CreateIndexIT.java | 9 +++++++-- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index 525a9a9af40d..c57cfd5a57cc 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -202,8 +202,12 @@ public class InternalClusterService extends AbstractLifecycleComponent Date: Tue, 15 Mar 2016 11:12:38 +0100 Subject: [PATCH 237/320] Fix compile error that's what you get for a "let me quickly try something out here" --- .../org/elasticsearch/search/sort/AbstractSortTestCase.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index 6c5800c97cb0..fcc541a23ccb 100644 --- 
a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -86,8 +86,6 @@ public abstract class AbstractSortTestCase Date: Tue, 15 Mar 2016 11:41:53 +0100 Subject: [PATCH 238/320] Using SortMode enum in all sort builders --- .../search/sort/FieldSortBuilder.java | 35 +++++---- .../search/sort/GeoDistanceSortBuilder.java | 28 +++---- .../search/sort/ScoreSortBuilder.java | 3 +- .../search/sort/ScriptSortBuilder.java | 8 +- .../search/sort/SortElementParserTemp.java | 39 ---------- .../search/nested/SimpleNestedIT.java | 13 ++-- .../search/sort/AbstractSortTestCase.java | 5 +- .../search/sort/FieldSortIT.java | 77 +++++++++---------- .../search/sort/GeoDistanceIT.java | 28 +++---- .../search/sort/GeoDistanceSortBuilderIT.java | 20 ++--- .../sort/GeoDistanceSortBuilderTests.java | 22 +++--- .../{SortModeTest.java => SortModeTests.java} | 6 +- .../migration/migrate_5_0/java.asciidoc | 4 + 13 files changed, 127 insertions(+), 161 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/search/sort/SortElementParserTemp.java rename core/src/test/java/org/elasticsearch/search/sort/{SortModeTest.java => SortModeTests.java} (95%) diff --git a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index 1157457afb9f..a5707ea4a537 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -53,9 +53,9 @@ public class FieldSortBuilder extends SortBuilder implements S private String unmappedType; - private String sortMode; + private SortMode sortMode; - private QueryBuilder nestedFilter; + private QueryBuilder nestedFilter; private String nestedPath; @@ -65,7 +65,9 @@ public class FieldSortBuilder extends SortBuilder implements S this.order(template.order()); 
this.missing(template.missing()); this.unmappedType(template.unmappedType()); - this.sortMode(template.sortMode()); + if (template.sortMode != null) { + this.sortMode(template.sortMode()); + } this.setNestedFilter(template.getNestedFilter()); this.setNestedPath(template.getNestedPath()); } @@ -134,12 +136,12 @@ public class FieldSortBuilder extends SortBuilder implements S * Defines what values to pick in the case a document contains multiple * values for the targeted sort field. Possible values: min, max, sum and * avg - * - * TODO would love to see an enum here + * *

      * The last two values are only applicable for number based fields. */ - public FieldSortBuilder sortMode(String sortMode) { + public FieldSortBuilder sortMode(SortMode sortMode) { + Objects.requireNonNull(sortMode, "sort mode cannot be null"); this.sortMode = sortMode; return this; } @@ -148,14 +150,14 @@ public class FieldSortBuilder extends SortBuilder implements S * Returns what values to pick in the case a document contains multiple * values for the targeted sort field. */ - public String sortMode() { + public SortMode sortMode() { return this.sortMode; } /** * Sets the nested filter that the nested objects should match with in order * to be taken into account for sorting. - * + * * TODO should the above getters and setters be deprecated/ changed in * favour of real getters and setters? */ @@ -263,7 +265,10 @@ public class FieldSortBuilder extends SortBuilder implements S out.writeBoolean(false); } - out.writeOptionalString(this.sortMode); + out.writeBoolean(this.sortMode != null); + if (this.sortMode != null) { + this.sortMode.writeTo(out); + } out.writeOptionalString(this.unmappedType); } @@ -272,7 +277,7 @@ public class FieldSortBuilder extends SortBuilder implements S String fieldName = in.readString(); FieldSortBuilder result = new FieldSortBuilder(fieldName); if (in.readBoolean()) { - QueryBuilder query = in.readQuery(); + QueryBuilder query = in.readQuery(); result.setNestedFilter(query); } result.setNestedPath(in.readOptionalString()); @@ -281,7 +286,9 @@ public class FieldSortBuilder extends SortBuilder implements S if (in.readBoolean()) { result.order(SortOrder.readOrderFrom(in)); } - result.sortMode(in.readOptionalString()); + if (in.readBoolean()) { + result.sortMode(SortMode.PROTOTYPE.readFrom(in)); + } result.unmappedType(in.readOptionalString()); return result; } @@ -290,11 +297,11 @@ public class FieldSortBuilder extends SortBuilder implements S public FieldSortBuilder fromXContent(QueryParseContext context, String fieldName) throws 
IOException { XContentParser parser = context.parser(); - QueryBuilder nestedFilter = null; + QueryBuilder nestedFilter = null; String nestedPath = null; Object missing = null; SortOrder order = null; - String sortMode = null; + SortMode sortMode = null; String unmappedType = null; String currentFieldName = null; @@ -328,7 +335,7 @@ public class FieldSortBuilder extends SortBuilder implements S throw new IllegalStateException("Sort order " + sortOrder + " not supported."); } } else if (context.parseFieldMatcher().match(currentFieldName, SORT_MODE)) { - sortMode = parser.text(); + sortMode = SortMode.fromString(parser.text()); } else if (context.parseFieldMatcher().match(currentFieldName, UNMAPPED_TYPE)) { unmappedType = parser.text(); } else { diff --git a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 630ff635afa9..9785a0fc2404 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.search.MultiValueMode; import java.io.IOException; import java.util.ArrayList; @@ -55,8 +54,7 @@ public class GeoDistanceSortBuilder extends SortBuilder private GeoDistance geoDistance = GeoDistance.DEFAULT; private DistanceUnit unit = DistanceUnit.DEFAULT; - // TODO there is an enum that covers that parameter which we should be using here - private String sortMode = null; + private SortMode sortMode = null; @SuppressWarnings("rawtypes") private QueryBuilder nestedFilter; private String nestedPath; @@ -204,9 +202,9 @@ public class GeoDistanceSortBuilder extends SortBuilder * Defines which distance 
to use for sorting in the case a document contains multiple geo points. * Possible values: min and max */ - public GeoDistanceSortBuilder sortMode(String sortMode) { - MultiValueMode temp = MultiValueMode.fromString(sortMode); - if (temp == MultiValueMode.SUM) { + public GeoDistanceSortBuilder sortMode(SortMode sortMode) { + Objects.requireNonNull(sortMode, "sort mode cannot be null"); + if (sortMode == SortMode.SUM) { throw new IllegalArgumentException("sort_mode [sum] isn't supported for sorting by geo distance"); } this.sortMode = sortMode; @@ -214,7 +212,7 @@ public class GeoDistanceSortBuilder extends SortBuilder } /** Returns which distance to use for sorting in the case a document contains multiple geo points. */ - public String sortMode() { + public SortMode sortMode() { return this.sortMode; } @@ -345,7 +343,10 @@ public class GeoDistanceSortBuilder extends SortBuilder geoDistance.writeTo(out); unit.writeTo(out); order.writeTo(out); - out.writeOptionalString(sortMode); + out.writeBoolean(this.sortMode != null); + if (this.sortMode != null) { + sortMode.writeTo(out); + } if (nestedFilter != null) { out.writeBoolean(true); out.writeQuery(nestedFilter); @@ -367,9 +368,8 @@ public class GeoDistanceSortBuilder extends SortBuilder result.geoDistance(GeoDistance.readGeoDistanceFrom(in)); result.unit(DistanceUnit.readDistanceUnit(in)); result.order(SortOrder.readOrderFrom(in)); - String sortMode = in.readOptionalString(); - if (sortMode != null) { - result.sortMode(sortMode); + if (in.readBoolean()) { + result.sortMode = SortMode.PROTOTYPE.readFrom(in); } if (in.readBoolean()) { result.setNestedFilter(in.readQuery()); @@ -388,7 +388,7 @@ public class GeoDistanceSortBuilder extends SortBuilder DistanceUnit unit = DistanceUnit.DEFAULT; GeoDistance geoDistance = GeoDistance.DEFAULT; SortOrder order = SortOrder.ASC; - MultiValueMode sortMode = null; + SortMode sortMode = null; QueryBuilder nestedFilter = null; String nestedPath = null; @@ -437,7 +437,7 @@ public class 
GeoDistanceSortBuilder extends SortBuilder ignoreMalformed = ignore_malformed_value; } } else if ("sort_mode".equals(currentName) || "sortMode".equals(currentName) || "mode".equals(currentName)) { - sortMode = MultiValueMode.fromString(parser.text()); + sortMode = SortMode.fromString(parser.text()); } else if ("nested_path".equals(currentName) || "nestedPath".equals(currentName)) { nestedPath = parser.text(); } else { @@ -454,7 +454,7 @@ public class GeoDistanceSortBuilder extends SortBuilder result.unit(unit); result.order(order); if (sortMode != null) { - result.sortMode(sortMode.name()); + result.sortMode(sortMode); } result.setNestedFilter(nestedFilter); result.setNestedPath(nestedPath); diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java index c416965f38a9..76ca56f0f9f9 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java @@ -34,8 +34,7 @@ import java.util.Objects; /** * A sort builder allowing to sort by score. 
*/ -public class ScoreSortBuilder extends SortBuilder implements SortBuilderParser, - SortElementParserTemp { +public class ScoreSortBuilder extends SortBuilder implements SortBuilderParser { private static final String NAME = "_score"; static final ScoreSortBuilder PROTOTYPE = new ScoreSortBuilder(); diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index 9b51eeca31dd..e77d12ce478a 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.sort; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -44,8 +43,7 @@ import java.util.Objects; /** * Script sort builder allows to sort based on a custom script expression. */ -public class ScriptSortBuilder extends SortBuilder implements NamedWriteable, - SortElementParserTemp { +public class ScriptSortBuilder extends SortBuilder implements SortBuilderParser { private static final String NAME = "_script"; static final ScriptSortBuilder PROTOTYPE = new ScriptSortBuilder(new Script("_na_"), ScriptSortType.STRING); @@ -72,8 +70,8 @@ public class ScriptSortBuilder extends SortBuilder implements * @param script * The script to use. 
* @param type - * The type of the script, can be either {@link ScriptSortParser#STRING_SORT_TYPE} or - * {@link ScriptSortParser#NUMBER_SORT_TYPE} + * The type of the script, can be either {@link ScriptSortType#STRING} or + * {@link ScriptSortType#NUMBER} */ public ScriptSortBuilder(Script script, ScriptSortType type) { Objects.requireNonNull(script, "script cannot be null"); diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortElementParserTemp.java b/core/src/main/java/org/elasticsearch/search/sort/SortElementParserTemp.java deleted file mode 100644 index 069f1380b49c..000000000000 --- a/core/src/main/java/org/elasticsearch/search/sort/SortElementParserTemp.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.sort; - -import org.elasticsearch.index.query.QueryParseContext; - -import java.io.IOException; - -// TODO once sort refactoring is done this needs to be merged into SortBuilder -public interface SortElementParserTemp { - /** - * Creates a new SortBuilder from the json held by the {@link SortElementParserTemp} - * in {@link org.elasticsearch.common.xcontent.XContent} format - * - * @param context - * the input parse context. 
The state on the parser contained in - * this context will be changed as a side effect of this method - * call - * @return the new item - */ - T fromXContent(QueryParseContext context, String elementName) throws IOException; -} diff --git a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java index 2f9326041457..e38ac0ca76e2 100644 --- a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.sort.SortBuilders; +import org.elasticsearch.search.sort.SortMode; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; @@ -748,7 +749,7 @@ public class SimpleNestedIT extends ESIntegTestCase { .addSort( SortBuilders.fieldSort("parent.child.child_values") .setNestedPath("parent.child") - .sortMode("sum") + .sortMode(SortMode.SUM) .order(SortOrder.ASC) ) .execute().actionGet(); @@ -768,7 +769,7 @@ public class SimpleNestedIT extends ESIntegTestCase { .addSort( SortBuilders.fieldSort("parent.child.child_values") .setNestedPath("parent.child") - .sortMode("sum") + .sortMode(SortMode.SUM) .order(SortOrder.DESC) ) .execute().actionGet(); @@ -789,7 +790,7 @@ public class SimpleNestedIT extends ESIntegTestCase { SortBuilders.fieldSort("parent.child.child_values") .setNestedPath("parent.child") .setNestedFilter(QueryBuilders.termQuery("parent.child.filter", true)) - .sortMode("sum") + .sortMode(SortMode.SUM) .order(SortOrder.ASC) ) .execute().actionGet(); @@ -809,7 +810,7 @@ public class SimpleNestedIT extends ESIntegTestCase { .addSort( SortBuilders.fieldSort("parent.child.child_values") .setNestedPath("parent.child") - .sortMode("avg") + 
.sortMode(SortMode.AVG) .order(SortOrder.ASC) ) .execute().actionGet(); @@ -828,7 +829,7 @@ public class SimpleNestedIT extends ESIntegTestCase { .addSort( SortBuilders.fieldSort("parent.child.child_values") .setNestedPath("parent.child") - .sortMode("avg") + .sortMode(SortMode.AVG) .order(SortOrder.DESC) ) .execute().actionGet(); @@ -849,7 +850,7 @@ public class SimpleNestedIT extends ESIntegTestCase { SortBuilders.fieldSort("parent.child.child_values") .setNestedPath("parent.child") .setNestedFilter(QueryBuilders.termQuery("parent.child.filter", true)) - .sortMode("avg") + .sortMode(SortMode.AVG) .order(SortOrder.ASC) ) .execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index 1a812a166a2b..f7f9edbc0b2e 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.sort; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -87,8 +86,6 @@ public abstract class AbstractSortTestCase> nodePlugins() { @@ -985,7 +984,7 @@ public class FieldSortIT extends ESIntegTestCase { searchResponse = client().prepareSearch() .setQuery(matchAllQuery()) .setSize(10) - .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode("sum")) + .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.SUM)) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L)); diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java index 
d40cbf930021..ed733fd4cd7e 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java @@ -270,7 +270,7 @@ public class GeoDistanceIT extends ESIntegTestCase { // Order: Asc, Mode: max searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.ASC).sortMode("max")) + .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.ASC).sortMode(SortMode.MAX)) .execute().actionGet(); assertHitCount(searchResponse, 5); @@ -296,7 +296,7 @@ public class GeoDistanceIT extends ESIntegTestCase { // Order: Desc, Mode: min searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC).sortMode("min")) + .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC).sortMode(SortMode.MIN)) .execute().actionGet(); assertHitCount(searchResponse, 5); @@ -308,7 +308,7 @@ public class GeoDistanceIT extends ESIntegTestCase { assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode("avg").order(SortOrder.ASC)) + .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode(SortMode.AVG).order(SortOrder.ASC)) .execute().actionGet(); assertHitCount(searchResponse, 5); @@ -320,7 +320,7 @@ public class GeoDistanceIT extends ESIntegTestCase { assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(5301d, 10d)); searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, 
-74.0059731).sortMode("avg").order(SortOrder.DESC)) + .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode(SortMode.AVG).order(SortOrder.DESC)) .execute().actionGet(); assertHitCount(searchResponse, 5); @@ -333,7 +333,7 @@ public class GeoDistanceIT extends ESIntegTestCase { try { client().prepareSearch("test").setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode("sum")); + .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode(SortMode.SUM)); fail("sum should not be supported for sorting by geo distance"); } catch (IllegalArgumentException e) { // expected @@ -455,7 +455,7 @@ public class GeoDistanceIT extends ESIntegTestCase { // Order: Asc, Mode: max searchResponse = client() .prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location", - 40.7143528, -74.0059731).order(SortOrder.ASC).sortMode("max").setNestedPath("branches")) + 40.7143528, -74.0059731).order(SortOrder.ASC).sortMode(SortMode.MAX).setNestedPath("branches")) .execute().actionGet(); assertHitCount(searchResponse, 4); @@ -480,7 +480,7 @@ public class GeoDistanceIT extends ESIntegTestCase { // Order: Desc, Mode: min searchResponse = client() .prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location", - 40.7143528, -74.0059731).order(SortOrder.DESC).sortMode("min").setNestedPath("branches")) + 40.7143528, -74.0059731).order(SortOrder.DESC).sortMode(SortMode.MIN).setNestedPath("branches")) .execute().actionGet(); assertHitCount(searchResponse, 4); @@ -492,7 +492,7 @@ public class GeoDistanceIT extends ESIntegTestCase { searchResponse = client() .prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location", - 40.7143528, -74.0059731).sortMode("avg").order(SortOrder.ASC).setNestedPath("branches")) + 40.7143528, 
-74.0059731).sortMode(SortMode.AVG).order(SortOrder.ASC).setNestedPath("branches")) .execute().actionGet(); assertHitCount(searchResponse, 4); @@ -504,7 +504,7 @@ public class GeoDistanceIT extends ESIntegTestCase { searchResponse = client().prepareSearch("companies") .setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731) - .setNestedPath("branches").sortMode("avg").order(SortOrder.DESC).setNestedPath("branches")) + .setNestedPath("branches").sortMode(SortMode.AVG).order(SortOrder.DESC).setNestedPath("branches")) .execute().actionGet(); assertHitCount(searchResponse, 4); @@ -517,7 +517,7 @@ public class GeoDistanceIT extends ESIntegTestCase { searchResponse = client().prepareSearch("companies").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731) .setNestedFilter(termQuery("branches.name", "brooklyn")) - .sortMode("avg").order(SortOrder.ASC).setNestedPath("branches")) + .sortMode(SortMode.AVG).order(SortOrder.ASC).setNestedPath("branches")) .execute().actionGet(); assertHitCount(searchResponse, 4); assertFirstHit(searchResponse, hasId("4")); @@ -529,7 +529,7 @@ public class GeoDistanceIT extends ESIntegTestCase { try { client().prepareSearch("companies").setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731).sortMode("sum") + .addSort(SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731).sortMode(SortMode.SUM) .setNestedPath("branches")); fail("Sum should not be allowed as sort mode"); } catch (IllegalArgumentException e) { @@ -567,11 +567,11 @@ public class GeoDistanceIT extends ESIntegTestCase { assertHitCount(result, 1); } - private double randomLon() { + private static double randomLon() { return randomDouble() * 360 - 180; } - private double randomLat() { + private static double randomLat() { return randomDouble() * 180 - 90; } @@ -619,7 +619,7 @@ public class GeoDistanceIT 
extends ESIntegTestCase { } } - private long assertDuelOptimization(SearchResponse resp) { + private static long assertDuelOptimization(SearchResponse resp) { long matches = -1; assertSearchResponse(resp); if (matches < 0) { diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java index 309c4bcdaf23..ac9270cbe210 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java @@ -95,7 +95,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch() .setQuery(matchAllQuery()) - .addSort(new GeoDistanceSortBuilder("location", q).sortMode("min").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS)) + .addSort(new GeoDistanceSortBuilder("location", q).sortMode(SortMode.MIN).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS)) .execute().actionGet(); assertOrderedSearchHits(searchResponse, "d1", "d2"); assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 3, 2, DistanceUnit.KILOMETERS), 0.01d)); @@ -103,7 +103,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { searchResponse = client().prepareSearch() .setQuery(matchAllQuery()) - .addSort(new GeoDistanceSortBuilder("location", q).sortMode("min").order(SortOrder.DESC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS)) + .addSort(new GeoDistanceSortBuilder("location", q).sortMode(SortMode.MIN).order(SortOrder.DESC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS)) .execute().actionGet(); assertOrderedSearchHits(searchResponse, "d2", "d1"); assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 1, 5, 1, 
DistanceUnit.KILOMETERS), 0.01d)); @@ -111,7 +111,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { searchResponse = client().prepareSearch() .setQuery(matchAllQuery()) - .addSort(new GeoDistanceSortBuilder("location", q).sortMode("max").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS)) + .addSort(new GeoDistanceSortBuilder("location", q).sortMode(SortMode.MAX).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS)) .execute().actionGet(); assertOrderedSearchHits(searchResponse, "d1", "d2"); assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 4, 1, DistanceUnit.KILOMETERS), 0.01d)); @@ -119,7 +119,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { searchResponse = client().prepareSearch() .setQuery(matchAllQuery()) - .addSort(new GeoDistanceSortBuilder("location", q).sortMode("max").order(SortOrder.DESC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS)) + .addSort(new GeoDistanceSortBuilder("location", q).sortMode(SortMode.MAX).order(SortOrder.DESC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS)) .execute().actionGet(); assertOrderedSearchHits(searchResponse, "d2", "d1"); assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 1, 6, 2, DistanceUnit.KILOMETERS), 0.01d)); @@ -194,7 +194,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch() .setQuery(matchAllQuery()) - .addSort(geoDistanceSortBuilder.sortMode("min").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS)) + .addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS)) .execute().actionGet(); assertOrderedSearchHits(searchResponse, "d1", "d2"); assertThat((Double) 
searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2.5, 1, 2, 1, DistanceUnit.KILOMETERS), 1.e-4)); @@ -202,7 +202,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { searchResponse = client().prepareSearch() .setQuery(matchAllQuery()) - .addSort(geoDistanceSortBuilder.sortMode("max").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS)) + .addSort(geoDistanceSortBuilder.sortMode(SortMode.MAX).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS)) .execute().actionGet(); assertOrderedSearchHits(searchResponse, "d1", "d2"); assertThat((Double) searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(3.25, 4, 2, 1, DistanceUnit.KILOMETERS), 1.e-4)); @@ -223,7 +223,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch() .setQuery(matchAllQuery()) - .addSort(geoDistanceSortBuilder.sortMode("min").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS)) + .addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS)) .execute().actionGet(); checkCorrectSortOrderForGeoSort(searchResponse); @@ -231,7 +231,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { searchResponse = client().prepareSearch() .setQuery(matchAllQuery()) - .addSort(geoDistanceSortBuilder.sortMode("min").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS)) + .addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS)) .execute().actionGet(); checkCorrectSortOrderForGeoSort(searchResponse); @@ -239,7 +239,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { searchResponse = client().prepareSearch() .setQuery(matchAllQuery()) - 
.addSort(geoDistanceSortBuilder.sortMode("min").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS)) + .addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS)) .execute().actionGet(); checkCorrectSortOrderForGeoSort(searchResponse); @@ -265,7 +265,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { checkCorrectSortOrderForGeoSort(searchResponse); } - private void checkCorrectSortOrderForGeoSort(SearchResponse searchResponse) { + private static void checkCorrectSortOrderForGeoSort(SearchResponse searchResponse) { assertOrderedSearchHits(searchResponse, "d2", "d1"); assertThat((Double) searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 1, 2, DistanceUnit.KILOMETERS), 1.e-4)); assertThat((Double) searchResponse.getHits().getAt(1).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 1, 1, DistanceUnit.KILOMETERS), 1.e-4)); diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index 611053b14d5d..50e4aeeb71b7 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.geo.RandomGeoGenerator; import java.io.IOException; @@ -90,16 +89,15 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase Date: Tue, 15 Mar 2016 12:50:57 +0100 Subject: [PATCH 239/320] Try to renew sync ID if `flush=true` on forceMerge Today we do a force 
flush which wipes the sync ID if there is one which can cause the lost of all benefits of the sync ID ie. fast recovery. This commit adds a check to renew the sync ID if possible. The flush call is now also not forced since the IW will show pending changes if the forceMerge added new segments. if we keep using force we will wipe the sync ID even if no renew was actually needed. Closes #17019 --- .../java/org/elasticsearch/index/engine/InternalEngine.java | 4 +++- .../org/elasticsearch/index/engine/InternalEngineTests.java | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 01f02025aeb2..dc0669e02b7c 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -743,7 +743,9 @@ public class InternalEngine extends Engine { indexWriter.forceMerge(maxNumSegments, true /* blocks and waits for merges*/); } if (flush) { - flush(true, true); + if (tryRenewSyncCommit() == false) { + flush(false, true); + } } if (upgrade) { logger.info("finished segment upgrade"); diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index e9971a15f8e3..abe0851c2b62 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -485,7 +485,7 @@ public class InternalEngineTests extends ESTestCase { if (flush) { // we should have had just 1 merge, so last generation should be exact - assertEquals(gen2 + 1, store.readLastCommittedSegmentsInfo().getLastGeneration()); + assertEquals(gen2, store.readLastCommittedSegmentsInfo().getLastGeneration()); } } } @@ -843,7 +843,7 @@ public class InternalEngineTests extends 
ESTestCase { Engine.SyncedFlushResult.SUCCESS); assertEquals(3, engine.segments(false).size()); - engine.forceMerge(false, 1, false, false, false); + engine.forceMerge(forceMergeFlushes, 1, false, false, false); if (forceMergeFlushes == false) { engine.refresh("make all segments visible"); assertEquals(4, engine.segments(false).size()); From 10333e2f05c9673a85fb0a2f1e3e763f51629349 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 15 Mar 2016 11:39:45 +0100 Subject: [PATCH 240/320] IndicesStore checks for `allocated elsewhere` for every shard not allocated on the local node On each clusterstate update we check on the local node if we can delete some shards content. For this we linearly walk all shards and check if they are allocated and started on another node and if we can delete them locally. if we can delete them locally we go and ask other nodes if we can delete them and then if the shared IS active elsewhere issue a state update task to delete it. Yet, there is a bug in IndicesService#canDeleteShardContent which returns `true` even if that shards datapath doesn't exist on the node which causes tons of unnecessary node to node communciation and as many state update task to be issued. This can have large impact on the cluster state processing speed. **NOTE:** This only happens for shards that have at least one shard allocated on the node ie. if an `IndexService` exists. 
Closes #17106 --- .../org/elasticsearch/indices/IndicesService.java | 13 +++++++++---- .../elasticsearch/indices/IndicesServiceTests.java | 2 ++ 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index 06eb71724c87..b0e1bbdbd2bc 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -624,12 +624,17 @@ public class IndicesService extends AbstractLifecycleComponent i assert shardId.getIndex().equals(indexSettings.getIndex()); final IndexService indexService = indexService(shardId.getIndex()); if (indexSettings.isOnSharedFilesystem() == false) { - if (indexService != null && nodeEnv.hasNodeFile()) { - return indexService.hasShard(shardId.id()) == false; - } else if (nodeEnv.hasNodeFile()) { - if (indexSettings.hasCustomDataPath()) { + if (nodeEnv.hasNodeFile()) { + final boolean isAllocated = indexService != null && indexService.hasShard(shardId.id()); + if (isAllocated) { + return false; // we are allocated - can't delete the shard + } else if (indexSettings.hasCustomDataPath()) { + // lets see if it's on a custom path (return false if the shared doesn't exist) + // we don't need to delete anything that is not there return Files.exists(nodeEnv.resolveCustomLocation(indexSettings, shardId)); } else { + // lets see if it's path is available (return false if the shared doesn't exist) + // we don't need to delete anything that is not there return FileSystemUtils.exists(nodeEnv.availableShardPaths(shardId)); } } diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java index 57a7f34e4b73..336d5a84a8d4 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +++ 
b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java @@ -81,6 +81,8 @@ public class IndicesServiceTests extends ESSingleNodeTestCase { assertFalse("shard is allocated", indicesService.canDeleteShardContent(shardId, test.getIndexSettings())); test.removeShard(0, "boom"); assertTrue("shard is removed", indicesService.canDeleteShardContent(shardId, test.getIndexSettings())); + ShardId notAllocated = new ShardId(test.index(), 100); + assertFalse("shard that was never on this node should NOT be deletable", indicesService.canDeleteShardContent(notAllocated, test.getIndexSettings())); } public void testDeleteIndexStore() throws Exception { From 0793f00cb94e906f7eef7b6f9ac4870617e6c203 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Tue, 15 Mar 2016 13:48:38 +0100 Subject: [PATCH 241/320] Added version 2.2.1 and bwc indices for 2.2.1 --- .../main/java/org/elasticsearch/Version.java | 2 ++ .../test/resources/indices/bwc/index-2.2.1.zip | Bin 0 -> 90922 bytes .../test/resources/indices/bwc/repo-2.2.1.zip | Bin 0 -> 88759 bytes 3 files changed, 2 insertions(+) create mode 100644 core/src/test/resources/indices/bwc/index-2.2.1.zip create mode 100644 core/src/test/resources/indices/bwc/repo-2.2.1.zip diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index eeb4825cb90f..799beb707f58 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -60,6 +60,8 @@ public class Version { public static final Version V_2_1_2 = new Version(V_2_1_2_ID, org.apache.lucene.util.Version.LUCENE_5_3_1); public static final int V_2_2_0_ID = 2020099; public static final Version V_2_2_0 = new Version(V_2_2_0_ID, org.apache.lucene.util.Version.LUCENE_5_4_1); + public static final int V_2_2_1_ID = 2020199; + public static final Version V_2_2_1 = new Version(V_2_2_1_ID, org.apache.lucene.util.Version.LUCENE_5_4_1); public static final int V_2_3_0_ID = 
2030099; public static final Version V_2_3_0 = new Version(V_2_3_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0); public static final int V_5_0_0_ID = 5000099; diff --git a/core/src/test/resources/indices/bwc/index-2.2.1.zip b/core/src/test/resources/indices/bwc/index-2.2.1.zip new file mode 100644 index 0000000000000000000000000000000000000000..8d8e55ae62d1418b6499e6e0deefab37ec338376 GIT binary patch literal 90922 zcmbrlWl&wwmNg0?K=9yhA-KC+umpF9gS)$Hkl^m_aBz2myB_R7aCbjAyxi{B^}24= z{cgYhcGcXqf9y3z&AHZ?wPY?u8JLfVQ2%()ZS24Q$H)Ksf`uZ2GBI>9WL8r}fP&eA zI#hr9YpxzhP|zRmprN2XV*UM5@n1H_{;%dn?#2L1I}=k602>n<6YKvHj__Z>+1ZxhyBNBd{wMlBg8DB4`;P#Bg8B!#S{|zl z;BU-Z{~GiE5Z&Cy-pJ5~k%P(E*Dy{s<81^qTt?BxkRW)sxq> zsXhrQ_@>|mJp6)Adh8OQ?Dz;bdD%J<_&o~ zH_X@n!>lUv%!vAJb2DnZv~k6VeX58fR47CE74Fnv5YG?&WVAh zKaMAO@HzI@9Qoe!Z5Q|d6UaXgKFWXBkAFpDY5ZSf``-@2e}-rMpThr#HW~k$HvfN9 zvHx$V{-@&lXTSLVcVz!va{mVh|5I}R;GBwJAvgXjK%c)R{(r^!A7W=@`7a&*gZ&@& zPnoo#Cx>E6TQ@;7VJrYKt)dMJD``wQGJ^f)CJCz&E6PRW1FS@P+b@Y^vcQCbIn`QC zGks|*2_H*KGsN$jv@`T%3@qdIUzcZi$2iKC8#qQQIf`_D0U-fHySOYnJHi*8X#Xu) z6a7=h;{D$__#Z>(KhxO#ze{8Ppu{&?`a;g3;>LUVndA`PHQGO>%!0UNmF|Y3K?!Fh=@FihZ>7YNaUA*XltUTL1?Y>pzGm|tA z>u>y_x3>2_goeLpI(YHDU-6$7+rMxd$wQM|J zt;`iiXzNrgs#*_Dh;8_sM!=4npTF-<-)pt{K=SS1ULxTAd2aCNbmK{H_QM`lPmP7V zZE~v|_f=xMx;Mkv6})cxmmGUp?Q{GegRg^pajMgN8N5$XUJgtkTi8e%6N9iku>OS< zS&v&M)*2InM+$-(0xz>eV>nU@;P(Lg^P2@RRGxFe0zZ?u|e2QY!Tj zETRj)-_XxH78h#}VH0MebI;#~9TzyQa49iZDqzXlr4Xlqs69Q6)%R9Yb?KT=ZEScK^Xvj zT|(OJ5Kc$OD%KPUB&)(7O`TlmJAml7M%5v~A7(9o-msy!modaUiq!KH1#GtYYusoeuuVyD^cUmhbdd%a@7 zr_gfc=3izBD)2F^atz_pTrUWPT_OM>R{W%IoaL#u)^`jEZJ2+1z|l>2vSoRP5p0+p zyL@L_@_8};B9_fj=^a%^BP?;%AUGNSQ~v==qG3o$vs2C4ZBbx8g$7HlOF~WY$B$;$ z$x&9w z)Nuxj`)tsz(Rl1VN7uFr%Czb;&^wCkRs061g2gNZz(umMLaG9{E*Cs8D=7W!%G>v= z+$!$#NI!?{4AgOWe%m6oHezl7=i27Yp?DPMdAGjqQLkub=;`igSdmmn5*1mc0Ab64 zygQ)}O)ICdsLH{IUd}9ToZc05DNDNuvsD$jjI4sUN{+P^5GnR#vTik3zbdamAL15C zG$&#$4#Qd?QD@mAK+jzHSM1wf?-N5fnY(9`YADBZnqOU_2U7@C zCl?(<1?b&5Ar+M#&PakIfE~ZLv@awTNGDSAaV;pq-^6wsZ1YQ8O`91bBSP@3)?!vk 
zg)Kh9X5ZxXD#}r6bF57f(0_@_O?6=g>*DI{D48#dvKl@tct7;Ps4jUaZB%?udIahU z8On;EoiWS;LZkC6EIMs}KBjUSs!1F7Z89I9B1q^~SuTDgxrICEk$Badd^3SYi=gtl zMaA(3>b#&%GcwJG?X0RE+<2wsmT*kAc=DH8Qvcm10 zF~sy6!KmGu%S4m4_3ed~6)ZAVhB@4_4>PwE**h(?edBEhFE8x^Dh+>gcZ?uiY8w9#E}bDYMTGBS`1xtGD4f_k<`${1yil z$|60z8c5gpMu?-CSqHfZ`kWfKUd;7k>^v7nBQ#6cVbuarUt_STTG%w!+B5CE(ucTD z%oB?ymHDQ@tGiMhUB@KD+nsU7b?O>6FqQ{94WX&Bvf>5i@?Y5uTWZsEC<;3pxZ5bRsy8L=^*G`h zT7K(hMoLxXx;3+p8ah*IWz`GqKz-Y-H3FHZm-^yg!rFGgcGSXG%aDsLHe_%YryN*v zDP`%;D^Y2&XL@xcg__!Hbl7vN0`1hART@A+@`)(8+mJZQGndls(@(D|AX6pm6<>C) z_0J>0irQ{oEUZ%Q*KG{VYG;%sFBcXXrn<+AjBSjL@@@l~;WW|m6<0Z2_wvKU@ng|) zuph^6z24KH%WeM4C$*4N@5Rk9*I(G}wRSGS*Z8U<#B}`2=zTy zvR)jGpvCrxrwuvh_AQta-U4Ml$y;jz(lev5%w4|9^kC6PwWu_HImeN>S_3X%uwjY3 zTHg!f+F8VVL4F~uTH?*KSe=m*$O}r)V&)E1UV`iD$cjm_eM-f|20xx2v5Z3e^`u@E z59rmC`!Zmpz{*ylr?x24#nCKT1JE=EQ!N!+foHTAS@)zx<{ zDc=?sE8|8fi^Ugx=B~(9a6BWm=80B%l~j>^^XQjxhB*V4D~z6N5cp7G9&#Of3(l{* zm;2=D+*-o455GEQxr)S%L6+^^)=GY)#XWy9(X$ zMW45~Pr-9Ei}y#X!Ag|%cF1;X-(OpQ1WVoFgN0ibUX9u@)d|zf0tG9JaRTL&ie!>e zs{$K@2s|wo{v>9vqkpTI7VoO1-5>^jR^N^x8HHqmw4zlNVRs z{S6r_y7VLmtWJcXllCU?1|%_+PckGiuJ-~8LYm^S z4#M57zkJvDe8^Z4{dKm0U}3AVz@vpK(%A_Q-^%OjFI?zfW-!=h^ryTb@(sCa&C>Ly z>o%s1zelUCvon@I+98u*zqpsMNqgWHuya;LL))~r#cHIDY-4+ja{v&;y34q-hY zKvYS3Ya^AxyE*vzA>Ihmv&sf|)hiMx2HPHym%0xvZ?Khv%pAwbi(~&!BD}aR2JT27qyFkywwoFRrP(k?{%yJtEU}+Qa+>uZY3Bnr}YtwbR@P1 z%F|yJsB#cca$8e4Ug^uNGjAvm#@@JFqe78;tBXc&*_Kz*$cExn=dcV?-nyC7i7j+h z0BkBF>~z%q*Y!0CqtNP3b#sg^;K+EuyBi;3rj&oCW|wm`v%ne4;OgL2YY-K0&rHj$ z0hvI9>}5CL&5B_;(qv4k*vx|DRX*LSvtcP9$%<4~Rhe5fEH53MU(LP*DJa##%?i%b z>gj6Qwx$)3oGhdRrc+3MjMNn5>^h|aV^n#toAXS|9!LvTI(U2gqjaks@ShY6+p*my zZxd#a`ikAmTyDeGC$e8E?M=1k>Khkg#&y^TT=2-P%FM_x4N=QC2N_Y%oOdwD=0f}V z6a?G9DLM;8M`H*V2&Lj1!}67@&>vng$`*Hm?` zGGYpcqnr_iC7LB6XdLF~R*R>1u330kchy-q+K1RD4<$DPa!Po-=N5w8`8H87+ujdqOHjAzLTa(u^a$T$q+j93-IxLOVUL>+}H^XZz3zCVdV>BFy zrr1M0N*6g&>++mVJc72ce@}1Zvll7gi{2$|Q5Ra_sVt4z@|U6R`n7#W8+UhE>h49X z@KzP}lQg8~V{{22PRd=rBeo^%LN;3z`Y>f@oJ<{~aWp_!(N#x~M7nxM$iVCwJ<9b* 
z9c>KFMY!V+q~0%*@5_4kRp1o2Qs1Fgc#Q{t_WAbkG@*WHb~Nsg!?Hf9WcrS^zEeri zm$lHl7-9mgZF<+OK^Sdg!|2@o&_ma1-%5PdR5x@{-Rj6U0XFl2GntMo!u-$O#dGY> z^5)LgVT7~@9W$L~YxgfcqG@WPTCm87Di35$?%H=tg6rEQ*9~f*Gn9ngLaU`8S6T_zZ*Nh=*Xs<^rI+5iq&x4naOx-#tO{nZh^5WO#$r7FQfQ9Nh1o&*e;w{NQzf&r6Gp*u%8x$9+HC+(F7 z*tk-oFcqoRC|5#!Lp%H81nGLwwXKK?fZOFAN3X0gcZik__Ha2~5M~Ac@fG$&4tcbV ze`7exaA9j{-}0=kThjBRN0zS1r34JZqM=;Zfmth1YiD3)7U=9{#i&_c1QdZ^In7ZcR44_td0KKNfAivjIY<%g z1;_HlUnRHAlUZDQ(b(t1r+wj676roYhCz{EIu1_Y*Sks-if)26OyQm3r;v=|K7M!qjyEP4ld4X2yN$>8lGyR#7K{|{}8G_+E!1jlQetp(q$_rCcG%c`3&HG^U zeTq3C{3yVX>Ma;S!P;vy%xh=d*Y6F$ z6;+LdAc&&@4>j)E?2Owj)3Clo*q|pAARiISM+~b`IBZzLNyzdZwQTQu! z7L8>$$i{n(`GOmKkAH`C0^3@CG6S3I48BkE8GoH&uZNj=Xoc*i)GCU)qg|yFhTDdQ z_(B&3>%sL|{J~m;?{^S6VAs#xrer~FFbD^ZZ%X_9IUp*Js8JXa@%}YS?p{(G+b%8x zI&~)y?XDRlJRoLXehmr?!vk~3HP2%ODDetcWx62G}UqiG!xSXN+W#&K`EOOvcM^5 z-Cv_BMSuPd`l?rowzOuJhbnm*Y@ z+lqI0f}uW3mIx&Ts#(}5027iuOhUJjb>pD^*w4(XUuR)pL1nRH6NVCY&-8O67e8A# z05O%`P=)@0f^N?;02rkN?Ji*l_{OGByo1A@+Y(}pmo0`b)8J3sDJ_95Legs$6UmQ2 zSE1o$EH8J=)*KA|u8(Yw3pO6U#CE2pRgOe>yZW56qH?bfPgP^m>CgJnszxp?$J#ne zLls<-64&q!oA?o}ghZO1(Nvj8=7>nnNr=7`lScUZ)gJFV&9B%cm+Y%JAWbY$vt+iqfNwDG% zWf3cnS#yUl!9K*?j{|NxgC!0p{NNXuwa6a3U(-r2XS~J&mMttX8YhMjtZu~iZWZPA z;Mkz3v#`W$QOes>M3kTA_Xn9YJSTpjHYE8;8!v1=m7XEtrhXfKJ(h$hR-pN+WPJkq7f0X79o{zl{%m1z!mvCvYLME1 z#k?_!Ll`VKr5dVKg&Mc1;=nj0!uIPKOoiAl$fe%&w)?|fJ>hGv`Q0-)6%;mgB=;IV9CR@$GD;PxZ0o8{ zl;<}_o^5{Ej*ZH zMaKTre@n(rz%i1l5W8Fwvb)$LgUB5!8C%8K*j?t%_n-RoH=#-ov53MC1+eSaWzOf)yE?XXYo3mPKO2-C#Sp2M zD}2W~dFuK0?@whoU*!~u23~K;6W4&!Ew1F5*ton=lJ(krcWy@=AyXYHq`jwfEv66Gmy6Dil}apPFuHCNiJ29^Nz z+MU3!sT>3667F>n#>2=jMj5bV>L2*)0Gi%!#^*D13cmMU5 zeQllFLWAT9BL8GZ0;FQHeh$KW)tyu$xo?1Y1uuFevhA5M8XV z|2ruiNyx{KEdKL*MhtLZeonu}v2}xOS)O?NTR3oFN&|UV@@F@9!4cVPVI!kckzs%N ze6a%=uII+spd%6@4H($k-(Yg0Keg3JU+!o_6d=p^OvlVeSTFNX9$~= z%b;qk-`c6(uOSwwUmK^*xm8?wn`FUZEgrV)#%_VwEG&h6JqLQ}bsl!>2Qn z)KmHP__16B*3Ymy`qp_ejP~KHoh7defn<8in`rjGg*Fs$fUrooyD}Vae3*iF`DDVN zJ#G&02He1YsM#40Y;>zH?z^V>vpm(FJqhKd8lEPCxUyH1T*ka#K0nRW$-WL7MqzfZ 
zXYv`7rBl?+69;&)zsvaV5-O}21t`{Py_4;B?EHbAp}D$ks7Ve=Lzzm36G0cN;yAn8 zV(`>>J;A|cBG;>0SDckD0CfExmP78=_224^_9!t@K2=2RKY&z0dQHu zqVH&N!FK6YWwxsM{>qJ`#G@4wP5DF|!B<=f&HT_T;T%v?ilV*MkcgG-kxand@k?EVq z(BP0H&i|AF+ySORRvCU)WR>(^i0XPOu zAMpWa-&(GPG}Kn0SSooV=q*cZ4DSo+Zd=#-<(&O!Ydm}-QIOi*cq($u&lo}&v_J67 zYoQn7wqX|fTYJw|Os%NzL@HNo!TExhP~5mDH(MZs(!K`P{U7!FIS>s_p_X&>Rgx^4gWK1sp5J_LS160L#d#!N zUmmck-&jaY;9a)SY9UCpyRGGOB0u~DSI%s{$yFum*1&?5XLfJw(t2&{aFd3l_QSrPo_JJs-s7m=7aQdL1v(y?M75V*O}=64HP_x(fyhgtPUhu1nf(+|=EezLv=k46t*GZSGmR}DxtzOW z+SiNOAN9^;osWyA_SnPQDSLNYbPcayDkwKIMBdEj_}c z)QDilF^oMo4N*4rp0a|15(SAZ9CVl~-cdyyjJ!rgaYVBnjh)pqB-pVtg0y~mb+s9w(dNpz&lZ4?WEN>(S*@eX;JLOt3$J4tNNnmpit?Zj~ zON{ICynu>@XuraLcn)0ib88}|!fS;{TlDELn5*7ZXrSP?uMCn@$Eap^ak}}Me}Nhx zR4r}JY9Zf%m!N-h1}J8mf7Xp$AUmK~FeK_Tip&nngv98bPs79Qyw@`ZFnvsY#U+Hj z^Ud7$p0M3pvg}XRao_9|dBjK+mD@Po6DIw%W1ne9-xb}ladJ@jWJ>b8)Vyhn!t3j z?qgvZ>fN61sq`gYx)A)=V@n$~j$cOYyeSe%jBJjW3$QAT%DaWoDlx$Y<)Rr}0!Nc* zH69kPKIDTyA)inp$v=eYNm1EPD6}w6ErLm_KO6EH{*guhTxA;ZuHn zz7%Kcig_tb$^EQL@ zF<;q>bXK=4(K4Ju9s34KJ7q>me7prGMjDR1;Bz)j)+rrIj&{Q)dS8^C(43krmI-7E z53qV}i$}4iXOq?ij2U2mJbImvJ1e-A8s)wgAU{c#f}cS}>Au4J!M`vl+a@u?~8m~Ua*I!aGA zLB9#|L9}V6qybAArv9kKz*vg|hR$c?dJg8hm9>qMty$%S@HZQxreW2HG{t%Pb|u2d zIrH-8)k|5V84KJxvUY{N;?&=E{HsETVP=`1Ojtq*?|}odtU^(CK0iNg>)Dw5 z(|@Jkj6alyfWAMTtEa}CwVQoLvp^M}6cXw1A&o|Tb^0|VG`(wF80$*69d^w^sG>K= zfR{9-toV%wB&^z2ZgGS@UVz}uDdg&->(jX)Lp*xC(Vt5SE{RTHY%^Gu_siX*|X$9 zO*Ki03j^7)+2vEL%VsD~-&`fG!y}_ycl<%}(#l-_d4vjyDtrUF42R27vtyViQ%~iV zDd=l+J!=!|G^Gv&2?k!=n}>S#dP3bID{3j{l=D8M*Pl_Nv zMa6J%`{cLLC7bRdCW{ZY&qGX}SoF?4gT!Jjftt95!Sd(C(}jW@!osqqKtdxO;t0bW z)~t=eY|`K~p<}12e$}jN@yLf;?$$|3F&=^8%Ld%2xTnxNcDc7~)h3={3vFh+>lEF1 z%$CJ!-~Pw!pOxFfX|?Nzr4&MDp@Zq6weqfJdyX!t$MbyYTBAp;JVIl;{+A(m7Lef! 
zuFxC6?bD;u=Sp+D0stuboggi~l-_CzBXWx&Bdpx4W5=`I)BRO8q{P5guPMOcXz6(B z2r++Pu}nT*EMHhALQ69#zU#!W33 zE4u?FugLVV7G&7G0=K$WXyl-CH`y_`0Y|WST)pL^&+XCy-|(#@UrpZ}_G+!dnfY{C zcIm7pr_Xw5npM~fU>@6OBHw$hy+hLEN6gtKlgQ)PJqDNdbKwGODTB)4jbrN|(iKk0 zskI`5e=+PP_oLqe2)WRkXi0lHMCDwuP0KtCA-%b_WB-io8!-R^^YCTYo#@2 z_od}oK}FipU7Vs&AU7Cfam{q9K zTWg=XbLWwPQOst}eRj6SLd)+Ik5W4MI=My2{St2a{A5ugA%QivVJj(JZ6Ii)BY)PtN8F~NH%rDQuG}$ULBq7zR zva5#Vcm_e8Bc8*9r1$TuW1YsTlI01Cy8BK60IhEjs{8V!kuJj!=%9jQ=P?T+M)=XD zcg4ah@`mwHM8sS{Sc@^*2^`~z*n>fgFICBkf9UuA_yT!g`fPsJ~b6x zh?^f$9$Q_qG^VtO?SWZeG4Jy0DB{xJo`;ASU8-JOtt853-Np z{cM{`&A{`)P#EiQ)PIt5=lCiC4#qzLEyEj7T!0O~a(O5Sa%%Uh6Eocx+M)P?K&#Ax0Zcfum zy1|7YpM4|3Q^`o$`NXpS(eTe>HTwR2MPFpBGGXo^ht7f0d=Rp%X97M}m?N)0@|R|4 zS6JB@aP_jziLB3d%%w7HcoVjmPaf){ZOTJ7NCGqq)^{Zh59ke$?m`_V-G;DCbNqnK zXb2%T0r6PswhNWAtDpFV2qjj|niA-vme$Qz3ZI1LNiD4xlI-?0KXlP~%bQ&_7Wg(+ z)k~*)r;KZys2ju=*5$GX*^F%SyVR9p)e*l~e!Nf>w_kd(;rEE4FqD;AsC{USq`K>S zkl|_^Vlb6N*Q?>}asGCQYM3pjVN5hYn}M#as9>iHndu@N6+G|b0hxGaV_Gy#pU90W|BCNB-_G) zgu6O=i~qqTy-Rc9src)xd-T;8zfodYZZ#WATh(7Xhd&12I4rj4NI8dTedl9iHcwNX z$TD#W>cck{Pxdw}5_s#Y&%e!<#{i-om{&WP5?S(ZL+6BRpA?=4BqR%kedn{3inuM4 zCf|mc3(_S9=BlxXrOf?dAUPMj2$-!n2*U zl#tdKK|->T8)|=LzSU-GI=9kR_7v!2Am$QB$_J7kP`Y+qLE}L~IJD%<V4${OHt^|bhfk)HA%RvGnCzbuGCqKHr-VjdGlYtzphfrjQab{~0*2VXjw%mTd7j~TF`F?=Q=+JXIbWFg(Tfz_tKqtk>?Hmfht$j zxLr8t;Fjwk?=leNXGlu%f!F-bjxp+jUGGGr)eu6OpKDh+C&03&y*%9}3W?Y6nKc@Y zOID3;(B0IG$Ci=%WjxLg)$CI(3o7`uwByvQ#H5?_sD)OeO$y5JzJ_`=e`9Toaun3t z;>!Z_!j5p>qzO}gbR8b+^h$qfQkU0zBf0-L+0_jxR5D%f+15;WMT(Jl$Bb^GJyv@O z9*QVX<^qUlsUTSe z7kdm|(nKTz?*3sn- z(%19!QkB^QhdGsq)OG6wnYtWhvt*??pc0?Qx=bEc`9`#v#U-PhlC`6TK| zo?c16f^2PsL#1Ud0?Zh5xEvz4k%Ps=gonR-HSo_5@q3u6&aC6efjqz@+AyeBB#EwA z0kSgw8l)q+tbMz(scdN~RlIz<|3x+0`k%<5Wy#Ve(rQ;6x>7k%Q@c>S>3w>%U!HKpBDfxdT&f&+ zOHI0@)an^b1wFXuN)99;j-6Ki<`co9Lm>1@b*^!r0nvg}E0@TZOY7Pt0R=x6b{ni$ z#o#X7OeoA&*ar(xL}2e^ePWCkGI#kVij~}x{(RNUx2jV>?Yf*^&i0-RCp>LXvW7s? 
zVjncDiFo;zN7daMBB=FoZxY90EfT5@CSwKQ8-+Ns^U8S6H7HhM4X++IZ5)iyZOFf7 zTU_?%5#>M>B*T{otTN#zZj5+C^0~Qg7E=hC99DxPRiTY(ZY?1sCuJ_*HSHg_)fR>R z^ZW@n<=K+xx{%$hXtPWAz}^*{&1&+RXWhx5c#XQdz}>c5VS#!=m;!j6tB+OQ+oZyg zK!2~CYJHJCVJF0mhtq`4^>36VYnW0TinzrO0sI;4UW)sv956Vxt?VK(Go!v4AiNa( zA*e*4*F@NIY+yhbTwlb~+sI3qWC@Ag<9(r?{8c76?O^LMw#|?F&NgDuB@M<#v(L4! zMe)E>fa^&7`UN7;0GROsC)z1WQiZMNIY1ilBuKvvs4|6J2xgBwp`kcxyGKu|e#g&- zt1rS2KIIs!Az|ykUXw8c)sj|pEbrNtIDfrNX$WTM1G*~W@)$cVVLq}l>DPJpk z@Yx|dm#UYFRObxoD0RI_BxYVTz3pwEB4koHA*p1OB8vQ5LiKy&^>6tM^O_1<{A)0r z9;-#2wO8@aNXKEvM1L#qL4AVB`KFOpwH>Oe;!j9s9Z5yA51t@}?&5mU$j#=u`q=&+ z=qQ z91ZEwHIJ9N;V?=81jV_JNb*-h!=y;0P^0Rds774Q?xpDTI3F;yaw{L75?k*_yx7v) z55ezOR4^I3<(6xQ(Wysec^-T-`CvMLF?7tyv-iYNq3P!;8){!3;<+q2>>4k{i>O4)vGm_mjz1g6d zIk|>dufe?Wh&gs^ryd#YQIpy54DO*cSL&G202Zi`s*f>+%f~P4A{#dB!m9{>XO6qv z&&SbmKQ>X)Todo&iR-ySqGZ|<8tX$J+$_-;zMEU(OcD(znTH7RyM|MF1nrtaEu=dk zjud0#j4q=1jN;0FZ0JW1|?+7p|BjsJvXxr(Ca(X0e2zt7uB*eMLeAD=+7m0(}BIU46LkC#hVP?5>sankm zmR6t9G+#Df{rHPZ)Ws{A*XrJU}zM9eEL8-~|48>N}Nbm<$q^#P4OX4Ic0fyAZb(bP}|KXnD2 zD0g)N8b~pmy6QY8|0McmWF*Ou;m*(=C8O1(`{SJaoUlOH?b*?MW9Gmyes&%PjD^@=TnLIlO0Nq zV;I6`Ume5Y_{_<8Bnxsfrf!LF{k94B|){Cu5u!;EDvp= z!G}Ck2HTCQg6#a{nF7XeNFCO@eIrzU2yN$kBhl|>H3IsZ+>+f)2{w&}V(0)InN+0X zPg~khuuIlEYF;&b8L1aro)@#I2=^5Y6&#sW4(Y8_l1AUl@`)hrK@`$CYJ8E*a>Td= zEbIDcH}14+2l}oIA(q=YLsP0IX`c(jO4u=%p*{)VZACATWtz{tSZdlL3lxPoYBh>G zQ%!s({(A$Y+*qqKo=;l>3-h`82S|RA&zC`lh3bHS!qiea(bOM_S{X9p*y@lTSl22} zIKFE&M##Y&Eqje$H6mr$UEas27i4BpehckE&BQ+cGc`v}sz|t#E@5#X!z5Y*f3QE# z1}WWWUdtez)L;~Q+**#8wXKFLt8`!FDPYX)taTW3vQB_R<&kmmJCPun%H^zv+fR0R zyE!7n*0Uvq7i`iKV^+x?w1_)p9;ZGUykAhh0q^jM&TCbte#n!fiw16S3v@Hc^w#a3 z1wSPGu4rvGruZ!L)#022+1?O6u7#H0HKOY9s!n9<2h?Sk?Q82=p~SxCs^Gmm9T;FK zDzVu!Xcf#>5-`H$tx0A9ZK1Kv0k-f6EL@M--(_;J4+d&4TgM5OTS>}yH&}M z>(+g%rji%KI4I1Abv;m!;u!&_yXg*rHZpV1Cb(OXg*y&keqM zjhMttTw_=r%RyNBe&cRSehXLSkQ{~`>gJY<7}irvj%2}l9u_SR0H-;d``L3a zB2C>ooTqMHUgM0Lrk+GACFdT0awzq+6|_*;Dbv3Ck?MjvTf!9f;3@LSTKU2Q8$Isa zDomnhzj>^wF*|%uNrM_F~P&KaG7a;wW_?%7jpX 
zV>W`POfQV;A??^Uih}~B^+>CqIJQmf!nLj?+%b`N;?oFoQpsE6xJ$rH z5Zq`1zmTu3C-*GqFF7v+-kNUPZeBOjfYj&ryG$t^SKHPAf8k=wrJElfE&xQ)^fZi_ zl{hP4=BE!nfR`$gBb{zUR$`RT8s`f~b)bK+XP+RJH!JtxK;Mns? zMdaT0gFthfqBz_~ROI9-Z)sSDk@eHdtL}+yPbLj5xx76nnqL*JBnuoI(pyK<6AO7fVTNp~FC2I2?S@s}DqTKMANBWbOUlUd5V7@j$0BjY0)&o*s{L><39tLWo*X`xsMu3@*vy zgC)zEa8h5O;73acZgwuhn0vZ|L$3yYPuEn5EIQEIA}5#SwO`v9CK@o)?`9vaqN?Wm zZSmdWkmwIWv9Q2L%Q9%{gnR9X-jo!}nBo_bav3MrhoB^!tL1av;wKQmD z=+m_b@Ld~oI=yebGI61G63)=EXV0zIzMK@V|B=1YITQg>L_VIc>Yu zOyx1k)h(~=+NOMMbUfE{@we1-C9Do~A&C~}Jp*<2+X%r*tkGyca6jjGX6Hf0xRf>x*;V zcDUm|K0f{C`o>~*y#3Y zm2+e+Gu^r;D)qhVNJGynjX~w7bHU1H3~6>vICT{jLH|Rd0#_4uArkdsenw97jv12-U)ul*thHPQ3Ky^fw zCup3p|21>de5wB$Zmmhqxb?fLxfDgE8^_;03i!d(cG7zojGO>y*@e0<&m{Wp)>9DE zZ69fbWrXfgb0NElZqvw%%nA*X>d^v|1p15*Exr5$3S~aHVZJk1q`!sUOECxVtPZbE zv-qS{n@N{je8vh=S}smyf7-%n3QcZvvtBPb`6qjyZEbUmd&;7W(?^x~!jm|&@5H)! z)2_Q=pR*OB=e<9cIXh-EWs!72F9b>ow}ug{(R<&KT`X9VFfjO?Awo2{iFc6a-tN=sIQpe^`5`AWfoZO}kuOw%ujhc9(72w()h@wr!)! zwr$%srp}p(_+uvinYow~8JUq6d9mJ=8JR2B-ur!ka)Ga#BAFbWeY|pFBBc30qlWkxjnTvrSOdqNAjhAr zmWCk|IKA?yOX*l&2slnPCudefKb@hkmpaYcW!pQ|tSZ^uBNV;~W9?>#3Fdc<;51jD z!Rw?@563>JeZKw2N-Vn8@d9gz+D>q6kFm5RlvWwl>2K;FZVl>Mwvd|p_!6sA#9 z_8RCvy}>*sK7qWa_(WX0!ngY!Zfnd6Deo=~u(j%k7O`{e)?7u6fL-QO?>B+$Um{C> z;k&L^!xH>R)y(1i8z;pUxoH7;ON6;qn{u393cJ$b$MJ^W);~_>rL3uuYWM7w>37s%lvH9JoaWVXseQCk4(wLBre`iIvf8T&$j5%VRrU9KAAGb;70Ps9gbDXj z>!~q%0dar_1J$MKPZ8^`>vyg?^IR=R_A-<2mH};$_${rE&2IgImFZq*(EpfU(tZLz zEe%i1Qs|IfZ%);w-jV2%8*$gawnj3iR2<=jRJLj-PrRM7&Uo`$xNn==8WXq((l#^AEdtBY;g&0 zWWXZaMYhF}imsSJefA~`63{q6^-XIqTtv-3<8Hro)Yqx3sI`Fh?l9#QX^PoA5Q5^E z2!Pb+h5&2p3+dJ}HWa#`IS9q&doh-K?kcqY76gk!hC87MdMAUW-?XSUo*+k#a(}hA zg#wNpDbtKo+a~(3Gk)U*s`Y&qzCt$j9w3@4>_E5e|A9tePu>p$pj}+P*4ujqv16CR z3&-?ol&uv?XV;+`A7J=+{vruf@{IqjPa%;rup?x~uc0!k6wo0T_pvCT2Q7`Es;T9--Rx@GU7Xc~@nd^{~`e z5GwpS-S?kzl9#GYFyo7Fl|%L3@iCQdowB4DePAg?8___nc{rzHg(oja zkCrW*urDksz01rk&x~e=v;}UVL&vOk(Wqfl9k)3UO}%ViW@jmSViM~g6g{Uh=&OLE z9B9n|aZ0~}f7bA|lv_IF3zD|Wp_K=b)%T%X@kRE)iDgPMOGD$%NxS~)<_KK@$Qzk) 
z20GdR!#F?`S(DqmPihH6dq995y9#y!%N95Q!HYdiDC0abHxrC2Q{I8*SJ%+knOL*J zLUyGG)>T8g`#BnE=`ZIJb6qYuwknLK6?L@EeE-ByD;+M2*0l|1w%SfyJW_>4RnI6s z;%MZtqUPd}A@jj7F6wB)F`ox^1Wf7Xe@Pd`F~hSi0!g-6o9l$@3u9eY`PN!SwHzYC zMQv3tq9!~L7JVBdRa&8EcP3-%xR*r;WXp_qU<=JD*c>+8{b`cDrNt&3cQY}3)0daH z;fL}g=M3xOK_2E}wOwmtw)3WgjAtQw6%7EcP23o0LvNR$qnf9gQ9;jKFRJ}+NAN+M z0^1JuEI>A5eEB1Td*IXcKRw)PRQ4#`=vuN+s4WefD=9JRh;9?qP)}isytF*jw9% z3bf90Yh;yCHde?5jQ|ifHJ0N}Q+c1PTczDVx!x1Z0F){o`(-`-kLS1I z@p17t`q|lef05q)PM<}i2&4*=28E-?wcn^9(_5#dV*vN61y*UUxS@7`fP(V14(3W$ zX~+c=ipnxq5Q)lCA(#SP{s$rQZX#a08L$Mr;-MuRE6B5fA9F7%^NXfV1LMMuf9)3u zoVwXCDHc6|^#gdQ0S;*!|&)vsPec)O1+_~tdoM+Bwq zHQsy6G5DD))WyGI@=EjP8vLE^-0&oYV1|Ng22X09o0Xrxi>O+Z)@B)Fs04m7a~zg_ zP$k#veDy7oV;K$-^wVC_|IAFV%ZDHbB>i!e&1)u75FQ5Vogle*x1J5GKVyS)0=?xq zmarl4K|hM%tWMYlTR_)>^J-hjJ}Lw~?un{>oXmAs?aeS@H1Kc-(YdjcGIFIc23B6` z#CY~u`dw0-C!8)(*AncN1t&jdvQjC{tP@dn57mJW!NiH@4`NxBbX!Hr1P0;wdq`583%Jf_sXEQ$u4jLq zKtnW-rjJ3Wv5ERf=>+y$kVtfN$!NvkudCHj_Jitme3Ml(*OJ_p`@uFNjl}oyrP}xP zh7ao}KLZUNj0p^opyuq79)l$eR5tpurEwYM)BwhE4_(A)R_VM&6$yA6#8^^OTx$Lf z10J!27t*Cb&=6#?i9ct!&Mbs;gfUAbtu6sWCDX#>E}kiS;Ft>ZGN~ynvCk88 zPMA;}3hLt_edV|MF`-wg)Gc zUI4l58fr^M#mq_xqgpk9xkHs;;)v3~p=&NyA@&KPrO#O+<6J*nEYn$%# zK8nh_0@fQ&Xtxn7&|+heUG%b13J&lhFogK8-Rt^-i#m$#jho(iT^9i1@$kQNui*kEZ^7qgL8S1(cabWn{m8w4=?Z90}SMdg;D=M9|h-|d8oX4KX9x>XLPml!$6d(b5? 
zzZk)25xAv}eika5f9M=vFJ7Ppb0oVG5+Y>cMC;*I)W5fz(8Sz<3N5UfLcaqRjzC{7 zhD0~S+dW?i@WS=E&*epnbL>Vh^~NO>>DxtN?&NW#@%wOGI#j<{Ij%Fb@wF121XO#v zVq?f(jjO))b>o=}SJ|0E?l;r!!rv~$vE=!{;fzs7qIDSo2cLZWb9glz+l9|2A(s84hr#nD%p<%Tt7(?( zx)%h}ep(m^&g?79H<7gtwFc5orReH2DEWng)tO>_QMR!>;byqy3Q*Em012SSJLa|( z%KH6v8`eMk&L1bZscF2a=S6x~aPhp(x-K4v)MQn4wZm<<J4?PjYi`gbm!W zem%Y8$Dt3$Ayfrl`*t)bX@g04yIu@|R&E&_Fx#meE$%%x$SsLJJ7<&B#qxgrJ*3Hj zi#U?N>5bbPrbsWUj8R60@=i&bsPMcAxfU>2BZAaubnR(8aBC<;DTUu6?0);{7kRmn z$-CIlb@M7Zp#HnE5-vrFTLGUv-dPw6DH zHl464ge&(RyiYnW9?U`T3nrEC9hsoyqWYfTA&fh>wg#Pf?ujRX3VhQQPc(se4XV6q z!p-yiz3qvYiHcKl7S$n12_xEE(~f;u$y^TJpxdfup>pwB26dACbsKW2Alc_{6l9hm zr`PG;zB~tcSrj!apO0i}Pa51$vn1*tbO_FhRqHB-o&iw%oNIkz&2V+Y#}?_ihm@l7 z9hYcp<`yYadzahW-V|*9UrOj$B%eHc>h#P)jf_(~C7~C^%xs{)Ce+FvzJe(%RKjOiEJCjF<$f{3eR#Vi z62lVlYZXV57&8!MYmP!TI%$Y*w@D+BSHw2Z`p^ifyu9kzYt9^6yF?Y96LxC8_otr^ z63kJ}!z2dmp{<3wA{AW$&`gWMHiplQUp|RhZox>TzNr4*EDjOsiVM(s9Zn6YN>oq0 z=bx6yl|JDXfx5q+v^K}C`{qglrB<`52*+)-^fU43Cnf$Y`hhzkO?|HvH$9I@9KFA( zVOH)QIH5PAzBx#*!V(l?sQu-gJGwjEoFEF|WTB3v?x^M=e!~kr1s}u<1*XGDFOZ1r zCMadqT6C+tWtoR7@36!gl(z){WR>}6;nIa;iAL%=K1;*n#JEe^p91V1kaRIhr6gBf zUCgCDKb_FLnj@`8|8`p&z#VN;qmMu##<;f0bi<`Q#Z&&4f)$}2p_vtP){Qp0D|)pq zVq86%rb1Q1v7~7Vja-p)CUTAfxiHdrw@#Q=SP?oE#?&>f8tg7#Z&BjPm`Z<>$1MXa z_Ln~@WgQ8ut{$mp;+ns&u99ixpdqaZe$Xp z`;I`vXb*U^he~|)?Flv1fm(Z{4sfWr9FU$8tuc%N0YAv4*Wyv^m^-LCSJ7>W+l88Q z()%w9qVb!uTClGUFGS_Dsh9WF61H$;5?BH{R!xF4on)ukabhYyJb9)01CIERVsBa{ zy$Onaq-%$1xLmKmTCBzC^VW-rr*l0n|Nhp3xVaEX^4ogIwHkmhPfD2u;fCo3ljuXM z#k21-YTQtP+fOmCZrO&Ld)=skpT)c=09*L!Tu)>9m1@m&&?&3kswQMMzVLd(G4Kk2 z!wr#_27;6nuat)-y}IR|(G_e9FrBtD=HBq|clVmOv7gYcgwzlGn})Bz(e7m_-DPhj z@cHpf6f^|T&@eak>J|`{*Oye~wkhqmKL!if8(bGbs)REFHPm_V+xn%7`QcAoEeyEa z%&p@HiZVAqh=}Aw*DS0odjig?OnqiZe?6BIp(`uLgjg8@`}8xi;c;&>ua-dk!#B&G zb1xbF9?yePPyhIvE4Z!qGfUHTxcgIdVuS$~*PC@fs-h#%)n=`5iv242Y{Iu>m2<0m zm|A`kY=?0FPCx2N2(|m9mo$TN-{Q4k>rKxqEPS0s_>-HOLdvRE$S_}=#@K!Xg|G8_<0XO%G0iW{kYz_q1xNRImKE$DWdF8Z}lVw0*-tvfD2am 
zhJltQ+OG9!cyfx>3*P+@F~LO}F7HG>>U$AN0gjGa*9F0`|AAS!n5)rHUm}^$=C(!J zHCE~#%ym<6^q7VKAaHTUqIDa`OVPu?2KUq=zxCinCaKx!9gZ{9tzZ55>5Gd4k+X!7 z43wt&fg!Hj*Hr8ds@HAd6U?JsEI)=<4yg)h>TkD2N(^x-+N$>=a(2Yzvl_^b8+=}w(!;%VhB3X;wQD2I*GVL+GJ4 z7Z&w38<9F1)ks%;b;RJhQz0DYyY<#t`2vD(cm6R!QAJ?=>l1}PvkSF3bFX;PUmWEL zIDF1;0*rB#1ph)6kzeF8s%CXGzCeQ&s|iAqAJv2I7N`=TI?7Wc znnjobR^j|cTyaz*_rS?lDC(?hB>1{ui-N0mxJMK^QkLX*CG5HR?WrG5E%*#N2LqRJ>zdqcsPN zHfw`C+qO1V)j{TV2os)lwh*@elSYy8K^%n!DayZMwVh-j-55h0bN~-l7IK#djcpCdSOWKz)I%<*UW8S=t5{eU$tT=SXW$E35D4Gm zYj`bz+OXjpYrdt5OfXmIAPh8)f+zCjC3NClkDif*XhLue1u>(^X-}eitnkhB15!){ ziiadhCCfb*WV4YSUb6Cxw^FP|aL!fHI2`TNd1wNW_n5hAR%!EzS(Lehrf6LvZ1w`D zZqgJCmr-s$9FIThH86GyK%Pq+YizsfH4ehTCH5nL8XCsT3ijAV(k%o(<1dn)>*-YS zn22(waDs6;(m^bqm5N2#A*zvY$JsH9y?sn*GRhQtStLd>hQZ&Q7^L*5H6u>&J%j$) z*+C$m4L6ywSS0LMj#xe4__H0EdK8uL64=C2#a!5T6?-z2$#bxdG^guqmyF>SVK*s(W^qtgbfwweMR8PiLpPX%|9rAuCGy7fhl(hCYTPA z)Psx;??s6z0}M{P2zHudASq+8Q+7r%5zcI`N^k)CHkXcT=(FOfJ$jE$r^@DU7}PDN3-+|d=seQ7nNND!>{(#9@gb>D)ri0bF_P# zgD7iC^(xkR@1E-~mN_I{FOw`kn8alxKRYo*6Iclrre&|=ZTYwb@gl-UvEOP}WH6q7 zYIP4|F7H5eR+x8?1h)M8j~CVYI~K|Xs_fd}d(WDjjm?{II-T;%EL;XU<#9c)RXC>E zsjpDBthSlT?q#!erWF%@l@wf_R3CK|0-QdstZ1Ch4@6opP$(}wvYML&I94DkKFyYZ zSb?x!CbdgyuDnYQwas>(o%O&sOLH)V9WI!n;l}B+zH`}@pIL67YPA-ch_p&) z)Ncc2Bs>gkHUNTUIFYORQ6NS5A)jWBx27tR6Xd=ET;eaaJY}B$t^_zC7=EX(a}yD6fkjRJ;fY?LZ|fv@c{rTls)dypJm@)Ywa09ls#*U(9R=puTR8qZ z=bAM&47&`=Fly~1Z-$}kmRcWiG6bF-cemqSPJiqNke*X9! 
z`!<<(?b({61F2a^#Wewsvati_inAU*-j$tgJTMtn1K>bZ-3>H@r_8;zicc($bS>G^ zphdO6tq`Twgt5p2D^ZGPY!Nv;Awt zRFut0;5W25Q3(F_2pKrzNH?T`2`FLTEhwQZVUHsICBVfO#CBs2D>6M+zQsK?%Lp3l zrO0ETGYw<2dNi}`*(wTPtvM!#Wsm9MUk+i*xr$D#1ooaTTW9Vo-Lisu!#(Vb@pSua z)tE&&`McO2O6kWo1rEidQvwC5!n>=Rnl6>Z+6%y-cewmgW8;8mNs-|9kbpu{qfRV5 zrOUd6huUJ0XwXyIrBA-RxyZL|5B*-7a1g`J-4Nm19%Hja-=CxgQ`bn^Vzi|X9l6V- zq^sNYi!+41HtbvP$h&&9EiKi`@?w=vvdrZu^`StiHp(YxVtZ`-X>N!g$|?+2?y=)` z1Ue10+jVlI;o}lW^JhpzXg(CQF*{DciU8|8U+Dy7O-N{pw)(VZp*$_?dnfyp&uSS3 z-ByM<*veFr>D{d_{L5vJ!SXwXIYdO=;X|%(EP*XfR%;63i`idAUJ``}c6U)^;dYPW zH*UnzaO1`)R^{=;UxD!&)tVfg4Doj9Bh;}GwONR&3i~UnbSYx=KArOessZqD+wS7X zLsHQKdmHMcYR!ynlZu0l12NH#Qd2jkFikWY!QkSbs*#LIds~96Bx4@8IJMYi)md80 zYZs4TR{#BW85ZnkR-pPGZ}h_M^61VeHmJ*6*x z848vROCXBTS|*yDoVBENfG=bzsqymjf+)gg&s6wYWjnc^i%r>x$2N2 zX}El(sOiQ5yT#&^i<0?W{V|I2-{!x~!7yN%)819SguYx@DCyXQJ+D)E{Yq=HIQgpbbL`@3-UxF$uX5fc1QcN?yI<$IIWBs-gJqG?I z34L_A_baa*6l&`Lx8Y0WU)W_{^O4GM*|!4v*hU!}ceJll*b`3qB*orLaaQacZ|XYL zv{+0yPMaPbd+J1Oo;MnIDEX?dZ}eVL%i5d=8OSFEI@e&tYExtvVAkU2TZYWb6PqtX zzG~2hiI(beZk~4LwJ{yt($Qf$Lb(HpBk1VeLXdFesqASi`AHeCq|CR3XbkB~~ zZq-moLeQ2+T*xS}=DfD}yF;he{;>l+L9t$+LgyYSexcg0uBiP%!4i~WXU*XH^-G2? 
z$`j`(q!|Nk1ei2a@apNe)O%=*(a`QbRh?!sO%VPK*0L--A4_HDd_fmZMiSt9hbQV8_G!9Wf7?H8oL@m7w^jUac3`8 z;zUsr(X;B657%MnUwKm~ix>Yv<7N|dntPt&w@LTNQF@@7Z7xodjfWdxJcADwK@|R4 zUr(AZX~|P7C_#%UJ>aJl=e@?JA7h2+=GJc+Yit5e1YJUB*$m6d8=fU*gf9O$5~w zJV9+~?SFC&)J^nELh7VEFBuTFu+O0FAiC4bsXVC?bW(U}7wwaupPT7-2G$qxvAuX- ziL#Gw1N7OMsq$!~o=k|$9BXe1>kY36zexRd znG^xIdVE!aQrPBE(^38VqOui|vUPpNjR9UaLnX;Qe+aql-hk6_0pw_B)b!)%Wt;Zbd~+qKk6_> z;QfR5CpWIv--RjX97G$X)f9vPLI943 zPY1ntp4;L9^*Y`{`>#QkXhfn`w<)jDB(LiZ@z@yGQ_gOM_?ORg=-u}>x~`?d2TD~q zvbo4R>j8{a)fs`8m~;V2mCTrHd&6``B*Ls>e_CAq;MkhEG(RvZzgv!Yq@dxFtzF_+gdwfTOY&FvQ0lDPedNtIs++x^zpw?kM zRdbZzl0h_b%ESB4UPgqT-Af?;E~>_OHPM4upG&g^m=SJ&y!Ho`QvscK+vbhk1rrRM zYH{Pn{v}MgM9e=<%VpLqC8s?vN}yebNZMiS#~5i$qy&q-z6GOyhqrg|gGiaV9X{2O z2dRpJVBQ#!dFb=-_V({lo71$D0QrPzg^PO17gN6MptJ4vNr-4DuqZXDr*Dl= zi(pP$%30qTw9wI}`#HTcR5xrWU?}QJJOdTU^;=H=I>(bOmG^Z8Oj0G3!@+oi!T=}t zaS7na)|esdP6N2MVWA(i6S|FM2M!Gjc15ig>sVBK)Nl9? zlJ)|owqZL-;2dosPF`fx{(Xn6m=PUq`P?^K!w}NUytD&Fr(Ey(w|b$-;w4-YA&7WS z&Ybr=?i(#z-@)0v=_gdu=pwFV1()_uWB1|9h1IoO6v*&fo#S= z?qU9FV?!ZMp9W5}!H)mQpq=^Xv0LTti<`?a#rinD3U^r4^I9vr^(U$T8?gI7^2jt8u#Mm!M64rLP&QdV~uy+-`iuG%kPJ z!VOi3p)}z@N}`5IQkSiwNuFd8LiQrs4%gJ*l@!*_7u|Iq(cN|IJ>7PRQR78y0QxM{ zF9V!Z#n0K@2`Hx%D_1Ew`6{*->f#q)rACIG;EJ3d{|GMO*3Nu1dX?ddCswX*B*b}9 z%Dy(%=}bYNXt!xOz8gDsr7&N^~bLNb0Z5!ZjcG4=fiBBjFs zuV9B0-14BAj#6GtLT}CoJn7~2j)uyX8=YgsC>>IzK1u@`Vew3MOmuvRwSQyw5Urdu z_j4GisYG~JIg*(%#&rDiJ25xUuJV^G(%1 zrvy(t;Ggf_3H*G<{A48ntwFA}9m@9XTVDv7v*tOQs9UNwv0+iuKXKG!(E{J!+8FO* zF{$~2Iak{N=9``RDdliXbJuoOZJ(kP)e;zWf%eK6wcCO-eFr=ga!tA-?NA*rtEoG_ zW^!U96M}hO?M}8Zy`x_ny{0%5rG_W7_cCt=_ChB*1NM*a5+D3QE1=Kw-W%79FnVOR zj=Oz!Ee^2dJSO$-rKA_&J{$u$$L)F3`YUUnN(v0302)ICSEZM&4{J~XwImX$LBEA zfpm)DSi;YWy(i`nm^x-1v@8G*mBV?4toMk+E;V3?`l6A=w? 
z7jR)q8a<#@7Ke3pqRbWJ^JcNJs}8j}b+f&m8Dc6M<7c6elYvci*Kbq5N@I*uC5Xl1 z0CzPIDG=#fWOV=|bt`zg>vS12t|)f73o>XJsyDRTl#_Xh88)dLJQ9W}ciX!l_=|6I z#5Yc}f-VsHArbmk?{P)ojr}?^xe#q2ZfXf|@?5yPuQ@AhN-4hyPr)=2b;(xr3#(rM zV>EpY9rP;>VWcxGW>ytTU>&yayTFEhf3`9*;W4-cSU+`8mdH^`T%ruig(~2y=sZ#I ztZ9xzn*UW`2VEvPQ-|Wncg9-N(Z}1xos|EY#hSoxx+rWAhy3d#57Z+mN$8#QyHLM) zj{#MHS5v5V{WAAt{*$R;MFGyCq?ieh0MyMPh|sHKy@o~j=g;SFe&ZB+6dI2zR|feSv@twWBH@}=6m|O^d@A0~(dZ97gLT01 z_v)$sIxSvm9GFe}a*tz2XH5ZsoOfjbwy4XcyZEJir3mEhD~eSSKlhtygnNCsHY+Q4 z4Rq5RVN*))jORkUlf`6gn?V?BkIwm}o%!7NsqIvBJAPc0@{(la8gx6C@oR0}SgIZH z@jJmUD{Q);cUmp@{@n&LiSK}I^sRf8oS94A_G0S3Hg`jc zU_v2I&x|g^7Bb0V+XAv`D_`#WA(kjvGN{h!WP(-vYm z(Bqq#H}(R?A24sxuNr@=6LTg(+2)Mrp}(2J-|_YYW5~(U~3hIz%&MY!lN%PND`mM(Tu6X!a?9&1p zuh~R~`jm*D;xl~4@QR0st;Fh^gMldhbiu z*S#lMF-9KpPK9+$9+clpfjkeqfGKpF;aq+`M5@5@Gj^Zpcl_5b!`py;)$Pz*V5jG5 z9{wvVJ#e}hHx{a?q$|^37v=(`DJ70KpVjyZ2bd8SOq?U2GaZQ12v>#|Y=V4+2MoWR zmn*rqv+2cIG#x85$Q1I<&X3OGh0o8!?Xt9Za2eM1r3PX$*x#yj;#4F2mJa zLZVcjJoX&?N z0D)H(ZeC_GIF+qvR3Cr-0l>%q+3-v11^4{V+tq^O2aV!oNFi@SJbIE1w9!2M)J1%R zl2H=DSrF+PiIydJ+3Af@JLTXPXlVw@;*gvR-~*f}VM2;u++f(u<1G+J$1n`$MJ zkgV6Gw;APW94qe8%PfJ#_(5u}b2#q#V8>(HwE~!t$K9j|Rpo=k`9GJqweV4YB;$BA`@E` z3X(vvMk-N?;grsY@Bj1ylzr@FL9Bf?Xv;hA@7P~#8&JadxQqU=&&EMAx1Y={5Xmj>0N>aCsh#2kF^bP+(`FuPTTdn-=uX#Gs3tdA(9n zaF;@#e$EwqZX5RV+6OnfjMVs*VkR_#N6qDNcLN2q z9F|I=Anzswx_cu?Vqf+`&maUgY|~38B^DP&cC}OM=JdSW&V+(_f7z7fNZRAnmpS`M zR0h^%>n#(}Q=8_#>_dzaE%NMmZbwXTD*xghJF}~wuxwX{UP0wv@5!I_Y%2D$7C(}q zf)|mb9Md58u60SaJUoCai}wzSOhi4d^blv_)!kppoa;kn|ap9S_T%~ z8ZhfK`@#iU<1f7?3>Iz^EUrGM$0HzGyLY)%&4-dRPo3&B;r@w}r~T_R-9@PF<~v;< z)x;jOFy?bt$jfP!A$_kl%Fj(-#V?wO#f9Mll7*Mma5Y(Hw)bXS)LvvgD8*d3zfyGe z1=c8o9uv@cqqz&$wGLx>Y@ZKtw6?R2svk-|Vu{{o9IbM1^{W~Kt(tUJ(%o=MTXCYY zC50P9{^ZWuJQfKQHs8u_>1~jDr0Xr|h2W<;Y!biebv7lEB?&^**Eb$2U{J$bVcx}_ ziYR0fU!~f!U=qVhKas}pFY#j(Lvxp9;wt*SxHXA==CCJrA0gGniFZ@^)ga3QK$VYG ziWr65chT0d9I?Nnv#}K3Sa-dL>$}&n;*>fy6f|jP@gipu7K(j9dFHe-jo- z#LKdKFYLtcO^w2hf{8Uk@*MG&|02+Wi4jbKAAs!`E!M~t(Z)crg&twfoR&Fb6-@*?-ApLSCrPIOQh`4~DoWl@ 
z;EmCqZ2y?jRc8htA0Oh+44+!rGi>N4u&x_xplfG}-Ut>)X8ebrRa8rb(wT2lRqxU0 zk)hMXkRSBeJ=aWvhOugB_}0?SWpc_?3eHAaOk)qXf~qTJi!A?l)Y{wk`(}U1#@jdW ze~^j)JMQ*B(5%L>^FF3Os9TGl{l5Us>i(a|>HiIy^*>OzbmvOxndvE(mWbN%dWq?K z$!X~ciWkZ1+Iwj!@tVCoJv}SLaeSX+vs`pxb8;>7Li5!O3{Xrg3``(JmA)_z422{w zsZi=9m0FEfN>LaD39$VU0BJ}TvTl8Ukd&Nxp8urX{(t&#Va%95ezY}${a5I-wfVp5 zJNO^^cK$=zkEmq5iT__w_I1M;$0_KfB_JkPU&6qkR^AdOf>gvP#4ysKqHKgszpPaS zd>tfAbv^5}d=+dB!*sDz_xv@;A(f8eb8wSWmD0m?vY_bp!=>iun5Z!wGC-AjkoD5j z`UVDmxv`+c(UBN4i+gKWDma+>;xjXJ>;$8n(+6H1O^i-UK?Zv$K*698pXb8qHt)s% z&gD~uFfE5 z#XhJ9#ZJ&;5a^w->QMb(vQGSG35xwE>)ihpPnE5!@&7gJC;ubs7b$v)S{FYs~$|JMZke}7EQ{-=O5 zG~-kMH$j6^_xt@a-_`dIghovH{!grPT-Gv%1IYiP0Hi&A$NE1o@gP7z{Qs|vWhaaO zQ^rjcWMumUQ0YXz1W_2`vS>CL(t4X~nH7i_kr>O!uF~_E2eFIiCmP|bc>=gG@O!OV zZy_)-w0oK`W)!sy_2bi0o_)68w=&WarcVX;nQBX=Sj}gk3ab5f*R&N9CzPD->fQZl z*a6o?K34R=g*0tYs|DBVl4-F1RGu0_efTIMv3k8N z{{s1+-_{cV4HG*NAfN{)ARwv#$D6Dt?P6qNYr@K)$3SOnXY_x%;g_9$>hsV4zWuVF z&p%~<5&cCJ`b9K^XcWmfgh?z`$T$=ln)tgYQBjhzD3ZxIQE{xaAW=b*(&)b*G)%LM zrZ|sZIF5OeW1OZuc<_&JIsW5>>zK#)XXBlk##_JVTz+orT7G`}dhYDXkxC?Zee~?s zeLnBXdESTj>=rWXGH0=|EvsJDL{WeGV{{+kqGgpKrB~`zcNFv(wdmJRU{|yl{qY%B zGf>j>asKaEGW-4g<=fl!kSX1cxSfPL(V^VkC6G)Zqflz81XD4}qS)Cf*d%*VpRM#c z|3%5U-z%zG(PEzgE^a+92EPX=P0DHfd_l9*vBev^NMmwvQ`u4Zz?r(-mNJlc#Nr*y ztv2ClANye4-ZBuq^YoY4pLQI!DynXdc16|nHK+S_d$0hV(k+?Cc@FSPhN&4_bC`*s zr8WOY6s{mC%;W?7MzlrcHmt~5r@Rl6&5yf1a3pa{qc<+qp~EZcrtlV$^q=DRP&;Ji z(rRv5Q1i6EOD1fGEbWgidEcmKjI9e>RNW<9A@pcn;eoNwZ47eqNxcRNY!AZSD95#+ z8tKG$w&nNP80j1MxW~6+*tYYWgM7`wWYg8{f1$qxb&Knoiv_QWE(_gS7TTkj7htxr zFt3U9uLb{9(K{ZSy|<`v!UorWz^p_O3wbD_W)-gQrBh%HK(Rn}3ZMM#-M<#o5?1Ze z;iG+*Y@oT)_pxy>2clDpXyWn;EIFWA6*h5fKBB9S=rn9M=O?_nX&iSI;ETi|apx6o z4f8kXb_Vx+cR7)ZzoHw}ot79u&thKy&qCoIPwN`f7Rnd2yGkoh%D3vb7*{gA+vW*+ z__ygl9z>WnF&!g6a8o>#ogCS~CtacBYtgL)Wb9FZi7yje%`_uKc}ZNMyn4Tq zsUeK@Kh436P<6szlm37cS@5S>96R91cbL6@%iO@y?~~z?y#sSqQe2sCQ+I|#IpDO9 z@P@(5i64M0pgqa5+2;JT9CPNYvcE+3ujr#8X$a z6^2N{s?fQ3_{u63sVx4kFZV9lox=_4w<%k~SQuI5(plk`9ahL(ys)&*{7c(H*Nvm1 
z(Aie(fvRy#=-dYU3X#Mvm+;Qi4A<`*JyZ+ues`ZULQEKeOU51a;DBu~w{uh-^EJE0 z#CC<=<%2S5ZQam0jw;E;F z6_?o3)h@iiAG3jame(pUMb7@Dz$$LmbkSg6uX`XXo^?d^D&-R2c1@Xh^Tqe_Cnzh! z0+@+_RTn61%i#jg|Jus^v#1L{hjhTMC%&uu21Ytuhc~sPp&h`4%7mk}c*Qr;3;HWu zdmT=BO#BoCEwOC!PSAPG1F#|}>8Zc=7&GR_74C6EOdnc&6s18JAw*UhyL*A+?heJ>T{aZixVyW%yA;{D%f{WEjXSqj9_~4F=gfUL56MjO zlB}6zlB}%t`qVR4xJNv@!er2wRb>Y6F{yDbvNt6nsA-bsV#DTDuwU-=bvnTa zReEGG1(1v;RKkub*|YIFz^@#;6N(tQa{9G#-DJdid~lMt zMCEN_j9|p+hB-Wmx}N4(K~~Z@5lC|^|JL&==_(?svYkOoV?#-We61O}t0wB-`Q1r) z3z7hzZxw{y&ai|X^iA@13)&8(-bjgRY~%PkHvDww7Q9+7#Y6KP!AcvfG%gZY_;Qejqywc102vcOE-mj7rLuK)||U{MuxO6EOof z(b55JKA+g;*9WU3Rwr1DuCD{<9|MHnE6^SP;}s7z;z4mkjZ~1TJWU$f;R(5EX#6Kk zl2tP_tCGqn*Gk&i>Vni^)A%4Y7NoLipV4pRzLT-X5#7SH`_snW@$QE|uJ%0_%6Dve zt<2U$Dz)Pg4L+FPLb6<7it6$jUP*uS&h48kp!7JM-MIxi$)LVddW9e?4}SCff;)V6 z#I5tM>Z!;G`0@dJe#Lm=1sffP4a;PHTO-zh)9fOFX`!vi2$ADqbV+U4QLTxBP4ZK_ zNgt_UY^+JXPQ@&Y9R3AAe6fmbIBsI`W^c@ZpmXvRpKBf)m$UoXLq#4>4u)r+@>r9F zg_6aG?^}Ul_T53dP_9`|M|hat5wKcui=i`Iv?v41DuS{K*g1%BG4Zv1bkiaxgc9Wd zT`%f}tW9jU@{%Zr&imw2twHBu$M1}cL?wS=5ZdfOgDNxAdsfv)R47=Z~@2(lDg{NWXJ(>#r#xPR8n zx6JW}Fo%HdB+ZS%V1WYN`%h$kdQcYCE8NQjTyz3=R`JZbC*(aERqBy z$cl~zz?$aMq2)&Fe><*~78AJs=_htEK*e`< zFHpgmao@oOzLbv|Se_fLti{YP*xI#=Yv$kR;twmUQpjC&TMGY`=5fFoRwOaA8+Jcw za!nGG7Yx7l((*B$Z>q2-ZB1~ao2FhKOlan@fm`WNZa5XfaNk70sJB_tT0+~{=9I%s$ZnPf3E+z6!Xh!G zv=Ba|K#0}Qz3hh}QMy{5Q$pOtf7?ZkYiDQDHQgf6+hQr@_>(w{onyVAG0C6!Aw)S| zSgx^*vEksM5PVUeExt(+*bNtAJ;>nBetQ3tyFZRK=1-&Om0M)VO(M@AopTOv6XFg&2(#ma^2v|>2HVFJy~On+RknS1GF^tD4)B$BalrtZ~egaZ-n+ zuKIkEYdkc#m@AvwCf}VukiE(t|IzPw!MvL4SVg{(Dfd5%`!F7bjS2WbNNYfQ>GFw& z`V-X%XPB~;&VpX5#dD~T=Y5SfO1q7y2Q&i8X!k4(mbZ(j75MAl_d? 
zzizQ$YJTIWryL+7KInAL8yc21z{axRb;#@Z6eiRMoCVdr3$WXR30H^lq8{m;g?9ef z3sE?m^Vpz@2}Z(%JhKZ#4rToDK#)yif4?Hp52CYi5Wu1dj2sdSX1RtcUs0U6Np91gq9^<9 zuzQbhywi*lg>LlZa#t-(*DFl&uEjejKSGdhbwRAZ`1411t~J&i;$*xunnO))**fQ) zu(RybN=C9gzURMTSSEEUvrMg8b9hGQIJWhb4#&=a}v%EI9+Hgc;bRZ>1s;`AIk=AtTV zA;AI;>^nZajHJ!i5dKs+ke+ky8KqqwHcA!#inXKzm`Rzcr_Qj+SP**unw0>vOjtOA zGxXjinO!xl=@}SMRaP!y@BHi*4Q%Jdmt|fb5^5O973G&%jni{ZUGGfKS-)3LJs}YR zOIkfoE(^FM=vU95mvTK4E8P2RvqU!FMak+NI5T1$+!ThoU40`cb_z+1 zT1MF-t>gaHiDz8EHPwDE(pjNlgPel3PRe!2fIt0XMs^iFg6>MWXxTkY!fY5fndm!k zt)%=!RhggG8fK72PJ1Ol89NEaXNBUjpFKE)K+hzdvxE>JP0bs5o>Rf=KAM$r`V-YF z8>H+p`(>jr1%A~XA;6?y(kJ8mfV*hTHE{~DOSD=ix`)nHdW<*N2){UDu_?Ae-$1oS z-U}zdQcYJ&+hKQvV+m~dbLGdvs_e}A_Ly^AquDiwgWt9QkckC*eF%P4EP9Q150h_# zlL1LbURpr?A>jm`uV4I#!L-qf5W0r}Ww1lkBHj(>-=nX%wqnU8`J|HOE;RX4tgFj@ zf%dG;Ueg(xhwlKClveo(vf7Yfdkwjcs4y z8Hq71Q+_OPO{f?)y2sv^j~}pJoFY|Gts(hS{nI0B@A%7rljJ62`3;*{w($pXzU(d` zAO3@kotcGWT*Kmh$9o;8;?GhQ&17u(g1>?x#%47-7qp8jJPtnUS^S}8oO9qyUMN6| zL?LQK#Cnz=I!hMT9!Fr^BhMAO%?@=L3!&Jk71yF*m{rGU7deUO{1+1iZL=D2!%Lzc z`K&E!!EO~!V@tM@s_G{kH&GW+<)mUR&qkQvRs%6cJYaxa{XP7nUe*ZmokgT4R94a0 zQ$8(L?&Ekpo5a>PX;bi;eIjG6sf(*oQ}Agwe9y)iFWb1nAT0}|oB>L76wNYyeO&C} zzA!)Z$%^s1tW@)3fvbiFf|wNYg7*5nzwM}Q8xu)Hpvy=WZo_~RPdP8H_-001UeQA< zu;K=jU-bO`#LWZ=% zJU?^j)zH?q5E?80F>x<-*mC?T7 z3)PTqzowUO7_v^GM>=qy{XMdiJt72J_6{46wsAS0?mNrIz(F(Hx4$w9z>jJFi@uXt zoH!WM5y?k5WS|w%G^LQlU7=fLZD;d4F1H0oY>?AxK%Hr5pU5?s&h_xOqj0F~sCJHM zTe+%fw398d1uf#f) z9h$Yw3fYeYN{Nmz@uwq&+zZ3%ci56`mYtIeVF;T5pJ*L7%tsyn-RKL^+EP2I-A)h$9Y05h^S|2sOSQS~+69kTI!YDS& zQ7T*2nXYJs5-hsJ$WcPI@V#)FrH$?5Vyx)EYE0l2fDUE#CgdAX=SXviuZMgOo~xW> zm2=t_Z;_==Kj5gAkYrZVf%4fxQN~I~H*@REqj#PT_njt$n(?YxRZvOwm45PdGoLm_ zI1iT3N@kkl86(qkMs=xRKLd7PlU8RjW6kP>c!^O z5VCW9G!JFW*|P>1frMQ-20ukV>nSxP>O7w)0gsoK4mlIoWC(`1$?&(pscnD zCjUQ?K1@y?k?6lJ#Pv-{L@y1388nsHpi>#{o|_-HoNo@-l1_ca@~fN|vi{6hsw2dt zMljTX98~&RvO=teCPW92P5GIX4RTMviH+Ik&+A5W#~r+X^Agwb=;>@3*e)a3o9Ur= zR5F=GTX1pFgqt|?k1FtdJM`bS+J?or%_Zj6dKjf`_NW 
z&%Ld~J^Z}Bytyl?*3+DRwM9H16Y3C*LI$9RQjUWg@&KL>j!~dY#kr^#^yQD#0Nzqy z=U;ebe9WXbJYUQ}6-Chcj{D=tn$&UK5(nc2NMfk>|OXiaiH(ZJEw6ZzV|vEKuqG&7+y$7uJ)v z^I3d2+(v^cH-cTy-HSem{J0J)_U}HtI$p4d5PegrbYit8UJ2fWx2MdIi(!>{r)wk!J&@%*zH)u#e0FVSdT)3#2=*lG!)Yn%~c zSytV@)8L9L%N7g$lUN6$JE66-IH9C4bo0allE@W|Iv4!3MqCz{CHs#K{%p2{=I51k z<Y{)c6H@kh_tBo0G>D@nTS(d$H>zAURlSNsnaEkHspwwmzG^ zOt0Xm@(`INjoT(Qr}64K`)6xIel_!&U;`%dSW=@!(y(w+Nc8Jso*HqC#>NkMeT5Gi ztObvLPdK-}<+V2C4}}`5WbX2hoEbaGK6tYQY`BAyY=I`)Eo>)b(yXH?@Q}(awbFB& zj+w4IysTC75+JF#A(xIh!cRFHLnDT!(qj%frkBgm2ekn|qx;nipdcP^%cX8s?i^jP zp=Zsiz$R{|>Y?ma9zIYsq-McK$I-mfzLe(?8&`%5Y!bJv)w9UyUe`Iu+Se)7&DxI| zHx_CAP4cs4iErH64HzOGI=8YcAQ2PMB81Sn`4w*wA6v6mb_w;BV(;CmDY$|2#YJ@B zMwOh-JXjEajwEoFM{>~vtMP_Y@Ao9%mw7=ZqYmw>&UEv;HSZC30`n5^K-*A1;Cp@w zamB++zkb;zd80h9JS>Ie3U8Y=QdX317b%m#zRI{e2THj@n2>pOVb-Uw7q1x0FR}dd zzseeuwD{j;mWXM4TRI}eZE<3=e;pEGF6_4#=$m5eBgI0^p`Wl!m+k*yV_`z**FWF$ zd;f73Qkh>b=Mn=?k(ZR5afVqoYq)1kxlK2}n88H4wS-;GnjO?*%`c$Qsk)>X<<2Kb zEg&)Obqrhlu0$UxPqdWg=X3lIxrh58WkI_vFStLdk&LnEdCSaB`SNgskEqVnJZG<_ zziXxOtwrnuldP7chXKptiukx2TiuKTpP^=gC`4S-|4%@ih|aFJ9xv1Rt8vYjcUmhs zK8B{%4YhW$Wrk?8`mGKvkG!TP$BaHT4gi)~awF;i)xgwpEn?

      5nrZm|3{3HnPCe zJP`5gVq#*XeF^CAHy6P@{rIh#hPyqs)cx)(z!`@`ZZ>OM@q{f_p(WTFK-#>yVm*qg zFi-FUOkvj|y){Z1`b^1&|90G+;zidqn@mE=Bm%bjOv8%)Gkw6o<^H6;iMZ18~Y0`rB4bpDc~^h%E^U2SDJ-9T1)qK>x!(+??}E z)JJvP+;;HPtnKq0sT&PE!z?sh(iRF`r7Mn0Xv>?+a*Nv%$!+=Z=}fNnPO1+TYWM|G z+m_K92_r9+NZb}$Ie8YRSm&UF)KeXUcSvp2M))1m62D0f!>?Smhxp|yV{7rW#D;G; z9spm(%T=$<uCuxh}(XP2D$liA)7a+fj?_4I5rL3yxtT+x=l1P{hg| zh)O$j+d%4@ygTU_>=cqU@GBR!xWx!2!2II&4R+-Ay5a$+KlV2vTOw}ydqxLvRLMSN zFEMFBSdT)lPiIG4SMSIlxgT%-59l6>R-8Tfsrwf*-3TxKpO1Yn#8p@S@9JY(H#csn zxBUrLvFbDLQPKg&|Ae6XY-@6Q?K<+_!Qs@PZq!Jr7b3EG{UX`>bisYbb_@2eJ81__ z?gCr6pRrSfUk7}*0!_RY>6X^+W&J(|6G$)_pS%qIrQt;j`i;V7g1go_w=cN$#VSfS znNw1j7oN}gaurg`_edi8JKf%lZckE}{YeQGT29Y=Y=KlP1>KWS-HS>(cX@zH;WTM^H$lY`QXnHoQ%}NA# z?Yt-H&s0vA4>8^O8vQiyzbFQoKWTyM%9LHvW}6W{0~ zZU0r`)im5E^o}O&8!@mLGpATMacOYNjh3EG)isuBX2lj|gUrX7O*eCLSJt3@s-|(x zQ@>?FGI!Kew~^Ae3oTPfdl##EKXrAP?o1_NB~3`*xnyz}>e-h|KYpNgx=IFln2YkS z&tEFkQi|6tFfb9o2e5@TfvW3PK^g*j6EBn+;E+)vP5{xlrIhvb(quJ^8o<4bw`iWDV?fQ_pqtWFZh!B!QFtSN5fqJNoC+*POA| zrDc_6?t$rJHk>+-Dy1Al*$r85&8?w%AY-A^ss>6NDr=#Dfo9@lhlf?5W%otq@8 z?7kzHYw_n$wXE57p3%UHfvGf)h)I7In`N zgu4Afu}sXfx8M8g@lI7#a$>#oN`$fL>yaU5HIJYyA#-$}h@jpQ%v(n;L4HH?n?O@2 z;j9K^!Zc^J}0(6qD!cMeWBn4uSBw{%6qJ z#;6&g={Ys)=hH+aMV+E_ZYP#MxjP3b;7~IzDIxFmQT7H9+Hs|O(`)@%D%Bav@(GCU z*F-1IyN1R5Ac6ZN7IK-n$H)F8zBRxY_akE?y9GCOu|-k8pR9J5*?1??G?>piJB(Fc z`yZ_6&66gT6g?!C63}-mseRIaPuhLH8rU)1cJarkh!*ZE5K$u$3vMuU0$`BrhVQFb zRM05@&U3ST-8iK6g)lN{km$5n$DtVC ze<7}r?Tz9bip{%yOEa%0@gJh(FlSmxng_1R7oTpC+7WJj?FsR4ME`_>dV=@18dXg{ zuvVUZBj)PhcF!jmQ@<^Y59VAkvl_zD1(!JXS@VW9?MB^i6-f2lu%s0ls!-djcZ(>F z^(Uiwk0vex9d4o!Bu+Xre)3PtNsiVDThgw?il2X-6)cQ zmh*6nTcSRVBOgocw{>j5!oCYQa+Cx6PK5Pp@MzbQc48>e{9^^CUBpcmIjyd~?x z_q+wo`Ou;MMp zx$x|CzGlb_~I$a?*Kmpr0zb@Oi~+>ZL2pmWw%pnu<0{_8IH zHJ=$ESl}!IpY@6{U+EL8IJ30@U99zlq9~i?@bHXzFS$QMepVivqA795u8RF>vXFwx zvage-f-mHNq`*_2`iAQROZX4-edcl=VSh%6CMb85YCIHO3L%EGOELT(Rxt-Pv^^ z*GrY^+j$}Da=b5x=Zc_Q^`BBw*WDY|Ex)UGg$Bpwz44>F?stY9CocC20C{bC12?9*Dir+Y;RwRxA-F$! 
zlWN{D-gAZ*Qm9w90wnz+q%8E_$k}X(MD+A;whBhyebyxkjt3iE;E;cq?diHAj60}; zbTK(AROoV^xCgsoZf_%_5nJ|9kR)EgivymtEBQ=~zC_jFV&s-R!iB3yFUIyh|Dmoq zO|DTadbjwO;l_sk0E{w!)orLj*u{3dwi%7dPlQ=|V`Zhe=MPCIFZtEO;;c8`pLs$g@FGq;2L z&v(;{%8?DI+Yl_&Zs1l>(C9Th(f5L^$Clej8cSj6X?g#RDbHi%uj zQGn7pLZ)iVJdzvf_g(>co9JYwYD%%p)*y>0qMzs{$zMIql~4jpxN6^6FoSgLC&n6$ ze-A_~Sa+s@qsJT~X?gV*ZMCP4>tjhvx9ODV^xylS0wdQjhaVGO#tfHU+fiQvDpy?JV&^liPgB}>|VjS?J}tl2iy{%!Dl$ki=w~x+bAV}CP{tn zOM9~l1U8U}_(I*emG8;#Pya>p`(9)%K=bWqx8P67VrXUliNTk$lscg}+N3?eolv(7W z;`>6VL#oCT*$SC=1Fe2>SvJ=U3&aK@XyKuRQdH4r?%G*Y?KXCjm!P{YT7l22BJt*} zIPB^fq)1uM{JYjHdakSC{?Nl}!hYGQr&Pw54K2rC71i&^yEWJJpTx1}CE~nyv2~Su zn_u4X@K1guq-Od___q7@6wYn z8ydUgduKBvRK3!$Utc@VsZz4A6FQ<-U!GSXI2@mtcvt2o_QIu}_|fSZyE!~Q-TSq+ zLVmTaq>cbK{C|1iXm-CZ81qsMvGs?D3J66ff^e;*Gk_LI$qnwfRlYVm74?+ezJ#A6S zdk0!Lr=m(S37b&y*knJ@n_rD|aJ4G^y#K}iEdEmx6Bav{+$OF(?x!bXr`>pA^}Rie zH0kG#8se^_Vx%@Uz~lJ9C_iMJ7dBS=%+fA2Qx)#18gIsJ-YZ)RGWUvTLGZc*1MPXf-8DuBk(n=#4I&$NuGW3{~N64RH{EsjYML zqo$Lu;70Q*NX5SY`G(NJ%3-f|k>}SDCTUM5HYK&-^s&&WHo+`I^3%?Hlq^v}X8pRK z1N`_KglGt28*y^l5mse((3FYrfvYLIqO`D9vY|%;e5-uWl&Z-uzRmH?Bps~=M1Qy6 zDQ641f3~g6oR_~;J&XX2;(-@?#T%Sv&(vC$t)hBKokK?DK@LCh`#Nq~QVkovfvt{? 
zfew8)a?;#YQ{J_^W=AYnuWwzGTImP9zm~iJ^v)t$ML^e|YaZvKEbunmF3~ z;@Es#31N#L5gmxRT(ot^!!(?N)-~TN&TEZ8ssGL;e^4vgS;5F>GcNE-SgHD*s5=sC zNLb7$RzS1E^nk%VoK@iYi!RCUGbmfYpK8>)`>!oRf5EWJFaQ5L{lB$TRI%F2D}#sr zHk*WpJaY&R)kI*)zSQ-oymAhyImpnFS5ekb4(BP#y+z5AwEAO74W~r;i+M%q&ikW^ zRd;D;^m2-5sU%|Gf92KcJnZFhvw0hn%=u)BOHnGEYv*X6sN@RWeZFhLLTF~x&ewCz z6?r6Q2n?wP`AK6?)6?UM2UVttGk%V+;Lp9_P_C%IBUL$(?%B*x&~cfw{COY+2&YhQ zTFl^_S1zrG&TtH=3DU`dCxT(t5}n+%chu-jEdJ$o(c=oL*O`{3;<4YTXpX!@V~WjF z$~MU5nw0%R<}!)^GgVsAsm3myo4X%Ua=zu6o>+ zv>RtKGmQB!=f@fFA7W zDN?XYBtzUcAI=&f_j^pv*V$|C<#&ahk#C4!yubKv#C?m`zxyYPs{Y&bZ&wC&YJ)-Z zVn)4-g(OC^rZ?j#((2ru0~%axMyqKmp(5?EE=x*5cC))|Ts0UB@^smoBhZMGbU&H4 z!rUTI19bhJ1K!Q}rh*fzL(Ro_c<9ayFV|(i()8D$dBn%p+{8jEZm;ScA!2vq93*i) z1*n`bAD(g%j0mFwZ6d>iaCWAT|9oXtj5(})d9JzNwUQaM_dHuCbwK1!3F%4F4F=l3 zWNyVGDB_7zJ0=Tsup%N2jguA;CdZ10+KN{x(aKNvp(PfPQcS6s)&|-o6e5?jf`ew6 z{19)q6P}*)v4&(AirOeMJfD8S+>P_q|5i^J>(G<$^Q~&q!8~mXJ%^aP+jKrEzyB9q z6!Y3&z>_^7p6g1g}Ufxe7|B z+pL722~!uwVX`IB332yygOm(%*{w1%lbN9$?_y4LL|9sZ{pqC4vy+;#^5yn2=YID|niVz|T#q&w*^V<6Ie0 z8mciPZclSnC&tZeVyV?hl!?kQiaISE0ds2f*n{QYCn4qK(l`GVV$vGKA7i|HQ>Q9- z%_zb=6b8~dHR~X&TlWXjmfd}4TM)%@r~l_1wj=`o(yl~gigBrk5|;t|R8Pu{&q=r} z0?HXjw1w48PcMwjwk3EbB1k}*B(mdDzFRCZ$tqhH%im(D6ck~^N%vJ`N;yIBum#fV zDON1WdFzJC9Lp%H#Ed_TEh3aeM&VkkbnsGpBo1WfsOYLk`BV$4B0YvE!;rl46B*L+ zxhE4-@v!i$H{Oas#^Ivm%sWJGLq}^P4I_5*pfhm;1ab`>l|Z@GpDOV>Q=F5ETBa>E z7H45{*BsKq0QG6_r0o4Kkk#bRhnUx)A&e3=-pPh~!hsF({=J#uE5e;fmoc zew;4E(X$U&zJwD_{8*N*P~Gg`%fZ5$G?MShe=PITYKw2N;B%;E076A;tW0>IP` z3bkD$_W}G*HhPkGCcZy+dn5lCDf_q`aI$`eAKb3}BWRuXg9n~uPK6L`5{>{-;=+CX ztX;6(mCl=MT73dZ`M)kxKl{Nif^w^ENILkRN}f9=wI;A&MhC458GkQ)L~L5*7!eAR z1bY7v>Iczx{G67WjGc4-t8mcf^UZBGZf$i?7Gdp+zDw{mo^Bn^tq2(z)2n zP0YQlX4X()`Z$Jx4)_I%B~1{U+5#qfOC;bHptG%IRpEza-r8|9I`*e?2Udw;2E@q- z@S?`p)Z~>EGH}LVb}Kp~LsW~@(3Ptt6Z(DaE4sApefgs#&rksNRg+F3AgY#Z);e0i zPB5?qa7U_wonjZ+yy5NLQ{(0xbZk#!{$+cLC;HN z+9wW#m1nQ@Ggzhvh4+qn6-C{KqCse?XBU}b$cHf2>wX0X;@dz9A*f?u2OyJ>rri{m z8frhfz8`WM|LovwZ!ybm#{qm3Bhfq4uzm8bqLgW7W6$VT+nH1mxDm>%Q9cabx2OLg 
zpN#y#%=o%5k?P_2tXJ$Jh>;q%l=C6jSNs5-=B)bNG?|)UlZeCo z%;XpDd&Tn(Jso_HugQy(7E3L(=T@dTmj}0ng_o3G^Yo>|Kj13azyG#0eqrJK9r{^< z1#z6ROA&9{;O7ttDYzGE+5^I0gw1>_#Qx>XE6EiX;iz0Dc5g={YGyZz$rq76Bgy&& z9%z8V?*n1a8eB6b^ug39zN43^f%xV!v?ppoxIul%qwx|hdHdg*`T(BiqlUk$o$cxp z<^IWio9^;u|D^D7+vTEb>W(l9t9JN(BC&{lE9tmcRDC`~<&keu?FF&+!oT#|?6~SP zxCSoTAzo}2f%w}_ox1bphwsUk+FDMYyMC?dYPFoNc?x+Ph{a;PBaGE1-tC5t@P`yV z()gtd9n7L$+upnl3x37~mvWbc03hmD#(v=t0x3ZtmT;*3=4$Rv`f*~cgRD-2H zOz*xZ;{|uqSk@8la91hY1!Kuk9<$HvOA_}*y}jC0#G2zt4+3DdGi?93(Izxr?HXTh z>>n6|^gbCUcdG7%ep4KE_a=6G$2pnRZINGes#f-m+tDtOY)Pm-f9I7&p5-scBCnsg zVFdU4x7AqbSw8hcfu=K>6bpcnN1Sy9}vkN$%ywP z2qGly!>DxMs{~y4+wBg$^wLKw=YzA#hljS#Gn7RD97$=j+k4G~EkV-KQMGdrTRYp` zRTe9EdLP2PLhrU!>T*7EV_#M7CBb@?Jdo#xgO;1l=l=n-=mGKQ%D(D1hALnj0|aw6Ap>~ z44a7gXc6KjTejCv{Cg6yW zE9jB+VnDI72l^5APN-?xXNt_oDZ>u9%YyLvbqu=vu(=2hhuP`ZQWq$dzkl=drZvvs zd-TwO-76m1=#`kAoo7O;WJXbFHiWmgU=Sn>Qs^Sj(fbru>SB7gTSvdzel#5!@oaFe z_u^iBHSmn3T$H-n#^AAT>;(>Z&Ot0Y3|~?2WCafH?VhN-{AsY=As7@_dmIj7%w@$c6 zYU3;**awgVuQ;_>=x1 zMq;PlA|AMl@3$ebxFr54zGm@uqsmoS-asAoOb15AH`s~Tfrh)SAHu3j;t$kSp8=SG zW2{k_y|eKY`@@+0`3|bg@AL+g)_BFp@LPQezh2$<1rULLhkA`LFX4h> z>TOT$M%JB3hn+yJXE`0z*k}2waXq$iFM6=DZs(i)CdPfJX_Fcd+>%_Ypyr1a>o6gI z4~W7(bBMpDT~#@%jX57|93Hh>WKh5R0YKU8{d>#o_|m*~a>e2xQx(lmGdO^XwH5e; zIz_xln`4Fl(|ISfOtxkvDLw^~V5;Mp^6|A(>B;z&h(}bvHyYl`OGdA90vTMuuyZ_H!n9_?>G<8JoiFgx~X$GOc9!$16SxuJ?@!iBE1qbO_ z!p4wDK9oJ+m^f#+|4;V_trnPQh9$W97vsW(JAJD}n+j_T8T5rUhVqVQE`=F5K zH%NLoT49h=_wcABVC*jqN_Pc`jV=AC)>De>x+7D$S!_7L{E(`wPWigJYxp8*+7qx) zboHY>l=xY9we1vWf8HmoET+0_E$y$*wwZ^s>L1=FZAyb(Wcq=RZFSxU^!!8MK9!Mj-AG*hg%Zk7gXt*G!IqHn+qiwfZ5!21> zMgp`g;w-}-(J}z;Dmna5^xxpYit%?`DK#+^ek88}Sc}t`wqXJt7mRO7oJ<#AhEs20 zr3gt^TJQP1WOG%`s_djTqH7zXzA$JA1ZRU#Q#9QF^uCW=_-iXpMQ>*wBotWH0+h$b3b}HNvf_s zx6UGwR7xwFAVvE#s8WY>>n=~{c^Niw%n9OKv=sR^6YbP4@&AH+Fl!Rnb`D z^#ujsme*{Db+(o#^!2F1HuV+AzQ7Zv2y*M7B`I2#rcJcOS|iKet#Z}S>!*R*{rYw^ zOTBc=WnB`_9)gjUu%4J@fdWY?eL5`N?K=Jiawk;NGc~bA973UmM?wUM$md^Xsrja9 
zs_AlzDFgOMYYJtiBD*;PPaAfLGg6Y5Jg06bGx|843^EGe+h}6uIeFb#M(pKe{x#$y zl}ye=MmC%eoK|UDh+?vjW*H?#p>Q!YPh#5qX@-$!>zKh~X>y-l*r6{dD{et$#~7rt zQ-u54a~zw(S|+D{{I$#F`0e##oc-E>YlZxVm$oX-wTJC*>E6d2RQt=%xE?3HD8p^i z^?OXl)ut0ZWy{XGd_nKKjbcnxn9vokM;dp^RsM7CQDId!a`Tc!jVv9vt>Z^%{N)7< zuXIzQ(+l|g!Nv%@7aIk;Jg%w1t5(t3hOFenG1<#dXPg-Yg}Xddb*V_aup?UDpB&-m zg=&wz(+OdhI~CiX&Ps!SR zsq03)vObmxmwDFTTMuZZ|K*dUFDuY9@{hJ7u2F#$EIl0WIm4FI5syYmr^(!-`Y|ui z^^0f;N&paYOVlW_i5(uR>Y6vWtIsbuq?)|>Zi%gidN_@B7Xwmynfa#N_wequq`rO; zed5PMv%cv3Wpv(zt?kAW>_~YzlP*~otbcgLnUgi;h7L$1g*$^Dz8k|LuA7drTh&B3 z4mdeQvM{N}<28DhQ?ZPL5u%~I~%PXuk@je6PQ#Pcy z4~~wav7h5VE($vz3A#xz*w1I5JJHV~@H+WFzoDpBdHJj$fnp%WnwnEH&=9ZQhlI>I z3S-9Q&iqwUkI2epTJzC1RGAnpz5MCbn&Fo+b`W4w5)hm{L#8MLT?%{+jHUG?TzVJ__2(3Bj6pQ%U+UhZnD+OD}Nz|6)d zDfugI#LQ34870$9nazQqxah=XA%D-H3kC$#Rc8v8syg9xJ3k;hA8b1C`+Zpzb(cm7 z1~fswwc zeI4Wr*eW^IZaT_VzMfDVwU`wMDx~RvdGu4M6l~7Dzu}5BK)1>~RH@a<1@uOGREa#| zCaY_ljdur~TVZ99+)$TCxIO#nUhW3ne9H*z;4S|H?6S=|YpqbO5C-D8;yR4(I0A*i zI+-@3nWlU(v>IE|MmdFh`y9M$%-3K8O27fGZ})ypj3zUW)J3eVP84|0g(h zc<1=Vc1<#+%IM&|Wg6~gwTz?C-F-yQak-5?%MKWQS#L$RC(Gz3I4R5^$mgQ z*V0+FhS$g)X0q-glOI(0*&zoD>;-$G9=>L$FhdfqcJb4NZE}6pvVMWZPYc+|+;Mej z)F!=TvW}!22l+0x$o6PeT8Q#T7)t__dAyA{2EC=+`vy){xhmhPMj=ILs>k0sANdI$ zo$s2lU1eLHbox|d-KcW%rRR7=_g52Q6nXFEd3VoI0Gp_SB$_>4$*=r^RxMzhvHDW;h$b1Q5T?YdFJ2Z!t1;5P;EzXa{Q{x{KJe@# zM#t8a$A87G+Rfg}nU&O@-lxh_q1pWxgcuvUaY}fHMGY6uZ6xjo43 zm2F#pxDInuM$+0ur&2l-m`nfq#4YIGc?qw`K6Ry^i)WPr&Mh80N8FtQ!(lR~<6Qi! 
zB+rSQ9zW6p?WdM5dGErER$KONL+UJ+ujqpY28~Yt#m6Gw5K?;$aEuuf861_h_`+IS zC>eO6%Pe^%9e46{>XhO-3E8Z8Cs^pSeD#M&$JEi{l1q*_G*22!tkjkXONrC*p0QzB zdZV8uXf3FmG;c(59xbk+C{4m?8)vJ+?knV6<6=p0;DAupjTtfmkLt>nnP~BXU!ICv zf%S#UvlNKBasyq04BD_*r|3>$C?ln#9x;M7n&uXqQL^_PJq1@P*k}O9!_F zl4JAy4y3&=s{5RunmDr}FI}O>;wO1chz*~rVeT%sLYv?2iJbyUeP#G494t=Gc9C@b zsKH4%Io-K#4+0Z1o_d2B-U`aSO++v3UC`vFLjU#DVOqD#B!{kmxeh3SdxRSfGwJNX z4T5Cg02j5a#G6>-PFI{mTA+3tKEBP%LH8U=Or*v7Hg*G?CqmymsMsm$eZ>s6|E-Y`J8EHhwKzK~lLKUFS9$VS)h+Ae{ zulKL12%$7JMjz8p=~MJ6Wvv#~s=d89v}Q5W=isS$69U!0Ji@i-aEoe~HlZtPYRDNu zG<<;x35{bJh*7_?jhUOU_9i@X?`;?|il@CzA$aWp#hKJF-1i=2_f%Su7M@WAgk3R4fP700m zYXhU~h?Id!Bvc%_Bhh#J-{F4<(Ov$FO2hNe?7nD?E4=(-epb`lb)Y%@^!)f3OWfP# z?cwTvXZwCS06I8X83kqD&W{(~u2~FRKu)gr$0u9Qf(9zLgkuazwg_#7-`%_MLttN- zg-`~b`W|*RFLTxJ<{sSMh7LDE2mE}Zgkq-b4RmMSX2U5mh0abse~e9vAd3HIiT&TA zb;_m~WV_=@Q;L}8T-vye4lu%Jo}M-+jq_-|K8EJwxFduR*J8;a4nt2YQ(N6dh7?YF z^7GFw3RIeSi#uJ=#;}s+3N%lK0D`D!{>x807FQO4#P$5b#h%RR-tl2Y-`c{1%p#N+ z_w0F&JE$W_#gdGtyAgrPZK2sp&-pkKEhVaMwN=FKYExMjBmc)mBh8-e)qhFQq&#?1 zJ%rBSSHkN5gSEE|s-)?*gd1p}ao5J(<>2n_aBz2N+#MPkcRjefySux)HSTVW_wc+k z_fE|FP2BnM-H3H|?UWxUswTldHGHBL1DGGIBuqOk83|~j77x`RwllO6wv2Rf1+7b^b2aT~oAj}Y_4FXIO%wf$MWXF>8)j-{EZd{ajq zQDS?FVuUTBXd_xn?S@h3!Gpn(PPMpmmjPn54r;G4>tkmj>q`FQ^gXoL_9t%Zpf7~A z8Ef5mGxA1hJLtZq#4>UYAm5<@TVF$v5uc?Ux%y>tzl*ZX^;vyC>S2>SK-~ew`a%Z9 zc@)hxx_bjMmJY3+#XCn_GCch#r0Q*z$qFqOmil^x7;k3Tal9{c;zF2`IHeae&%0bA zaEp4B#edXoBn4NBH?#M7*$W*ok#3(aDtWb7B!Csz(L;z`oKF2s0lZ=9+W&%k;C*PS z3bOV$^)QG)Xi#ur+&McmxQeSLDDHjD9a-$A|E}LgZ>T{taS85gcA9Y zqE+*w!$9@{{7Bu%?gMtxwVUbU;1BEW@2MB}6iUnJIlbG192krcvQ=gU;9qDaawmq8 zmVrplc3NJ4rG%u2b;eUJino!7JcpHmM^B&;%WfxaGUc>jOOcp8!s!=x?wT2E*ig#w zwNAVbX^0?0D1VCGdx*u$Zl@%Gu6WqitLNoC_DW;pp=|Efe41qGL#6U_a%`^ zY5y?ell{xw>5~-f)8V4r^o@{EZyhUmfG#&PFZ#{r^SW_0`9AeLby9Wu2gz5t@NJpj z@_C1`YrRY?;;PBwVO*j6<|ZT?%<4}^Hqu)2#~=BvBe&>}b1#AQBk$KcB<;5{v9j$z z@Ga1I+0j(@{O$ZS|HtCAfX&wF-n3T3n9<0?Ti8Qo(!Q%0^$hG5jUsGgUo-4NAGi|L zbVFYNxGuqsY%bM=9yMx^*i8y@P?)=Hc%KPxk*FaJ!F(PQehiLMO4zWGk_KK3d0x$a 
zWq=9(*d2u`bsoSDWj2x*ckC{BuBte(nBbs9gnaJyC4FB7bo#a#v*q(=QVm$$!}E@g z3*3`oLAO%h$z+CI9E&^2Q|zXh+;0qClzb07)+<8K^D8Ah=3JS>KM%3@vYKSul? zQ!6=85{4QVTk_jZ6k#hCSeR7iIFTP6uV73gokn94R9@7V&1 zlU!If+PGAPknjR?>V-J{55tM~*943*5aok*nK~J@rd`^!x(vA}!O8Cg{h!rUQ$r;s zkc(q?g|(Vj0aLIKGZ)9+yfJ0_Ys(9v@QIfbw z4_S(T%Bkq175dU-#smqL|J0fNs}J2jeNf{f^AqNbs)0Wx2TMwYvr!7RmQQzcP$a6XlJ{GANXkpQzU3DABLGuaJgORlvn3kK|z zOSX4pMC^MI;HX||WECHH$3@Ee;57as`BN>*8h4g9B8jBlbhK&tc)6ZgALnkDB^-5| z7^!SQM!KPzt0uOUVD3V5Gj(lk0Eq7pUzCJbbXS40DH7Hj-~nJGm@s+LFprM4p0uNV`ZDFidqIv86hYKv+4ce zxbkH=lp1n2NFFm`)byp9J>Jt$3KLrWLg8svs_>WuqEQA2?L?)K{S_Sa@7Bvz96j3P zm#+9}MmT&s20sQ>ahzR39`stxnhZm-41^t#O*$Rj^TGM;3GjO9h8_K74coyZS0*Pv z#`ISZszf7#nu-B=kU42ED37zKu0;}k7>G$J>L6?Cp!|x#d5VCjaV0_fIEOB~<8R zzCuh$X>weuMpNW5(@`6WQCp&eIAEES)WCZ#ugk$x6TF@{j77=PzD=PgyX3>Z4ofSA zp1K5q%fXsAP7B!B#eKbg$7`iFtli=v-zt~Y==o*IOagHYQ8~D zM)^cLVEDW0wER+HR6(XSXI-T?L3=j~iS}IAXCez789B=Dia`(=uphVJFMYCk{cojq zt0v9GO3~b3vCMQ&a&Oe-MUiU#-NnL`V&2yY3jE9hPYnl0mHgJZe%GsZew)JH*U!U* znuKpfV&w_~w$HRT^U3rA4gyxs&k6#T#G6_^W`1+G>H^bRL&e#R@ts|7d&hr2D*Z0f z6OvTMviVuHUrP=`PWL3OU$;ySzW=~T1aB1lz$k+hvl0@fmn?&%ZX~M2G+78RYRnO( zPW%IPYbLk_J)-?(_1$z2#HZinwR&QjpdN0*lH>zyQe5RT-Am+CX_8r`Hcd!;udTW@ z_v5FtA-HYUVQV#W+}!_+9Zb-sXqLN7*5hZHkcu30#ubWKS~EE|W1O_H8 z{h{VWvxY(Kj8WX6Ndz^f-!w4760;CdBw1Ho`#1c^a-Jz&Q%+)nbWX#%P4SAJVFjLoxbT)XH%QLNi(B?)6`*TH*`=b z!Wgrd{!!zN#f$z?^Bi)LeRG`o5OeT3k znWop%=xDI8nI9^ul;cT%WZ$&$T=5pvPxobiw}D!LZ@@8OSuqQpR7^Ky7cgs?{FWZa zo^LI_9$Aa?gJsIBWAbZyHq)$G{G>_x0Xwa=+FC`OrcM2jS*5PH=uBRN2bb>{clrbS zcWd@F>$+9z`a!dpN!xTt#|T(B8_ZR@CQYOI1+&yi^z>5pd24BMFxYt;vz3X4R8y99 zv*<~kbPx7B8`m{cVJN~RcEZY4EGa)`;uX>8NW`_{wH|!mzIm5($ix6f+V|;=Y-UbhoJ8gn>-dYcOy!7!!&3j2s@%P^o{G(-xq5OE&gjB~;{<>1)!Qv zFA;|Pt)b!h>g23u(6GFpJ>&wB2k(&w&wgdEW_btc+==tpe#Ef2pCM!u(Fy;MYr;)q zdqykyYls+P1b#9nor}g{&4yu6zjTP|)LBs=qB67%x*AV~gT`6SlwoH-R7ezJIvzU@ zUUDj3(60UGc6Yx+2pS?KzC2fkE9aiQ+L-W8M!!c093qzKP`k7J$?@FiL43byDk`ix zUKO{FQ_H1&|4vaqeaQN4yo_6OB-M_2;Uw*h?oMjIQwSC!BmO1lhD*zly*}*ko!klQ 
zAPK}6{47r1BTh#9JZn#BmD{7lV`TVe5pO=62nTY23IY=n&UlQyRPom zEt;l}KLW@?!kv{@cIxI$LE}=TsxiY13OZSByiOjMbDBFr<1(Nn0vld#cjtSHv)QTq zP6^5w24zHoOs~$O zgn7icW*9jpm!O|F*emiH`-EBAR^G5-7(1qxV2^jmOZGbDfO*|GZJi<&b%JuVh##Mg&#Suxgk$=9vJF7t@pOR^z;Oz_@f6P<=j{W5CaZ)>G z+&GLI(@1c}8|{Uc+%aNyQrmA_Jj@WYN#Mo%=vnb+WxG~suApYrxPBNjrtx`ZB+Q4-Nb+4?fK&Tc(OacSlm1fgJT(#piE6r%s1tp@xXcHu(cO3 zEEXdJKtgBm(z*EzE1TSF8%BspuS0a-cx<`T8Z*r6(~a>`KH%Tdq@3{ddbpno}}-(ub5hTyESVbr9hb7#sBX8@(O>AbIdYgUh}q$5&Q)I zfOE*QVg74cGFyp1-)HD4GU>Yt_7TgLdFM2Eb`XDrH|8tdjpk|n1S73uN>$C0dFHfZ zHU>YfkL~NmP3xJ{^ifx2R3Ah(7C)m;)(h{Q$7TJjdBU_=Ha$NJM$?P+-Rfn1zj^UA zL-r=Wm-pif@4dbR*F-$u{88;RTJ|`cNw#C*V>DYlw2x1+V=1!F(WIAn%c|krOev=+v>XnWzlxdt&`_!TQ02p(DRx<1o<} z1a;7V!$xCbH_+*M@`nLKgP?$vMNC1hQ8u*dDfAx)QwOht%0T2qvuEGd>3QCEN$jUd zEeFGbV284XUBRm%H~7)h*bU9~uk^f&h{i(H-@g zbz8IN&L7KE91#wEC`1-M75NsB<06?J+eCaJycXj8O|xW%xj;qoD3*a(SEMb-8R-ON zj-^0-gd}K-*hly!_#W$d7EqdJx}LuR$h5CGVmO-U?fQ)i z*O6}DedIY7ju=aXAxHTy|FJ5>L^jnQ!#T}^;73%CzL==2d zZZYTRW7Ju;Jneh#F~bnyKcNIDe59UY_tDsR%w$Fu12v&LC_JWIkyKGB1k9vHW&;(W zt0<(T;$o4}D}_uHM%Duvq5LR5q%UIkG1$n=Oh$?Ww$By#%;ZK^0}Y`LD0`$7qOy@G z=neujJdPG41Rni(H%ztLorZjN!7$EqBYQJ*?0Z>+Cm>tzLT(vTZgaU)sh?i7-$N;K!L_^9r+bc zO`1Sr6l0=ti%~1E8>d6yAi0zC7Ria{K(=c!P#d~eS0|d0xG!yX5EZPvpe{tKabgiv z|4WN(w^<#gU@PJ)ak?32OmH2>NX>th+N;gQ0YPVUbFcbaetk@Ez+gXbam0b0M z@cW0S<=tOt(o1R`^_EBr6qw>+T_$6iGWV*fRPDvm0Gcey?I5$R`C+U#}AIM$CT9_bhp2R?-tLj;P z4{T+0$+=eKm!j_IHP(DW&!XW~ac3wm=#$ZuRb4A-Q0-=De#WY@`m4Y0onT9|sp?sB zue@4TCuiC|hM1H|^BtI0t7MumW|l-xdgdasU?15h-L9rJ=$!^r~xO;g6~lTc_# zRmDpq)o}8Y<7gfXE${5E`HfYFN+Z=%a{x-F@na@Q)HLO)OC{bxjWlP#rSfy-)yg^v z)9Nw)q-Pp9VAq&%k{C^dYH|skYHArkt`KP2HU{4)>uilPTg|LunmlHm#6{zzdRR)Q zqFGfZY+5+BtXb%&evmbxo&-(9p=w*Yq1Kx3C{_rou3A;9qu!c(D|(PI=8*(PgQdz) zTCLq4{X21VuWn2vNrL886|n>l$W`E|aS%GDlvIYN!d2xcd{8h(oitBl4xB3ERr4rQ zjxwj2R@E!%09sbgisx5~^F%%3=ChTth#OakBSmH5cCvc@xSu^PXBIXtP;(W%ma}4# z&d(P2ivq*_)JL_1U&78~1{$Y`^Y^oHTR1Nq7tCtrM~JIO+2U@p4OuRjWDR*oLE^%( zVOr8HtrXO18g~qRt*i*m!9-)Du#{a&$zxVEP7^O3qKR6>{lRKguV$P+&AL%wK3mPP3)d?qqrA0 
zWE|xY@Eiac5ELL5FbJMW9#9xy8gLMR5s(z18n77P8}N}=5!z$mZ;)dUYfxhlYEWX3 zV$fpn%b=jgB+;P3Akv`1AR`CJq{}42q{1X)z^u(A!K6N;HX}cyHzPEoG$S>m*%3`r zb3$@Vb4+o}a7=tmeN28#e@uEzYp+47L90Qj!MH-SLbXDM&6q-zLS>&ymO__8l0uV0 zu|l^(vg6!#?7aB=qY1VNrU}mVn=7mF>ZLH-$w z{I+%2l~})>n-e9ko$1Pb;lh3Fyl_&WB50mjAK(;l7XTL!5g=2y4O8{43Kq7NqJ<%) zm_^?_ZVE56mAl^A?2K+fUjhZaR?xg->T6~gcY-s;ne1$88H-#okh0e~6Nj6{rS0r~ z=A?99rnp(2FtdwW>gD8q87GHD(L8p_Ad{F|#3lFyep!lH*t}qhDsz_G*m>wQa-OTu zN$V(5&sB6g?yzEtEAx&U%bDR+XRfuvN$jX>iarxQ!nAf-v#eg$ymbmTGl{$0S?#Q1 zmfEaxiaYaxo5Q8*L}$4**GcrKWQw`-bFlu}9czOvUa z51bOul;ci!X*#xETrH~CH4m8rt(4Zwm^V+UWSVnNyXbvp;*kK3O+KIH6Ce|z4cnRR z*lcEVyJ-qPvz_~o^W|B`I7DU$cZ~DrH*K6te@>?mbRYp#B?5mzl#q6y2NWNIC(b?l zvGL4uPPUL=U<|ea(~LoEuR&lcR6YC=@n7bYA55zT z;l02>ODF;3&|bwrL#TcPaqLJ|Y*VJSoCSln-sx1s>zVDiUgJPYCzy-I<`P{RoF*xqAOfhthN2vgV@Y@FuyTiXG>Vu3PH*|S^w zLA^3KZV9dwAcQJx9kv!T`}aol-r)_V7E7kz20wd+1I3^s5R$R!*fh+pW9T?E3~S~L zl6tKI*`R(PEa6metQcIwvEy0;R;+6V4a$4j123R>;9YUg0Z+d+p}gQ9anD&+?Cg8C zb9#vb`;|q_R#Aj{7%hzfizaDOrK8RXI20^$W~miSnwGb;YN-|MsWyONld4hPgl7sk zS*`c&Z792Z3uSwo0S;DB9CQ5I0s!`pDN$n_VLLtStY;w9Y(}~60 z=)T|lYC(jQdV(#*rkrQWJ?n|-+LPRz=4W8w548I}t z%ZaB%CQgeeZ&NVJWu@`5c-YRZ?RGn??MIGkCOA07t^#LD@!Jw%fd0466gxsdC&TM)guh--j~Q zyU%A=6082+7jw2)nxx+7*}1lO`$M%2O7F&avMjZ~ELEgBDZTU) zndzmvJmzmVb1~gar~BUj8;9Pf|A_;BG9b99ZuC z>6NLMXyjV?Qkl~vRx6i^e`IB=ma36f%j+dsJ=~TpRVWubE_UEHmhY>NS8FwjKlr7X zX&^ZLo?-ff@INp5tYgpr8&l7$(T?I%tqwv{Bb8QfnHe|32i+fi7V-U-|5v2-TmIkR zK^k%=j(8M>H}#z>gTs;2Zr^@uWarI`Q5}m!$|dOR{-W%1ttzXk>aVt{uD~Xr0N?!O z{bO|cV#Uv$vU z<*`BXJmDUf1(qRh?f`s>%og~*nJdsp64}(cSND{2sTk-%6f`Ibf)oXb%$sV?nC_kd zY+SgMb>%g5p|n7U$Bx@C@%x+ce zp;|{Ujn4&Q2Uq^`9Kfc|&0<|jOks_s_|CRiRkXGUY)`XNSGlk%)0Jy1A9Y23K zauY#%l@pdE{{N>bBka_5A#HO;Z5e+67Ct z-XNgPhZzPlAETow(4E1La|Egdi=_+E_rp%cdc&791wvylsRn;F(b&Y+>8-hCA|n== zG`ofoi87WHZNm3=dMXN(ukr!0e?NKg!eAGN9x-Zy^Y>b<`~pbbMC_TiW~@P4K_V(& z6*9nRfe`$%lG$Up&G034DjIy)fA0%P|0UEV*d=7_`#G6CYzva$$O^YTR12Y?m;urk zByc3KP;xJ9P9$FN9BFs~6_1V^I-lU%mN!SSsgO3cZ(1k~6vr7vvW0C_ST1F7b&z5K 
z1O2R&7GxI9M6=0=-?+(+Ly2ZFGLfCNW;BKJ%-e{L^KxVY3i5^_bupdACbF zKWKd79%pPmHSgwOFA9AcvJq#5-om_~@$-02m^Q^Ej;ZkT_Zr0A=J}i%GjkH0!!Psa z8?s~qp;&khL+bl?3=C`X1+rEh?zcx(%$H)a1l1BH;R;GXo2s zZ`R7(O)lP;KhR)dRo4cQ_!;%HysI!8ZmkSrU|pK({**8k9mk439S%z!bd#B?$ z?)YYd!@>|ChzvhEGDdkALEDbYp-qT)FUoJZhXs(qY?^R0gv;X;@P>}nB~WaHP{=cr ze=`gLjG>I6=AIL$%zrzg1Lqc$#6n(3M_v*`(TFbCc8FdXbj1}6x}wO9ac>)atyaWt z&CJs`(VT)S3C$-`;BiP4oTsE1Q2TkHm)H|ug0|9R-n=f4oanLbB%4yc41-24vmsxN z_9W#`-9Mjt!aDjissC~hBV>N;%PoU66L^oDnICe(%O{Lwbjj+&6TIA_`Xt9;chhpg zP2Y`QXxAmLxj$goI}Z3UNoKnMS3eii3KBg8`I*#yGe$gWb*sS#NnwX#sGg>p(u zt4ykprlzdw^<28iz$@4eD%%GzWBK(5V^_bTV zc&F)RFT)eJcUyEGr<t^D{dE6B{Jo(nE=^Uibv=jCk(MJ+lAwH7q)q z?U5XF`qu0HMQHpODceI@nTu(@cxmr^HQ#~WYaSw8ixhXXpE&Vz|8OKdKPgJ={PERZ|(DazB-r1e~-6NP&_H$leygE)Q^s8a)&{d(ZKo zSnZz^DT)sZ({KmAuRt_1w?l?}NvZ`f!G_v3B0pIVHE#TRZbX5*Gxmjo5{CkkPdpB% z)P!^1Nm)y4jD#Qt4*2ncbi>N)}h zIz8GR0)m{9y^jEKCi=ER-uNRYT+^m>qa$5gkhg$AW5w(=sQwp7E< zJjFlB7(w7OU&M>x>Q@mC3>j8@v&RK7%fbt7*g%E6iT!W3+#pEV;Abrs5LZS$)-gM$ z`H2mK3TN>jP7q9F|Gf(fsFrRIuj%WIsV<>`DMs72) zB7FUu*bIup4GP*5h~ipK)ZQ|c;q zwH8aAclHDUQPFM>kx@@iU#oi5K>;4xc>I|TEVxB*{iB_+nWD0p!M3@9NU&h}hbK!l zt5FBd=)~H>R5?*_SzP*i9@m{48pTpreXF1?dkl?*@y3XEfCu$BA0`x&K^a#L^D`s_ zy2gbz1q4dEHQB{md!$XBRAegB~3N9dAb`gtj`Wwk!Ykgg{ zFsfUn`pWSnEJ?IGjn#2ucYXKbp~uUZMVWmy|FOp6dUoIXsv__Vtex?@((T~C+fbQ) ztQPQ5<=x4TCVY+J=)M=~+-v>oea4smr=#?*(gD|<`<3p^-#2|;vwTndpVXc5hPtOQ zhq{hG&d|qT`$pJxp(_n|5?!{^gwc`>+WbR3&6#G6;PIfc$XfglDAtwW^eQ{HA}0kY zmj;OE#CXkBsn(~O`wn z;sFo3L4=ha1_GqK!;rg7b#N~Kfwa%OWO?I;1UYs-y-(IN3Gzo6dw?77wDOsfjM209U;5QO;~ zrFS#l`%X_DCp3e?5H}uGl||f$v{D~_H6uZ^m;)A)r^LO>=gSjgez~lD#w9dc%D_dN zki{=WtBd5d%v$;IH6}KlRkQ~I6+B;7e-)(a!6~9LS?eqT^I`I;gg5cz7Ylha9aMG3 z&(ZsGlW_BfzD5epM~J!5Yd1DI!&a0Qe%~vuz?c%hch#Wd$}%V&nhiXZ^kbwCLmIp< zl6?tQVS}NgHRvZ}oIy9#=O!JuH_aFZ+*wr=TY?KJO`VZ!H@d|5f6^PXON$}Q&DVyQ z8bl2T%r)_XS^NCb@FXM`{@z3|M*evOV;%w*QNblsVN9>ZX?}k$!fDD%4WhLlbJd8M z2-zboyi{*f#fA9m}WjWJlfYBW0da`jVoZ!~MM`qX2`aPlF~uu;J>nkT<9tfg7Pwos!^ 
zyA2g*1p}Y^9qdckR{U1FHNEHDcxzWVY*FW!iaJdS05po=5DO+8X@>Cf5+%#$+9)qW zrCu5L065NDnsownw9ML0vt$f~^EIwUM}h z%pdD$=6a;`jBC4<20<*yAdgXqjfUyzE{O0c7oaXC;<6R>b3TqF>tT?DDy>SFQ{H3c zQaUY)Rsf0I(H4b%i&gB;;FW`j5$`1WuDq1#QlifX_Bv%LTkV7dEwQ2HgSImGm~OE$@()56u*-D)p^o( z<1gguc6V401!VziJ5g=r`h6N&IPpM_>!(MIhVe8q&wTQa3eu+P3>vHUZh?&Z2%g}% z=U8;+tk5a~@yEV*y5b~>_oj{ZBcjf`(Z$*f!Yo&0-23J%nTd-=Hr3!2OI@B=6uBuz zG=9?h#8qR=tf|#9CjHWfKX91An&em*)-6^!CwjEDB_NGl4QUld&5GEo5lq5^WPiznzOiUj!-;H)RPqkQ?s(*!hiw`JRe3#sHO!Hs zJhv$!4T8wumFkNgS8P@XUp_Q3HY_t4k1^`78>86H3_Z+1WP$vHq+#{@7+ElP|2MGg z(T2WJ5*Se~MX}9z`>ov$*b?uC)Xv3jH7ZvC(XwbkxT~w*=q!1obPbyoJ6K6Ay|VRN zO1?~X5UY$FTi}exL($_mZLg&>Rt0^GEZfXNCF=(pTM9#?+U7Ie1n#xFA7E0dAv1-D zFQGl8QalkQpoe(^rhtp8F^{+(b|m02{85b$_|qv~12}wPB1R3_G~TC$TzjTPM<)(C zM9$O+PP=D!lhupk->Zch5uc9x@>D1w0Q`*vW@kNnu2`5-d#dqA-BJmbuvvaBHQxw7pAm0R%(j4gGLc*@byeRe*k0Trik05; zcu9=mcR+ec67Z<~TZ`0jv&8uNcelQp@#4n+etN$h@UBo^U6xwy{jsk9bhmbt?eRjJ zO`k>EjzdGy^<^7lfK`T3gh2H!tT_!+q>=G-ML<;58bbuN0Hg*zPlgy0+@bh zh09J?#lHNOe+&O2`W!y%zg^EO_+2DSRO0~MuU|j^tJ7~)=diyo7k2O4LDTPN+iw@m zZ(Zt0zn`A4^gTX;Vm7+&EYB3YpWYd@^wa$6KLUSy5k5{L!AxWNy_w^7+rP|ocfTz~ zH2Zl(?S0%s{_X1102n2Oo&D@PQ|&aR}P@Epk7X$YnH4t~baQA>a9j=(NCYWF9Ol5SH&*RjrYISPC&_I=2 z?`$BMs7E{B-Yl~|SlD2l+NL8-UYETPz1XyRy#n=Alt~z97ebgM`h@9{6m2M=yU>tqThY`iZW*kl4sdZX0ik zS>vV!Q_`K$Z15&fQ7zqe5TkchB;jYW%s_#<`E)Y%v#C-XcM7R^W3|IZ3ze+l$*xg6C1N-?)}r^ctIT zR68;@h<}r?jVrAL+p=w2vDY$J<)8LI)r1c4LI_REjF}`)z7vrmKQSm9X{-rL>=w-TP`uoh?1Cv;}F-K z&~Hq}J4;(4CCnFP3wjWkvTIii2gZjq5UnS(?HUR#!mJ1(=3t&w`G`fmv_*9|2~rgvU*{aE%G%GUNO|zrD-&P`cB$jSQ5E!O21;t3 zUS7K7z;IV99|lVz7^cpF zI;W&x#wXYQrV-oM%9StAW@s{G@s}{(>AWRvt`aNX$BDRIaLbMar^j zV}Cvwo2 zqI)TYnl8KGt^*L8y@t_g3-E25Y3gZKq@JK6qbb!Hc+)YUZKmV>%ndC(OWp)ui7k+9I zrvNj*bhx<-t=7Z>ccaa!$Prv(roaK*)=DutLH6x*=jZTqghYXSi^M@yVrX7e25sA@ zH!ax&U?qBtxeWuW2snZK8`Lf@ur!gWQ=BQl1ML8owV#(}Jhn!EkS~}^-B2Q*hA~zMJJ4N%E?%Kmfi;h?P!wJ0Qrxt!$ z1e9qW3RBhOo{1>I!@`qt*=YdpF;QnK3NCGh>dpt+pQrlc;lCFWcDbup-8ZR!!T!&D zaAEY-4v)`$GT_($?){AaGRe&}fk&ZF@r(FXA%X>#6e|iEv~eWy5H|3-5(xQqUp0rm 
zEk!&6ZdOP$N}y-5w|j4Qz-rgP*_G8+mVZtq&mA=HG!1gKx-DhnR-3%vR^jZWa1J%nu^eaf%4 z=^rucPwZWRSKIAR3|W0D?zjqjIJ|*ZJ6`UX)4N=}AxAg7T`^aCkRAS5Hzay}GVVBx zdl(&gB7Z-&@`X5#p*C~WPgFgV^a_`c**v54@|RCsJTvu*)14D+=j7VNnvXFz^VXfy zZRc$~1GjQGjuBpdES!Zed>HVvX{(rXptJc(=QQydu!Pw07 z|G71K`M)RtwllJ|WA)QOyJ_mmiK$u>|6c+C8VDpmURjThpL#+@m(fs&!o&O_$3{R61U zkr9hofCt_!HxxuiNri$3ghB@pf@w5F)(oU22H(oE?;>(-c#ex5Cxpn^~>%gR%oHd#RdX~Ud%-(In zFA8+I%hR_J}*;8e%K3Fe}avus7Mw(tstpDkH-dgv#n%o&SQ~1MN zEqr#La30I%v2}YoRC9UiA9dwP>FfKnu0E<~cO&&6`hdbY(ZB(B5r5Fq`0~qXtg3Z^ z;bh){LMpe?^y;GR1(tOPD*?n(?vJ3I#BM;<32{LNBc--rB5@k%a>ikOetx=jZ>Vr} z7}I2ZvEIbjwjRY%9P(tFnVqV3u+buS!r4ffxuZ2`YBx)>{zPCGMtNe~kjn*eKu{;p z%@G0RMD?3dsK>ALHdn-2-;4%q=7F9p%{Hy>P$Nq>7iKb(Um@&p5nila(5BdIaa400 zzym+d^Fz7Fti8-~k@78dzlD9TK$w|(+~B!~hP?vzR|&j5sIQ2}>HYPtmm4hNLlpm1 zvcIoN*&x@Xe$lfudgG76Gv7T2bqYxHpcKnHgfIh;4fsG&RJpd%|uPxr_ zRIuXH%tOe~v!j2?X*`8euzcxo<(Z8l0D;@*&xAtGayzkoLG~m<_mSO(Q|P7vW~xmt z3)%hxLVpr^BVwDUS-@rs(OJYZwhb8EA-iJ;a$5=g9-cIht@wpWT>|ABxkNVLv4)3I zwZH`(C+wKL4M(!osq%2 zV{e)38;&adgFe~Xbg09n<=-n$#lbrehB(3nQ+LN7ls4c^N-QH6$}y(_ zE#R!wfZ*>O*S`aZe24->LnZaY*7%c0ev^<^w0o6*h@h<7pjlPCWGu+V44ok~1UPkA zHL^o$?|p+5Xhs?T+Kb#t+V9E}@&I1%9<+URs;<2yBNjrGqEWwNgC6s>=Zde#7e9Ha z?rxTXxY{(&H&cD&^{+T}<=Ag47oT|U)4#E%vuTa*y&9>@6_tJN6OGnZPlxhKZW}|Y zR;CYWsXxCFvA%e6@H#T1nW-HbaR2V6%!HX@(l&*1!f85$j;SnbJ5g+c?{Y+ZLvVk6 za}+5kl{lfF@YRV-KK*(9qoHj#tcxwjG-?|abMWf*qCntKOG~r3>#(&g0)3@75!J~p&1Uow*M4!2BrGp0#FkA z!i(O=75SrUbL z$N@xY&;6N|o14WFhof}pf^fRn9x3#o14vfXiTRBP3QHwcI-V|bXFg5b_ zx`$?z@qbizm2p+&Tl>)6-Q8W%A>G{|A>G|7A>Az zCWJpcJplbnC;;7#D{sG(C~Cxf$5E6A3@J=cPYd?90BUvao{vMh4e3c^e=JK)@D>;& zl|dHp1BOZLE;_Cs4|0^D-+6Y!TOty`B^GIq!3Lj1}p5w z>i1CmZmp5M$d$FM(5(;{>=Z1#@0Qi|%vgBesV`tiw)g%Nov(fEgRB(9EDKs6=-pBw zh6RD&N^11PBIU}zFwn?saV$L``y@iRZ6N(kd;b(j&GCSNG&%jjMvwYJX02-qmzv3V zUN+(y5rtB#sFcZjWU8)q3p`j%JPQ-iQSFKb+I!#8NGRf~X>x;fJuPIiUruR~7UO6RTDqNP}cW!@g= z`T|CSI)e$qpparSwpRUiZ+cV?SyhWSgFf}^9JH)SB6EfGigc5joYmr|`WKO+A^JyY zas;q>@@5-zI)Q3zyz)`7{eF5uhDtK7F&05(z24s|I!YxvNP%ywH%fHG7@@7>e!DNP 
zA+tzJJ%@<~6R78mPg@`IHVLWw<2!h#*R6eDN;C=>>+E{Is+kLfy^Fltn$Gn$GL)EvuCuN&?ll*&L)GQ*j5f{qKA`@FDI;i)O zZBMeI-=XTxw;H_V)mIVM?g)7aUq+chbAbl z!+Qn@pn6h~p-X7{D4@nB?{>&GPq7=S3-7^fX!(v0wV?(wH*F9}jT2h3toCE(ENY{6 znPr|Gs9>2He}|vwz#-p8#mOm0RjM*quv@Ql*S~0;$;e&`k~nBCwYymM#hP6r$S5V~ zOx8JcC*?I6>o~;3AJJtVdz_=ZXAtkq;xny$P{o;Pv|j4N6B!XFl`i|ze6`CkozNGH zJCSmwV5+o=ER2l}nuZufPf2lmMy+q~Mc1n*tP975F*@$>@IlENmB^xc5y8(HV5Nr1 zYe9TlH0GbFMR!tCMXm}b`Pc{%*cZQ=#n+o*uy?w6Sxe>1_8PpB_1NIeGqne!tiH$# zZm<+1GR_xHup}wZhVZ=>)bQg4AVY+!mL3S93*aGjW2x~UMa0g2W=A^fD%F7ru}2r4 z$NtLAFNTxZv&u*OP))51OiC$uKFR`jEE+0U!J-wxXF{FiknNr(M3>wr+XYb)W3Z(5 z@R(rf5(%|oQ7CA_{SK9jdz%QIb#rU#2BP@+2%k-BDRVICTTZ+(W+KKe5Gwa;U}MW+>_e zH|$-7$KJRzZIqsz!ovHya>Ke^b83#Loix-5=RU{7AUoz$sMM(BT6Q;HGwYOi-WGtR zeQ!c8l>bIBmFGf)Yt!mRbyMnwXH_-y`?U>EqtWsGPNv@th+S|QvCDn`WyBWUmt+S0 zC1Q{0NdE@r|Lif*w6$+N2k`A1p6;5OID*;2bt)dgI;@-=7Dv|cFsQddw zVBENcAvsXA*dTjPfZ-kH1BH%|p)%;~)_@N#Q`F)aXfW5bYd+uM<8SwH;i=an0=|gzmM4(GS+xv*cr(Luiem=-tz^^#Lm?V#rnNDsnU~(uZ@6- z_;AmPgF={?epF8aGm>*BhaI$#n8d-N7o=b#Aitmo1|~sZoeSg#o23*^?A1d-)6Fp;Y#eJJ@Iw5#_C~XqV=(m^M$;qJ+bL@Ty|chBJ0 zbDV5PwaQ~LfwyGwo!Vr~`X_sEJjlmgC>AyUN~QvfNZIY<68m)fiLz-weERIXxA^fc z^Fm8+1_qC5B4*8eli_N{mq78Cn~STfrggDQjPXk{$DvB3#p-i3G-9pW7=Tx(pDeLX ziF@%RKF~1gfWPFLo@&|5ncKssO-im=ln!JuQ&ihYhFuY3m^YBevo3-7S{6Y_XaDUB zHS+!Y2m$Qq>h_vARASb;D5PIuL%~aN$*XXME#bv-G~uEXG4H2@8X5Z_fu4{6tTNF= zwuV!o5QT_QNzBCoQh*8)XlU2!ZbnwW*p(nH8c>aILC7zM1N?Z^?iIZVpOscid$C(KRWt*iqB=# zD78%3!I0e04#+LWUTC*$}|CtC*B6oA%8- z__!_E7_Q8Q&}AkXOo3-h2}*#3rW&M! 
ziw*pf6nl4~L3_uf28n^yi2GY1bl!|I5&k|hZ593j^Nl9NXBG&=C>nt1b!~E~$z|W6 z5i(d~Rya>B@k4U8O41QT7U=i=u1fyw`R3xJo|0b>VlXgJv^U84*<~m6oNGqtgw3Lv zy^L@k;FR)MCO#am6Lo$Py&RZNnPTYz^DHf>duvXiK;2Bo(<7Cwt$V$5ZM4~ewJ38| zB0`#{;i(fon$C0Ybz`T95Yo}m(~Q&f)3npnQ`}ud6cJVrw6{_NNcLWGbu7-D^n4Vd z+X8T%Xs(alI||J<4s<69m+Z98OJ5E|V^6i)1itRUO7{hY0f)AlgAr~jy1z_@AiP#t z(`#|DKj_`^=}j^W@GUg6vh!cU4h)|yC(c*9DoB`V9F}BEHJ?*KHa2A9hbm%wbXW19Yyq@*ya@y`M`qnQP4=rHmMJH7VZPrMB_dOtzaAr1tf!8Ab{1&%V43`sgY);xJqMIiaXd z_ndtq1&OHDmd}6HH^7y8)|GD5R>}8sQRkzVo;GxD-)TFc>j`ys$Ah?<=m@KP$JRt2 zr6srtq)$XuWUV(RY~>urP$lFv6+HBOPu+X*meDCg>syQKDVwJ$g3bO*lZ#gN3_UtJ zWDq2w%_`Va za#(Nm4K4p^nYk(y6F4c)GB(0HXJ!;XCVfaFOGzlQl)|S*b& z(D+IsQbYT2LQ`Gz*rCxlxFBSPCYa6-Am5m4O8JrO6XWL?h(=J*;OxKq6dpJWm93GJCqiY9Dwq#`Xc} z2rrx}8)sK8I&{!eBFk8g(7ek!KqHpwIj-&$I62bDU;!XO0ra>4l=d??2K@aIa2J{- zj(iy&#qVjFFeporPgf7cc41Tn4=wW*Gzf;0&Oi$j4QJyo5Pnggk_b7$8z8MQ0XF#F zt{h@?slLCbHCQ^x{0OCw5QDvcfvwQG;JeAYZM zVloB3EsI?|Aju%yE+xgleaJvE7^x8(Rciiz4$HQ$Kh&dEjgZ(bxD2Orr+ROz(U@SE zmm9~;yXjvJA1^hy9NI0MGbnKGn0ACi9%@s`Yez^MvT6*1jLRe8U+j&3n_kB;pJ>*5 zl;=*co>$c5xIbI3#sN1%QK6eJw`A#323NhIt?LhM4Un{vZ)xCgwAy}6V86rRJywW6 z`0179q~NTKzuU=Km1mVIx4gu;=&-`Ocl1TF68n~Kb94MYa;e`FnPChS?R;uy{Q*|} zi4GUKLTdp#Hk+RXGcMfAwzh{Qy1hO^)M+47#%SRIu{g}Q5hgwjFhk~CiIBW#ZIsxd z?7oSnqI}3r2pO{etXXDA_#AU`GXntpV2Tz1nqjRQ6Mm?%kMy&17{%6LBG9QW5@t+} z3_q}tilKDKu6-Oy6?Ifd|7i$;bFkop_T%)L=303 z9K;2HuS0=AB;{fjI6^pY!{E6c&=oLthsTu_x2R5=h3|(HkwO)(hKz$$WYE1j*kZIF zwEbqj6Ay9EnRk%iqiKhM*R2BG_eRZF8f#mp3088=`-y`n;17_BJu!Gu6;r)uU@7VIsgWwS*4BmI z=}SFM!l4+`t$P&%bs^p~Mw;5M?nu>_axUne%w|fk-Lh%M(p&M;dO<(qb;@F+)YI8> z={DW0k$f3<(=S_lvmUK)89O;q!UpN`PpafUOob5?aPlZkYVat3bAFCEwINDS%^gP6 zm3ac4@$}j3prWLAZ$T(g8<%8`+IwQ@~}=(3GQ5 zCMCn;ga1xbKPPhnXTwz50igpofVXOvAybAIs8LjqOHfUvKnwAB1V$bIQDmAK;p6jE zwt*VWxq6*IH+y+#azNEi*<@@XDRYz$8R!W(JaFlYeIzvb3*>w*)W)pBSVN2OI9gxJ zFCB4(DLY{dbDDf#6!sQK7U32 zOayN5E=n%b!*G-_uouac-&A?nAaBdWU-(&)@)4=H#0^RL;nmtH z4uLzPWa;0Cx~$ma4v@)ic?8D^&bkOBjgznY&g$SFm4Y1}CvMp=G0uJ=*b^EDzS=eV 
z2z)LUT}_!?juZ~`Js9Z9Yz#M^|6PNl4n1T4FU*GV-lU{*_5YucWE*O>hhO=$(XY?>IsJQHh~0yWwG_%ZFG zK-Zb%=zEUGHI}K>rAnU19eQd3eY9gp1O-p{o#iK(<)?9< zR)yI5phEG!6+3VC4nDr(F*Z8qWwK`>dlfq`=dvvG;LSFkFk4vG3HN1U=q=L6Z((^Y z09qojeM-(QDhy;qJ04Y)uHVu<){rg{2fV+iPvHG4u6kgFruX(0noITlJuT9Z17>^B zGcxx*M;+qsNm&fD7dWV(P}6pXY>->#%b<;j$}QxRa1*auZ@3tjZHqTp#RnrV_ zaWlCuGznN69!2xZn3by`{_V@U@Hv3z%AQ~z`tKh;>N6H3a<{|tSSVUPU^*WQ?Cd{F zGgWo=(srv91_SG`7O6`jH-w4tZkn?o$o`Py+2}b?gcU3i-i?}wGET-tIj<{|tqU#9 zrODO?Aw9CB%gVp-ao0jG42F_A_CQh&dPDdaJP#W!yvS6senib^;sx$k#)_$Xf*C3` z2Rby7RVgIWiC-oT|0Tc6iWA2C%xgi+Xu9)gSnNNd3rABI7U8S((Vb zWF4asnpC0yUvyHoS(`1sx-Jc59&Hi(!63>d%CyMkU<4kbaEGZ{-AyGruHE!tVV{H!&GA-zbY#8W|5m!KFEdn7FfHl6!aEhV((3KZE(5bp{f`JbR|)Az83pm&~&kswSy4C5(>dli^G!E|!?E%Vi}!&G-w zMEhfp15{^;nT;TEaHFvw4GM89{3KyXAx1Ij zUzdeJ!{l>4H6uYA;SjEG>104j+uWTo+p{2gOu05JO^+$-<&V1^jO+vxJ7U<_N5?v& z{+g8BgofvxfXX@>QD^gT)#e-JU4G-Y@ubL}kM`1{^b{YWJc}JUqmC)}X@K{{!CWh$ zm3~T4g4Fi7eC_2B!Wrxwhy=PAg^nr&SKI82ni|F7JmG46*IV2Y4>v=A>2moq#T}n2 zD`xp_;HtL!NFp5Aly2H@oU!uoL8K^kY-}Z50w#6<6sa2qJcb99Dz;ef&%7S$Fg8xF z`@JsSL>qr%@@-20D1&D0b5k2-FT`V$OoGmK^Tk#Ahu$cFO@=pf7DyV+8c{`4#l4Re zqwqX6qrN!4B&;beLD)!5OHCPg-TYLrGjsM7bWi`g;-k}doDXEnYn19yvOX;0NtQCV zQHM=1xu)x9HXDnbZN&9_O1Svt%v6aBzpLsG+a1z@O~B9UPv8Ft9VqyX4wU`NEQw|< z!NIT93qX_;KJlq{{vx!h%r3AF zgg>izY-I0&7X8A62H8+vEB_HBu#iOfK~X+mj{f&H$V`e4I4_9Tvd51{?^o&M)YSoK zFHj3Z@Qjrq{K>W;3TeRm@Z>Da>Q6Fd#r3Yo;n@mRY7AC^S4KtAtTx7(<~(dv%(_vl9y z9#H!rZv0>G4UbLqttTdGRt!sl2KRkt-_0VfnnaQDH5>jKI0aS(Kd~sr4+1BaqhE>g z|C@U7DwU;A{{82H!G$5THgNv!0262-2q>wcC=qgW%2^-myvY>lO2B(|rI9Q_VbUkh z>7En%pm-vm?3dyhSSA{!(u3rbI4qhbXmHg@ zrHM4E!)SLz56c){t8~7W0Y|lZ*_a2DckJBYL*8L2cGyeh49$D#gR-e5nwv*TD&Y%e zsLJa}$=w6FW*s6c&eHO^yFtp7r6!J3s-6d>0TW}okOzHq74!UE(K8sRD;D%|Ob#2P znxKsj_9aN+ml20BAkX0Q;lZ(?S&<(S_xW@Ri82x@=z4*XS!ILLr_*+Vt!|NK_J##* zfuoSmLSj<3ICS^d+6{2O^T~0+FNwn3R!6L{=VN${<b*bn z6e^5JPv{6xWjUFaBLjD2%iuG9wdW4PSa359#=#LYTh5H93u$mFN>dx?LL+tf-F;i6|}(!d^gSRqw&0Ekq44h zUr|9%hiBNu`*iurVlUjXsa$y|SZJIowYz&iZf+h~R0)wZB^te9jv^Lcih1FskzL(L 
z@2Je!)0AdbM&Br!2j>u?-?7r7&);c91G1<8>g%OT0unF!K) zFHwuji)8Iwe}Yf+DN9Q`f+bq#nZ@4bow5{xW^surTjdN}Eczl)ybasnW06qsK3ys8 z41vjI#w`0tZ3mrBlTu-l*7e2V#ih-$kJ=L2O?JEI`|$O&5RFP3NqxCOzLRJz5tVQ_ zwT-}Lz4qJMFR|*sV63-_F&K3!(tFq1TK8lTwwe_CaJSK2pmB$95cuu|P^m`;UaFWf z!&9HF&w`Wp&!K9X~2K)y!MC|;=2SMdhoQgsE0r&W=vqHzSeD!ka_lHJv3gQ{y>E+G^bct}p_$|%R zQ`l|LISQ~8C5G&>Y_bKF#L_S5H=l#L<)`@aVGT7K^CdKet?h^9!}yB~F z_|+hNgP)z`&%MtZwg#%C>j49^f6np&z1bhPGALg@ohrddGJar|5_uGvJIL$9jEJgU zv7Trk1}Oa#YS&~t-CXJE(yj-uk5rd}1Z=s=!Vo7bk0QAFQkgBd%Erm!5YYlVsm%8P z4w3dziVF3)RcB8MJ{6`?=wL}#sp~V2c^0W!eMwd>PG2fkw=q6B(s~5p?b~8P^8R%r z?T}2A7>{GGw8S$SljAXpPdVmP5un~AjeWL{&Lsq9XaZr2AXIc~i9?YUWBu4_;uw3) z9Z(5+9=>J`O60A4pG|;B;JO?v@(!P;1AM)O?| z+a_VWrSv{(gzK?KCu#*7Mc%*kI6{S(f!$GZi2W|vAjnTPIPpKi24BEj_5y`m_5$Gr zGYdzK%HP=W_@8{}^Vv=hIxhrwJ8FtK^@u=xf^s?6ZVd>=^Gk+S?gnIi^_x04{a zq}~gaqdD$Ic97@HKj*9T@mXD(0Cg!NE0jT_Bbl4f&pvsKWG?#z#DvX0&CBW6=4XjU zul>t880A~f!5Cet@9)VVa1Q1&IRu=8F-o~tA|)L=$3rXZSDhzsf{N0?-j?r2tbt%p zT2#y*36Ezt_8>>}Obi888Dh@|d2SdjZPv$g_Dz6ei7E`aDBZrICtWC)J)NB^e`;05 z+MZ%=c-7A33=D7p=F4IKncb)5Sq)y7sYSC;xPx(uh7nu(l#CFANB62v_WSh(q@!G_ zFU*^#8Y33(^Ll+MU;9SSo5tj1*3Um`yX7)O@x8|)#1cIKr&v!DvI_l8{1gea;Rh-c zx39UYu4$?bxO7bzR3B^Ra#z+IhWkwPwetDiGp8~oWcFj}4q8-c7^+b}gDq_~|AP4{C2zq|^WOS^ zSQv19a}L{JCjAq4MX>2Sc|lILrw$*QYFg?KMc5~Xt?uEUp&s^$D`)2pJvDgrTq@qB z3_o)2{B%xNekQW3^t;bsRcbXJp0#$s43B&|+*ihPuz1$S2pN2|zT9BBPqkl-K3b1H z1+P`J|GX!E@4KL!t($6Lz&orW&UEl4*OdNGJo23}uzkywxc)Ok7CX^|n^hI2jqeC* zOloN6>A<}|*Bb0CsQIxfkeqa(^|fQ=tS`+3`9*yeq;L`B?SUUDXFpgT zq&8yj(8;kWanQIRYA!b)OgV8qdhos?Qqss`H*aL8R%KU58udcNh|jOHf$uTQ~1cNG<;<;f58Ee;MR-o0mb!?soY5><+C zl$2dc=B8vuo=W%(G{BHTOD+;8!H`3|xcozrcwDwUbryE*!4Phd-z-pj%KoB)@LWa<`_w2U}q-roftdUq`7Qh zWU_hL8HN?t2SMt;IcpejZ;TV@B($}C=n@~_Bdd!cS(j@-{v3pOP?#3+-WEfjo@1zfbaqe@Xa z6e`w4NhwVsl85q;BQ!C|XOz6rj@XT>1Y@^Nw&3U)mip5nPlS9}9G`EIC5_?i5wX1J zJ;B1RLa)~58J}XG$vTUG)>NwjkekDT3>?+ad{}+#pBp&JH2NwQ(vs=$SmQ@fZe_%* zt;~omSA8>qjqAl>rhL=nSfEp{FABGOt}`8uU}><}N$7$Mk*Y7qg7N@uZM5RofmM0J 
zqujfhg8`m5ch^?C`LoXQKQaeXmyL26NeljES1q#*KPKWYk(9z!`Qhapj1MRPAbOh- zB_|Uz8xvb+Cw&gbf0|PoQ2H^Sq}&G_PWtrAa1tSNGG9VZyr*L%cI!KmBu zGgZ5T^G;xc6f7Jq^$H8Ry{Q2fr%{ltrXW=s*;Xymp$Eob&Ksjr!?wXC-zlZWBZjJ` zDWw`%>JNCyEjaIwK8YgV$Xg!&hW0vLsy3IJ><3})s1?mg{K)1G&{Oyh zgKmx%&L)i3c1BjeDR3FEtbTd9xWfTJ!1sY&>EP%=v+74_%SUrHQk*aW?FAV#3Rgl{47H@OVJ4nXI> z^U%wEngiOpv6`_x0FZFSly2#`uzd{;Q$1-rg zcx-e4Uy+;avC`2(>6@@hf%Sqi5rEP05|D6&osy)Dos@)|f`g5lDjSsFMb|GU%@8`M z5XL0U(A6y~Pw&c{6w0WU7NZ%jyl(IrCWe}!Uls;o4;C7(ubdv{o^vEC0__3494j3H zO(v)khznl(`3H1PGB7eQ5t-iUjk*vB2x6%7qu$!lj{YE$mxD3iqCt>)FC;n~um&1x z#atO4&kR#>i52NRE>09{{mxvst5#TVa!hF@@+q)y=G=85vUit=^fao+)l<3y@4@(i zi^T}*q8Je+@0K8M6>zsN=nH-Eiv`ANU97Hi_c>J)lM8{bO*P4QVT38`afaT+E9$(d zMA#`87i?^2Cc2>I-fBssQWvFiMQ{U)oOW96)ScWQG)ix)4`*~1NoW3_S}-XK1`)$_w_#UNJ3 zD{;tt4#6)yXkm`N+1qeE%x52!m9?U^&%T;j{Wg)gI<56lI^actSc0_rN9no{if)|k z5N>WEc^jLJZ__p*+{qyw1syFh^Rv6cACHTj!eqFEd zZ*Glu?beb1->rXaZn%knU(XVPYXZLhy#Qw5@c~rNUj~>zHBIn~Ey=Hm-~k>ff4hC+ z=RpOO^p}U!rKHOv85(dYNiB=TSs!@MTHwX>QxX7>>zi~#NoLk|4-KsufcX~S3H5hE z|2lkA{1kfWNCv=f$bb8Ie=OIyi6(zF7RxnJEjJZ)b&tot5zBT>EG@7>G4I7&+50sx$3T^9fTI4my} zp${wx{tHF?5U>BxgVEIZhZ1gp!Z>>5u_@3L7QpK|iW9F}qj3N2({KY6_;6;dXFvp% zKo`7@VjSovx6_3!aDNi;)VTo)GRaRcoIn)3|Dg-TC$~gld%2bRW+>9Z=c_Kizi9pk z3J~vGqqwmyKpRqKE;nQ_0okeGHSc_TTNIbMf}5$L@2QrFohSgX=XAMK>*_}&-}iQ^ z(3f>FGO;z`V7}ZKZfA5;Kqv&!^ddmJ$N{hGfYN|{5V!HutAOkcJ#Gp}>Pt`l6!5gd z2O_!-$oSnI0fy#AD6gM}>^>&@BAWFDz1>*PSotqitde_TU6dS-y=D+_& z^+ylP%}`vggjFG{zI1T&m`gXs_IpvFo^ssRs2No$1hiEtbb>n9~sok#NqX_ z=~YP6cLeE25%urT{I#!eT{DrD=)X|Vf16FdRz$t3i^*F^zgklLXGi_D@~-&5>$~UQ$;h6W zk&)w#H=fp8Z+*0pLtY9L3>xSk4?JtTU;pvpKVQIsaDg06?Ct1Pl%aq?-+)e3|1n(L zVSs?a9)W>?z)=7Cq5OY4+)6Vt6!Wid|1qHd=iwH%#wPCnO2doT$=1N$$=uG_iI_`^ z*xAI%S&xxe$LGIm|EH_}c!T@jv@MKGoc@1U{NH7;{#oWftkN>kG0`#pA4=lUtXTTQtq?#!KT&{y@c*@_o*u)$!aJp=W34QT`X}AX?fGi!>lW}0Vk03V zW##pJHM1$D*wI+D@_T;0QPGK0r= z8g{T$pFt1>1$-b94q|4*%gk-f)(?4>ft~p{Go9J&Dl47ySbF%zCI;hWsAs)w9%Kb6swk-E@cIWpQ=S}7T`3UUByU#Yix-ogNdtq!!Ze|+wa;qrp 
z9mQR&ZS{S^eo$Cu!>UyA14mk}hA4`qCGIkR;xfLIudT8(zut0SSSGW=%JTdS2RpOb zdNahH=o4X9CJP&Bsc*_^Z{vOOQ5gm2UFr0cor0;D)4(*w7}BiRX;w;AA)pB-x(LmD zOkFtm-hIa{3axJ&PD3F~kL=6;1i_6uU+-Eq?2N+Fzf>?kB&)xjUGhrgpqNSp)+SrN?pXULq_)iT?}JbjoO1&S}f(}6JsuP?$s~- zh2rO0%*YbOKk97RGt!yW>Q^rcN>PVq$ch(f>0{G{W<$cK&XTf)_o0g9wI+^R%AQZP z!5gdE5KoiT4F|lVaXVL8sukK)9zC-Adq5FfCmuwa-!m=FyKC;f*7 zMCkD4w7oz{tnxr;vZ;na&u)lwCL%*`z>r9HA<)Tt9LAId_4Ebbum2c|t%7O(BTkZ){8nM+IU6I3Y)g!LLfa znIt8!Tt{^?4`_r)GnH42TV%wn?=ocI*T5>v&EpcqMTG^RX+auKibVNoA)cNVM7h|?;e zyzLrf6KT4(8R63C=02z3%Oti!bZWu~T@HU_9V^6Tr#?!DKFyM=XrHSHsVywC5jh)rtq)FphokXau=}qJDN5HY{EfFnlW}}}$6u}Sl~d(sJ}gZZVwRYa zaZ^uw!>A_=$=z`awi*Ko>pz+?NE?z~XUzDB(DyX#v?&q!%euWPs;ag-!MiVc(ByvX zU=mkX1Ky~%lQ@7Q{F6xIn4p@~sSg01TN8NmSZy`+P1kM}ufOb6p*zys+va-&-As)6 zeBbHL*LlY?!*BTtBH{hyrHn2$zqMlh5tjb=1=$kSqM=3?t)i50)!++KbYS)5Jr*S@gq_5^w;ggX!U;d_*CmTz+ z;F#VC#p>08)+lgdb+B0;_&yj^P%5kjyMO|ZS=y-+S}(yaAF3D3#%rA$v6dU4TS)Ub zyIfF~L<>tJ^Tnr4fF;UnX^xt~VuQ+Bh9NILYnN-2$FCro{Y`U~58_su5x|2n+ilE~ z+!cBL7+DJS`|U~}uJaPJ{DLU31>qWYcayhJkUR1mL_ymF3hMETaRIgaH+Exascbey zveB=k>0NTfWkk+UuZ109W!)-Wkq|Tx8sDF;>2}(MlH0YTIyEoK$Dw?-9CeZKn~g8g zoIIHf794eJB#H1SA;T2r4?qqv_ObGE?S)MrzGX}0nPgfRl1-|5bgP~f%2w;?idEUi zRjM||Z!a3&;C=x;CPZ^Jom8_@v7%g#6O!VolqRpMWJ?9+6wx4lM4gki-e2N;JCq^N z72rzRt%MPX@(F>=Es1m!<_^esaiBVjikol3S(2Uis#;QS| zc<-E`Fb{`HjqZW+7n;qjw26ER)5esq*F}XUo}TMfV1yfAj?_Q>EgULnA_hLBjXug;ELe+W*X6eY9R0<2n*3 ziZZ#JTz~F<-CFm!p4uHTm3wBa5juZJyoh7<*t$C#uDv?*kG}S#@b!IJR~yr{yOnqp zc|_uvY-ESIOgL<9di(7(Ufs4pcM5PIm&mI!xxQ?FgJv1VNK9p}@Q2e%X4C)K1$s#e zC84@tEPfW`a?WmjadEcwU?6vW6x(clx!%m%z8=k968d7Bos+J5xX~(e%F#rTy{kE7 zVmC*%{(^58PH}3~n8yivh+og&!yXCY^mChDu-C8bE>GB6&y)&y_K}t}!#1P-Nd1pa z9@JDepIrFSBCKeKfK7?%;+Vz+6F2Mxx0PbCX-B!mBE?7gK`YySp%5e2g#K$U65xd=f<8Zs5}MJFnnpSWf=`4nSypMUI_%9 zWp?BGgYAh0AEJ5;rcq4-O;ws*7IOUg1)mf9BI8n{g8d>mx}rk%#y_&xHyo7)hJ13gXpl$BDsZaK#6UXXhS?(o()T7F z6*gcbZ?*#oeFy_Z!hY$6ukodfZWEJKc6e2M z2_voBpjcMEWi80W4xhs{20C?GHnD+g?f(GhZ$X*>>qG1!8F1weeFSN658k;xQ`6d# 
z5)CCxQ*YR{L5&6Lz2@!p#Y!~sxis;%~l(I{}!XH8sD~b@k!u1`;If2%V_%S zQ%_&6tm=24Y_hg|Ig(Xy+Za}{GZ6Wdh-Z z)qDgQTUFkED&Gv-?Fjz?=l=fTC|p=3eo8y(s~wec7I0(L*uEFu&6;Zxy@QNC^!~X- zz!|f=Jgzf`p!PPw2X;;ixk{jIyXStt-Gi>$p{99xrA{*^VGvTl?vU-rm3rKYlewlm zQDL)?)R~?ctEdURZmC0u%q2t!YyfJRdnC%vO9F-*plHR)c*hjr7^P{@I9nK^CEO?P zze)>9cL>%IAnKf&?uX5Ul-M6p{5c`_Jv#T;RU>}=0r}ro6N&Zl(k*ZxAY7Thv6?Xb ztJUOEi_;cW5cQIg{1%*noe&oaX#n`gFGM2XdRnQ`U%HBspm9j{c*wT;;r_TtP9RZf zSZ-=vmo0GxS3suc)jgeuYBMjd&(E>=J)NFz&aMyEpJxL@ho`G!Lz(x0iGurevw_Ro z)0=~dsh0Dgfr@SZ7(Ie*TxRpCzp@mqs`C(U#}>> zm}y&GtvQ#uaMDb^^V4s_@hL$VQ4n)k z*CFz5YMd{4aJ)zHe3QF3Q`XXv<*QiNs7scoAGt(>ts-fW#llj5uOR*H92=JRRc?GJ zLo$ctV)jM1OC)x2ucDY${YG*~l~@Z~zn8t>Ap^bbh12}fmr`L{^AO1th8)x#B| zA1K6m<+a!?GOU?)35WHTvB&7iJZFj2-T|tIlGVr(lx7^%leFu)7B5oEj7>k}%LajE zw5-_@%Hb~ulhG~_$#WdH9hsW!-wF!J+4e_8O+RtT>*Qi+WnD`imdeF)>Uzqk*Ky%U zPjN7m9VjNl9Lu9CZ9SQgU^nXH;#>V~Cl-^8tjoiGT&X8)`OW3)vlR}3oR%pS_S&oA zt+ffDdTWBT++;hCU+O+kQYRE6uB;N!vDT6>nlxj1yAtyNb~;lOv52A?vOufH1tQwT z+m^auxVJF6;tyAW&JTvY(`g0uRTWiVt__dibAu3AEruMgc0qRfD~JG=#KhXTECL(H z*>#F~(VJ;FaCw@)IvEfAGiXm@Y<^iIGY{W#CL#T@9PP*0tRYRJA;r=4hZVI@fWktO zDBvOTq^I#R_R_SVPX-pV!i8r$7eeQQ5RY$pC=-y));e0=;Zc2p%0qZm`Y|y&Jm?Th zhiFqTGY+l7fEUp@SgC=8x{O0j+%AGf!LBzaWQ*j_jSY@(u;?>+0#e;N7h7cVBaO}q!^w3QY@e)-Co^c&?JtK}K66RP<=&W>r+9uy`bQ*(3aaf*DTue50A_Iv7 z9HMaiLWmUgm8My1)u}K22y(1uX!iv@<=Vq=dHBq-w>|yloP&#`e|a%jMl%8DZwL1^m-{*A=Oups(cb#6J!m%A zLDN54Wa@v7f7_E`{#R9_q2~_?-I-EGRz|9&U zKjG)6GdC^EB{V4YfZq@EU48#RXvEZS|KS4+hWPAd3Z@`np!#`N)*w??qP;g99t z`aAefe>*AZ8Hs!8sVRDsy}i9FB=LNo<8xee;q!7W03kp%gMHly#&Ie-SqX><)|W6a zsFk;bi69jT3JHvKm?#@z(+_J^0bd6RQ(ezGEnfv&!*E^fv^{?f3P`1+gdE(IG^LCP zoopz&{Rk<*JQFphLnf$FFS1@ndjH_y4>uN+csf#JW^r!~O9cm0UwmeU&YcjHbNaxm zqsg%uDaa5H1t=I4lJi_R-R8Xn;#@ve2-9*%1}Q~fU-)(XCmuIRUkgQ9Dvp#D$Od7J zJrpNL4)sz3ah_WX!qu0~!Hoq&S?q&)Q0zoK27$gwt4`H_Vp;@Cf;99O)5yO%p8q!H ztpAqj)&FFgm7^7>myx=cp{AIWt~vStj5<6hf*+5xM`wVpAfGxK;)&oXPirTXJYNwF z1{EKq5FLalvS+v5-#xEshX#WbPYh9WwFs0K(#RWN{A=6#?iYIr}ID 
z8X~+Mm$K3*aP$~`t~G>9^b_|#!6Esz1jYUr9PYo95N!Vz&dGmEi2sjd82E=B%-o%( zp$_xZUAn&|iJfNE;gACgGkYWge8TqC#Jy;bWX-(9^G?;IE?LWa+OSJo2achJ>L5 z@-sp+gjDdR99JtaZuIzc}!Lo+uq zB`0y{=m-ZNjsS1(;=-BB_zDh+x>5JWi1bhBj||dd`u=X3aQs{Je?LvA{wqxk$w0wQ z1TFMXUz0wJOWj$MriJ6_$0ma#dVb4GPn+L?BRZJk{g@h^%1>2;SR6EjU&QUpPfdYD zxd@%t@(-jWLxeA3RzCD@y(K3+PA(ESBoI6V7093>w5B5|)&EhRb03*^XY=FyUZ31y zGYDF-zlC&xWDSyqyvM)XK>Du*Mb*skuKP-Z#L z`+;SLK1RDgey>4yfj;QfX+AVw7jPfP#!tIwFlv)S;2hw71teLoyXRJFlf0*LylOlj zbHn54;&Q9s{&rXQi^A|6FSC@)6hntAg_NPVo;Tgw8<65k^-rj{*S@{L@B8N0n@{}4 z^te|Zy#^iEJ!;zTO0@K*jj=WvUih^m3ytz@dg?gQ^}$WyOgidL?*Q+ z)`B)qd5{FI4mO&Kw54#nexRpIP2uGq22-01;TQ|U^-qBrdb-*;lzYLf4h~g}DWWiz z1rkl2Y)Jch(O-?q!%SIR*t-msbk%Ku{DZ1B#H${K1p3ldjO9UPmGcqiP(>~{x2&J_ zE%p}9xm+o`cn}d)R=QjoX#ABHw74^q$sR4G(+E}uia%{D(kj_nk47k{4WcsBdlZ9U zN8G8o)dn+JtEfGsEwI(hqY0K?BqBm7BK68G)gDUot#xpyj?a7_#ilBntLH!$gcP{8 z$B$18Jln|IBOzMzUW3@fO2syMkW@hqhLL{;e$RgssFXm*(@X~0+6fGfCAFrgXC z0eqSxI1yKDRkj^z`_NEA56wI?bU7wONHtYk|EbPTnE`x$DA7ipWiYtS1s)hq#6Gih z?J2>{D8E;DMG=0A+*c@}((~zIz*t(sRIRqIb1%yEYW+N{fB{t@0f5#%mEnaKzJKF{rJ;?JQl= z(=k&FkNrBz6IX0_q|jS3);P@E>*!p~*(#42Iwhj|p1jSUIp4G4xfbU*bX}DDt_&Ws zORG)b!gHE{2Fy_Qdv+|f&f!M2vp9sEWn60re#9ZA*ezP$YKb<{EsWf)G`=inF|18H zPU%$$@~PkrGTS`Vj-Yj(RQ=S{d`=8zrSG0gsv(>B)BNciJ(Pm2JhkKy!b9!G`cz)& z?gYa-s<-d^l}3zT4s$Lp8`lEIFC=`>V3SwuV$w_-84-+Wxf!!gz;6x)o_(L&Coe;$ z$+9_(O-&q?lj=;rs*RzwuVA(!$Y}7g==IVMs=Vx}uwDKu>9sEF$@x5nSPBF_=i~BxXGPkWdpMLA-39{f_z#+!=P>$QsCnLoEB~8qLA*jlOnN&1k zTmNBbY5o#zWvKlF^9X%Qp`GJmn-FLF>dNwgUZp`Vd&hV(I)uZ%6!!MiZGuKK2hcvR zj-g>E`x7JQnn5vb`<)vN)eF4kCt3F8Nd&>dLiHXb>meV>sPEE{Txq0-X9K||*C<{z zJ>w8NcE4le?uVI9jIGDwScG~pGq_3s{AUarc?*-;W_zZsXZkSvxmjZ2lp@#6>iU5= zOV^pT4o`?J)54GP0$AI2>wre?&r>cqGMO*yp42}Dk-@cEiv*U?V!|%%ct_hzLWy5K zHGTXu1J=ohu4MaSfVDO|)dkxbhOjfm#bC60-w3OB1Kjpp7urfdt$LV${y zCH-5xycZsvDHR(;BFg!mg=neV+edv-IKQTCMLT@Kr$zAfE)(o(7pu&w*mBD9#wTt`kwyO`_RTXrV;I~}aIaa?)vKN0mOY&re$v&wUT5qfca(MO{~SpZEL(Gt!Eh@(Nt`$nEL-(uIjGlp8*+Zg z`}m;}oa(i-6Y3I<)?RDt9CU}JJc>ufy@JG>v7};cSUO=9y~)Z~MlbEj(g;{;k9gab 
zacbWME#}Nu6joV(j!*j@JP?=l`$(kdvZPo4dob_ zigi?$1iDz7#cK4_jr4Lz;Z;=LVcI)7hl5S{%SIFTs%%U=Y+AK-or=r$M1)J(;Ywq% z1mD@qbL1Q@39LAx6+Xq3q`%w;q?|x6E6U`?uGFx-$x%+&&b)XRw%p3RbG05UWV{0K z1|yxfYJxR-$QSrwoDrYTH+>guJcf!RUeMQ`I6eCaIkk3YD2ZJ79=W2g+S{igS(-%# zqE)~}OZz&cyEUKhtiV>qAF)>XTNXbJ+fh_;(n|w)D~r$rWRnV|lHscY8u+k1%ojHj zv$v3h%4hhzjpia&3G3O>t1}{dXf?p3-!JN@@pa=-ig`tX36RsWA6CTy7}XhY5MDl) zcdHa#i^3RL7oDm$M~a;`q9ZD9bu+I|b26Q{Tx;Yu|nbwB=noVuP0F z{J=>^YLWVDbC2RD=092)GY+VG^;U_;EK~iH=*p&Ol4!U3^zwt7;!%&o+^mRysUe=w zmPeD$Vq#f(l7?ddhns?;nNnYH2Fo_&s5DDbn`~K|H1>{F-DPL2 zfZ4+0qkXs)GfBE*0@hafOsI3f@h92i3 z7lt__2rerdAeC?NfoN=c1U~9OTE4(5hv-?(l9zNJip+IPRci-doCfC z50oT^G|`<~`&+)@>-vgLQTN#WMUCtYa1P-oj~5QGGiMmLq<+Ki97w^u+h$(Th@dHF zQz#AVv5h)_g57P1V5|By)Bic%fZEfp2Uj+%wc3h3Xh!KR9O-~(S0PJ%o3G4*O~!6T z;&7`gvqirxhZB46W(5yKKPUEX@oMMp77BnAK{yMSHU|GMg22fFb76`_-ND z5gCGH6!O^2{N!~m?W)TW@e2Gk@vN#+*JyA~Dk|TaBT)iCs=2ErgoowZ?Tk%Lt6p+4 zpVlgsT=HwAIxlP21^Fs+l{>Q;$Bgu`ByXj?mzN)0x5_d1dH#qk(?jweP6mOG@crz~ z9(a8s^NqscbbF4jQ2|O^hc(YNhs?UvtQ6fag=}-6A;s)f2aR+Nu&;N1klmNO6HjzB zGJif_DwYvASD6wOEOY=S8}Br1gm3BZ^xB$|d}@QF)__Cnk_J04tb^nd#)CbGH?d#(W-Y;FcF_L z{A)9J^K#ZZMV14t$(tED&Q=C(IfrW<7Dg%`qS-k+VKo-{$+*=qYWBF(%pvY2ODw5% zxsK=VfxBqEGuwI0g>qPek4d`}1(ujf%i}iOrSJ#7ZNCsE+?addt)*o?b=slvx*fvxV#u1$PJ2sx` zeIvPux4Lfwrr8zxPc*`6+_|%__C{uKbUU-7F^26|bP2@L_pNjtivx)_Lmp%Bur>Cm zU3P~+HH{3SbB;nzT&jI4v6NF?kp#7?Bfof<%my!|Igw?34AN4K!~RDYIg6|43lZ&q?f=o6z9*g%2OHTH z%l=f7bl!4+(AX@}Um;JSi#a0Vfv5V_j6PEl;(A zyXE5=-ait-PS=U9ZG~Rcdsx|b@XQ)_d(zNC8!5vKL@DP!yG5J)LlkZ8*BAyjQqWq` zzcQ!on)Lqao~3PkBZ^6@cCKu^7Omlsa22@a#7`mB21PT%Xle9ge?6eeA!7+jn1iIJ za5%?haLy9Y+8L0U1-SfJGi;U>07PI_&al+*6^H}vTohl*-ha5G43PwRLNL5>SIO*g zWERz4H}-pTX)Mgh>eLE&VV&w>)Tb#4>+qMN`CQaES1NyOqC#LRR+{q;^w^3NH% zU7LnNJppU=1dq>XnSS)J04>_(4BoK)iuRYq0bRxsvTG9p1dUa-n(v|J=M=ua_CbQ! 
z{X{LE3$j?E>HbW!(0jsRfmdP_dLY1@?3kVb`BxB>oR#NTsOSEKkMEZrTU0d+6fe3O zB+!IQvlB+QRKwOXPJ<4gzidP-7aq7;!H7ZW`yy_saF4}4|B-$4bwPOM90J2ZptaW~ z{WbgQGu9*OIe2T?`7C&j)9Q1Y_r&Kcb3N$nOY74?O0B%08^UcmPMA$dun%xSkPbw@ z`3C9|f||k?!2!7I8-L&VjAZgxQ0E>lPg5pe$xIoDiJu*#UOgqIYxq3KWGec)krl z;KOBx9&!nGyL0Fi4x7rv1dR8KdQ z)<4L)U0_VFi`MQwc(x`cRSvkA2fD1%f-;{kZ&l-x>F>JH%7)G@XPR2e!{uyZqIZz? zJ6I8|IJoMa(d3yhX3#KBNzl@n47;%gtod%1u<#hke5z#EAh6q2GN_BCXU#iqBju4!vDnIk~gQ48v~heCM^SX}Er_e4pg3BN~~T5E;vlfXq@N+Z@@v*u4g zgS_$BUk6>ahKlXaxmQ0xHzRv&!)FveE;)^OELs?1)Xoi_P`jbqyOk8Tf?@-sE<+Qu z1<4*Rpy7U-Js)TOQw86+uS$m>!DfJBVHw{>G>oC0RLjhu|mv0jMfv4K%|0Flw zcj;#muw3F%TIXhLAG8j!bCzOl{b3P@>HL9GAAO|_DF@c|1@Fne<;)WfhgXH4QcC>} zi&&V4Q2d<9;L`8imzF$8R*vAKl=Y*+uPFLn=H#KF7tvh*oMvTsOh2^&m415+9VbX; zS~*0a3O;T}$)0vtfGPZal^lA2mrb?l>p;RyHGwq8?D3t59Ec^7l0~{SgWFqhU}8p< z5OH)nifk2i+6ZOgShJ@7meh0pP|td0U1=6E$*qP99Z6W6kW5KD+p4M)?){6FV~-oW zV>~4WbD#CgU_j2T`IPiUqoO5zMhN3Bv51^+LKK+qT5kd*)-GWNIbS29`x zx}kWv@Xa!x?e!tylgx>Nk!75gWyf__$)`$__82tID(+~`Tg1xG`H(k!50^45T=^GA#L zebfAX)NZK_SFN!|$V@Ao+yEy$#0Ykm1pyu0Z@t}BbGz0X)u&@mdE5M(Z*%um-UqaH zj}#_+rMXCmj%+2L$NlP`Zy`hUi>ofAE3EG&k9ADxRvaDv8{1?ZMNi@_w^$B!vJ?yL zzurpGNoC}52R|Q(5;rR(TU>}T(J(lp#OgJ<9=}?13TKPOd^}QDP^lnt0336-`vffN zhY=+^@0wjNVURMu+L4qx#(L6N_}f+`?Ooo|?Br{;LQ$y+Ba*dSwXx>d;K6 zm*}$F?R_=1L-d`+F3L0R2{~mqS{|4^%VZZ7J1{wpuiIz3E;*8(HQ?CrcW&6eO=ZaF zHxQpgpzemgF^YQ2CVm0o$H3H9vy!u}zKubD?FA&_y!~xEt0(AV*az;2k2JL&3iQjy z0-Sp4fhq}3_ay@MwRoOfK~qlzpqBA~qL^ASyfeDIcz@c~y->o7h1B{PBVYt&I7JQx z>O%d#g*vi#3>-^kSxx!urFn>3l^j@p@zqrSSalW8e?rp{3#)V94hT=+;I&L+BIY#-0xepeF?QgwgRX|?bS^6 z`ZPXXBUecCu|a|Kt?waIbz{9W@(ck`;6aA4;wha9HYA`PT$sIE8oXWNr(Vdm$B*Yg zF(QI%>00GV(b|QrcNTvx1`z73>>$|n@@>nZSAfG{97wTzaiQ=&<`MD(4%t~&w;=|P zLQF3?z@uAzFuv8z-et)T?eNKNR4~<{L=-)nWHJ_f^SFObpC4&4As1x#c_d%bSvW@B zzp<Q3|ArCYPFy3dgcT|P19W6Hrylwq^3wN#R{d3T7H(@Z9a5iw3T4* zJelKJtu4Yx~j`8vd@@X}`gw|+vPma>#?esp9uzS?5(+}>%Wg)8>IFx&q)vGlu z<_Rw5;m3|&dBViYeqA-Hq5pXH7^{E5*rAI2w$W8A`@%Yx99LfTM)HTq;urizhzqh! 
zuPU`s&ht}j9K#zc7j4SJWeK`vOK9c>W(Z>enwA&rtA2`D`xW&ij_pX=pDs*xl`eoP zTlkXNVUR&vJ|#)A;m0D@Cz2GGHrx|JYKgL$T@Z%k_$Ukop7Gbo^;`?%_*MmLjLPT1 zO3Z!VN=iTu(M}A49Ca?~d!*5V&iqfcBRI>C^9){cVI&e292wU}F3rQ4pC}gKFVFTw z$?{-wA#2y6enC~~$;vyTKXrj=jjszKYx|}ZEY$-P-ugK9p`dr14M;M5a_KxSlxP@; zB)!l{3RXaP9NOS(1QKC)_+zQ+IE1mP_RM-I<>^J#kc>@n_K3dK5ezOE!{B@>@w~8! zF#m{9Xo#0*7G4@hKa9oBc5YD*S2ogO{kV=^7krGHCK%s2b83D1=)zu&fzXA1K|ivR zX(0%);VBYN-VXfA5+27SCOBy8+Payy8*7b+Y{U&zd7Mavt@#~;1B$Q#NxvC#En*XD zuDg5qZb{b)|Bb73%QTYt^6@;|d@9o#P-_vTkqN+Qjtw~mBRzPQvNQZsW}T2EKQ}#B zUK&JukgUJ7FH-}=5`&bYqYo=;R30n&I8W_7rnHzo?v9;Z#sR6W+bLl9T*(4a{N~SB zvnID>?DrvS!xK$;1o_?(Omu1I9Lf5J+`a!qukL|K%J?@9cETK&a< zZw%?YhfwEY8)V3nQg&`cD01 z-`RELKW7$ZTEVmZV-JueEBw{D)!4!ivKKmQH?nxD;026w-0zbUri+|&3Oj7ad@Hq} z?ofn3xV-%NK&9giy%Ga5`li$IaYvfST2^(7Jg{aT0UyBr-LYw{a!hrOusTrwQe3>2 z$swq@{AnzNrqf*eTm>L10Xkoh?WFgWhnt_k{?L#=39_VE$jmgdcx7|yj%nX2Vt&=R zly*9W(RY?rGiRE56Q=!DJ!|ncPVHJaMmhC`3n%t74TrTctM)C=gKqg160Sx7C5~qN zy=j=Nsqcaj5SYk|XKt@WU;Yg*U~lL-CWtPW?O^1jnjy-JmJz7&+q0{6sWpz%y?FVe zetXeE=u|?qFsAmq8g68}HcKXo0Xx9Ob168oE1R3wyimuQF;LBO7S_?dW}>~*a7LE2 z`Gr&0N+DWl(4Ne5rlykc(}{opWiRw|O7#H)ak}*^H`=UYO7%(ChJ%0Dd03@}V;xBI z{3${c^irMg8oHz+62yBXUOj>7iHUVt!mPF_8~s{)J$vvEKUoTh;;XCRzK zv|Xkzwz%0C3Q<)VKo}apGFMf}6FSbQ>Sg12?zFe8GnY9Fn$m$Ov z99v0s$%dwpc9r#2@W?S+pCrxuwP?(gAIsI?u-ey#@T+5#v%6Sbeawh~262>2nzLGn zwjo8Sznt{sv&|5-BNqveNfr%o`wb(rLo=UZbgpI~A@;xPY5nQIQa>?pz#n}w_q--; z4wo$klC|7+It5;lQw3$VFAn($e(c+2+ERB#cWj>@7ZCr(vad@1vVZFGg~Yyn45_VO za@WKiJxrv(L|z(`mDeq}MLIFtM)tBSH(9F(lfhGou#!^>x&s-utL^-OXO6vfw9YHo z#u#$5OVyELoNejHaIsYKGIkn)(KUDCT@&0fX~fbt@CQE$cpuWU^!I?-q*%636J1Vy zEZJ4g0?Q23aKn=i`L-j~`U>idzPg=V!GSAdT6f8Mrc#_FD;$+(;XO%6|A7U#`8PEm zi52~+OUm*}HRu6jm8la?r0?v_SZvSgUtO_vFz9MPYjMMbHMZ=Le{g~i4eTHE;Xlei zi2QRmf|?IgQM(syvl?cR2=5u_pFYXcGv{UsQhd*`pbhn| zZ_nhqq90vQep|66jcR8fW42x-iTH-rr}X)#<%VV5d4#Jjn1 zKvr=l@-J6&3Iy0+Pj5lr_&bu{E~bp1dZy^#g(A27_SCeJCR_b$KzStQcwcpV@CgS z$ls3hCcH*Ih_nP1vGI7QMr9;X5OdnBHS(|ilcI=94LONGYDR@3K`CsptvGU*_Tv(ZjP#W|p-F)+ 
zH`;V(oB;%JwloMR>{^L}DQqDmby6{;UITjJ2CS&PO;rl!E~Qxk*Jwc)@tK0)tf9V> z1Lf*LnyivAOzZ~=Ho94@#2BnfLvp}f*EEc0hH0PK@UqoHS=Y@vmsp!}ctsLbLP+bewL)0;(%!OL( zyA}u;jv)^HgC(6(WBA@)ypy91r=F01c8s^k?2AtiLMQt^6de&9o6T3Sr3#Kwdmf6$ z&}Qb6ws?$az{B6>QZE=f^jx&&CdA_2GIH?E>igrbTHiuMpYH$@H#9euO?6z_-cq6@ z%=W}@NJH`pI~XXYKc3GNE!FpVf^za0q{J!%Dh0y>A*GuWH`5<7&A<+k{G(IQk02ck z@B)}quzXo{Oq}4L8@v)8o*O<|^wNz4$$!n^~&B)Pz}7ZC@(^% z2!j{pjxZf%_pp8ilz!^k(11(ahsr$sEK66g)2e-gYBn+xB(5IRaJ^2KFBm;1I zWUQ){tI{5eK!}lgW?q!qYsYex# z$tS?C*DKZ3n9FukLo$s=d%V1{@`wc^b^5X-%ch zO~farAkFOMONM;&UR{}0sQl=~S`h*pvYy?6q3Y&_e591vpeS+;L11us>)zmb^R?#+ z!CT)TzeV_pDDswXOUR)*s&7yffIDC-W2`{ueGsKLLT>I3G4+-cT5Aze7#ji(vF@W| z>Unu^j9lGS38;4<4#UmqX~4G*nZQl9$KV7E5kg`Z#AqyooK3opXp4`bH$P&PqO z8sC{?)qrx=ok--%1AFU~m@o&=$V~%ARNPz0BeTp`wsI54h`A;`=3R<*JW9(_wa>t7 z_V3C){(~zO`kXl(6(?bX6)UySybd9moRt~=L1HYSKO!KFaYqpSky@$6~ z1;k1-%zQmS^fz`|d8hObY<(oVBmX$4gkE)RpPXzor^_=^%C7d?==Bq5ITc_&2m9?u%4r+D>TyCMs zYc0T_c@1KHv%t_^>v5`MXd421>8yI!TbJFr1F}J=I8Q~_4E%Pp+=>2TMSA(N=1;%X z@C+lrr=D4Cqp@t?o#sA%lP?}?n^Yo)L-#mD+V90{)a49v`!AN=<46|>1;^I%4DO}S z`y4ReMF4Dp7w)p=O0d$Ee4B<@C{%iLZO73U3#K$Y+8)csUbV!&$DP6^z1#B29IrfV zy9>E8l`ip?h(V@X<$|fwoWiuphdJb0_E?{H3UcH7uI1Bw|H042rkHi0k_RjAx+}B{ z<>{&qb7pgTBSpfZeez%i30iOEW2@rduQ6u1T@^0b}gv$$GcYogeBBOcI_Y>@apd#xl zOfOwLGKby$nwH!%uRdrvWR88kxJAluD*1_L-3*)X$!#`t+vBgQ_&I|T?x$G(L6DU! 
zjwC}GbHtMPE57WJIh=N7u8uS>4NXDeb{k3)2^LZN8G4UO8!3KLPmpo(u}tbPAA3k+ z@K*vz&P~l+tEL7_p7$nFeksukaQXw-5V8vnsgN*44-mU>1{)DJPohb zDaXli()X|R@lGRUv9bht?ITBjJq@8J^5?Ro(Jq5v;K2Mdr*U&!TF9}cZ~1~-qK1hO zXy_bXaPx7>Npz#h*yAB&;;Ll%4eG-Up9&7pew*-Nc12!TWXgSNIUMTxXys!jpL`(9 zHWDa@6}}9;NmX+Q!!DLJ@qPWkDY!YwmgSMizpg3<*@0LhkB&kGbUBRi#4QXfj;}9U z7?GLB_JD4!nRWSg6td~=EdbYJ&$%*z7YFO|s1D=P;NfyPoB%NMhM31Of45DiW?*_F z%Z+z9=)TF?G=jl-t^Ur^zjcgK3R}xBs+%&lkF?2$JtWUaYXt>-Mcbbw-Ji@p+tJ!9EFjsQ2?*V0VOCr!;?GMSl*{qE@ zTN?39Ql8)|(cyj4S4pN`li9&5OoUV3u8Q9EAp9LIT<$}v^H-oDzW`cgQ$j;Bq1{?s zt##a)T8pex;0JRy=r+CS-Z^&r;?&w-H~g8uPy&WfrPSYdF&zhLZ0FT2B^#Wv^O(0I zJQNHioz5)=UJW*0tC5bL%lji^6>)M-ShS8676M_VJrb}`LmfE%V2PW7UBIPhSF2aF z&ZWKgVr~?{!sDTZBkyc0iuApRb3Z?F#kS($u9U2g1sk(85RlfjD}!5V*rPR zb~|4Qv+B8TFkfQjoC&rrd`aCxCI5LyuK4m+0scWx^Gg?%m#pb+W4=#gRlQ`oSIUIi zxvGAAL0t}Wp!Mh;w{u+yY8~E(1=zK+h~4ssHMe^ViGj5EV(m+7B>7|iixgYqFpY^A zl1>e0kCV^|yg{~%nsw#UHAX2ptOMpsa?`MIaf6vo=i27Ocmt)z=nNdM$bG9j%XH7$ zQ4B3Hir?_4=N8`Yv-<13Jz$k&_nxH(r$(&b6B>ARODjba^!RZAF{Z_1Q8!hj7Qf>g zYUk#H3z5$`x9D49-!VLCb`@(28|Co*6N#ZO7V}*y0@e{qpM}_%or_dQ!b}Y8`mpV# z^TTcP1kU>EE1|iv7`*M1EtGYIVbrClJyaD_yT4^d@8C|iG2j9`GlF-$#WS^e`J0`E^eVChNfJF zz^I7nVa$nu&{cv*!h2Tal*5wjhjMH9GhCC$cjb4A=8u<;w>S5A9)mjyuR*1nQat5o-2D(?V2b~Ds5JDCm6DE$l@NT&0aVuBjOr{HYZ zhT8DVui8v?r&h|!o_t+o=p4LA*#M$rGMBDfU`*gA77ZCwfu@*UbFk7FBjq}y0t>t< z`2A5qwV0kQs0T9);2j4c_8W{NK(_{ttm{(trcDM@-fYXG!M8|!=MmfBr)2|LmmW-l zj&pbL1peLw(Gn{IxQYqavB25hpC$J5T5kHEADN&pjt0CyV3oF3Z-*(luG&n(!Mho7 zBA5#hku_FNVPrOpfFM^}_`m)}){kXX#;)7Qtnl46(cV$oR|Qv@`&oOO8Y=Ym8`P0; zl7_;ZUf;W?9Wt3p3LjXNXKtWpgN(}g6792fY$-R2TP4H38lhO4X#0%5N<<3bhW{&Ttmtm|X*&u(@Na5$j=$IYe*ExX#UG9P57oQ$Vc0 zu~yXF<^A=a)hX_4g~51HJd2BAiM}7A!)^w3)Y1lNsD5e-O0#bKzMJo$d)Y}2O{BEq zqs4G`pKwle#DacdA0q-eXZ3C^jPEmD>sPp8_8+02ZmiNgFy&g<&$&VNkzT*iV#kn` zRE@J-;8>9rOHGMm%R2zJe6CE>q6pXHa<)+}T$K4)53gDu)0%to0R3nFF|>?vJze2C zoY^@A$TjnrKP3cp$I85VnfkR>PF?;U_XN$ozV@S0W!}HySd#q{Vii{uvROp)v+A@Ggn$YILqrrg zqe)2$Y*1RhA6)`qD>|7DBb4>^xkB0E423_XvOlH#jp(d1NH3|4 zWfd)}oTb>)WnZ5b7C 
zx0M}%Gi1DuY}j;FW{+G8w@a27BT0S<#ePc4`l4`Kxrs7T78Q(?`pg9@Ck;wjfi={22^|cWUem5>uK7$v#xvqxzYg zc@*|LwT7zuC9HJ&bPVD>0cq9SM672hww%S%t*0T0$6y)Xu6g;9DosmGtIG9-s45RS zs{?McP^M!X4p?Ee#ByactAaL>u)B%UnV);s0P@XJ{A4pDOh3@we!L(`HZ`2-@Z$?4 z?wO}k(dTf|Q;L_~Ym!T`o>FF&QPQhXI(Xhopdo^EK}}A%9Im?N9V)RTz;2-)R+M>q zl!=0EY(IudOYeYYEuJ=Ti{cD}Lqk8uNP4q7)W$n_WmM4K7ERN5*&bR04)Gq7brgV1 zOMh1%$e%I7v+KZ@4bvJxRjsjKuS zAJs_+FyN)3drOAE$%kOL#As7R*=tQg9*N2JKb%i zsNDGt=Rwm;I+LrdWIG+SivA0H^YXSsPlC@2K>^(R z2p5+7X&mySGiIdVyE0x!bB}4(l71U;Wauk5c8}CKg2z z|E$iq{7maJ8yArVSOK4Zv`iqg&QQjR^cjU;+{M1vkc3iz#meGRbVbn6R^Wg?*MkCr zraZx9KWWOu#6Q`xq&UH}IVF^7_K)Q`{3p$cs6EecZFtJ!I}ZOc$Y8xn*%c3OpKq`Y zco26@uN+ausX&~=dd5pCBW3of-K6+@ z;~?%wvPX+nHHEvUb@P)QFVG(otTIO&i&lT$z~BAiD|Ij3Q!p;1$Wjd8N$y*Q{IkSdu^L@Z%jXGi3H2!$rvsXs~OW;s(+v zAL;TqdRAe_4Wjl9bHW@(iL8PN1I?$onDqmv(j_dn-%;}?L5Up${eK;M`oJ|$4dQg?Bv)F(K zKx?XQa{dHw=XD3^p94St1Wb`oG)J?Kf+^ft=U4X$3^trc$z@ym%#kf1^R@nL4`y<8 zg=^gd{y0N%zFL>*V&x|wY&gZ`4YsUXW+-3PCkw9X6F;fT@Ue@t-)@Ml8^D{(bobPd zDmVGHB`ES8wi=ke@4zOk>%G_V}@;&NejzRo3ocys54p;1N3H#Qe z;rIy3!=%Wmy-E+_nF>u^9x@4CT&Er#pXxf#8SL+q#hpN@1+~WK6uyUMpDLGGwvuUT z5D!@V;7ROgxn9c}*qqN>U(Txuar{LnuJ!&C=PoK%=fLFvM=?! 
zRp3~e#Y?S!;MhlU_2EEgs_?VN-dMCsE_z%mv%X$mw0veK5JsyVr^}5e0APe%J#r=F zTY$445^}wx9f`3FN=ySpF|G7H9t(jwSz`ge@A{JgaIGkEIDk!P*@ox*`2zR!RBoqqP0RL{5@(q=-6Gk$z0Zm5?mP z88h3;7Bmnqf#8C%*s!Fj%qo#-Vt}-X2xhB6xz#8vDU+V2LTMVVRa{b&IZQYyqhJD{ zWh`yqt)iWwmC&Lsi1x7*|8KMap@k{D|Fj&ywpAJ{^Pi^Vn?*8Lv!wxm7E6~4>ImUl z00q};<5haEI+~a7;d$@T4hi?Fv#Ke-FFHId6qA=cG z5A$~^-RE9)EQM*}p2p5pWS9`6ho&GT7~zf8Ae(2<{ApNi3_PTTaapuPb0*9X z9bZ@*sbg8ZkvdvM6chWlNz+c&I(;llFec|3BV}nylVyJkCsOvvaky}qfw8zm8_VQs z${REo{1qSTV=+d*4~q3tIC8=$7>NLVfd2=RbKR}%ehkgY)?jv!)hf^oD{H!WqDe)5 z@hNQI&N^wKRt9r{GX4s9@;=!ASB$Y3t?Nv(#8yn$Z?Lhr07E37QyBdE=U^02Q8I5V zS)$Rb8Z8E(-A0_NZ2=W$Uu_2oFgE#}LQB3R2ppjX?Uu$VgZ2oRneQ_?^i!HmzFr0x zTGb7jEGrk0?`;CTgZ3ng|A4VgT-g)whX#54%XC`?L^XRW+@*2`QPcK3xpbSXYtV-2 ze#C?wZ?-aF2hrbbKRFv>|NUCi#Gi3o$Ctt3w2~9dAo^JME5Jy6fgwKDRkBupwfPDt zAukcIZ3x_m8Ir)vh+59mxOei&QTn(ZL;U`lpFaMiOl{qadpM&xJip>*~ z-7MS6o(b1T^LK3_^aibPm4_9Ec6Of%hrgvE3urO!hmB5O*_d+vGn4Y>es44_3(oX# z?w8>3@)pZwm+OsDm7nSeaHoB3J!ntks9Nz!mvj!%rY?v0c|ZtjQtx;b5s>a`hef~U zw#v1%orvW}PeX2jFVGTWQS}!ajN{R9Y@*9wuLT&&{A)Hl&lUGTaYye#E(xj@X2=*) zD?bi+N^!Z#77YMd`6*tL%Hkb#Hr`o3yUd{Le6$1WtUGI!+v}3DC@2ryu`kod%3=qZ zlKB!CY=hpQoB7oY3&m{VX{*2myXbszO&n~Au6UA0SAGe^RoA`z5++vFH06Ak3OC|@ z_5n2P4RH60s-B=y;a6#gAj2z8ROLhB&6LL1bpH5IXYr%jl|1L@D{GA_8XV*ARI}Mi zH8xzlsIdONkXDHZOrcDu8fpQN^0S>9Vg5}`bfv>`Cj{1T-e>kMz8KWV*t@Jj&f(Q)?Vh=UcRxBU2C5D;PD(NgltFg;k`w$}YN&7arF5X)x_S zl)`~M1Znmcou3KHcbRZ-!B-i-z0Rgg5vyxxo}V!N!&+kljbG5ic5jGNR8@UK*81|6 zb2}{8d5XLZDrZBDzh4t;2C2UU(IzI3(8~Dpdo@*EXlO7h$2Iw}7CX3)T1DM>fbKHe z^b_4>=LuOhR;RK}0rPw5qOCUVbCt0uUE}i26g%wdM4J!NU*a{BX;^_MT7eHQMy=;MFO z%g_FG{xXXH*ZsoEe;`_wSIRuv-(gX$1wf`hX0B}hf-;!Ni{0sQOez2G(~cLLDI;lyZgQ@o4d8;Iaurv_aFO;yfkD*3|~u zs{x3Qs_sDclG2!>4brWX?P9xYUvZIAMdS?WaX=hxLZ@bY(reSp*C22YS0E2je+tN1%r@UC`9$S z>`q*TTKA)Vot>moGkW=ab1W+pcumU?=P6AueZ!=i_~qjmtoX8*6^(7Ejpyw@8qUi~ zm~N=K_Dagtn*(Er8^;mc@=FT*oXTG}J!;DHXg29QWvtB4Q|kDtDS!#Q1AO0RigUcm zjUn;J4xn&7k@M=i3<`T8FH@myFs$b)?Og 
zF#9;^cJ1BZsJ1oybdnargbVfs9u&6&Q9m}=la{LhZ@~0d-Wji)oC6qEPm|@_E3%MB%0O@Itsqx_z4DdMI9-^V5uIk`N?XSFM<5uQXcy8mZtb7Yudk6$J^C? z9zVnlqfB+JqisB)U+0_Ez3ejppaJi-j52a`e!nfQ5yD=;XG8!{{xW$<7w;ZGWBSE8 z82S-qTmYBb_*4|4DaF8m7@!&Ci%dWLjmADPWEDpbHG<0a2g~IeVcis4g}5 zA2{h+&Cu9I{d5ZN*1c%EPP@HVYpgbSi&6g|kZ;k)AJgDt4O-X=1+hyM<;wgkeg1^S zItzH&7Y1?8<|fC0UIrXjs78-hoxANY2lEXisTC%-_te>HLo8ENd&I-(@EGYM=m+yC z{Seyj9&aLwZe%qFPZ)do93r5drTI;yOX@$OzPpWH*wwFie~QGK<6 zPA+}}4A{JXIQkyC!^Z})qgf9A7mduf1p*%WOoiEnn%^SJmkTB{3ErAsGe z$DW={myDU-%* zx$jq$CqyQ+K(WHV8q;YPuc4HdGG?@BF(=iAho?(CiHG2asNKa4+V?A((^h5xJ zv7!w-c&HXeF&N|oI7V6+nuApB$+S=pu6E`6m7$j5Ro^w`sD42Sitn0!xm1;VR&9nc z_I5Q%%;!%XF3DfLLe`zJU_loRMK&T!UhfgN-J&chh~C zr!c#LfnPUO8k02M1%nTJH;F=<>x`hB1Uj5haV!YF#>(nOwo%lcta%ykvzmD86(y($ zbf;)uI!`vO7q!?}6}_T>l!$9xI%&At+;lduy&N4SQPoy@jXooe63V5vrpU%<#fdsA zjv&f?^fwO}Ot!g|?52;$(RDzTZk^rRAluZ+zH6dvTVt>|L_?*zt@uS{JO*jP+{mq4 zi0blX%2*ujr4J_8G4ce|wwD?>Z9Bu6O2Me6WjONCk3P@s?Hr2bP{`{r+ed|YXhyKU z+RARzX2mh;hhM9+gF4+Dj|yUZb%o}y_Rz^edZ2qMYLo)D6%^)PsX z_V2l09Ix6qrhfZAbnk_HuxzXLHU_$yI*_B^9AZ15yRmnCr2h>@C z0ybAP=e=f7eVy@B;et=W=oo<$tvqnoZ&mS>)ohioD>JBo-=y0|YT`mlSzMy8KW9;m z{l$@PG6!r_`!m0|5$@{4M!s_g_ zIuK4b&+Niq`@votz=G6;%*%N}k!Ac&VEd^SP^z+mHmdhj%gc8%WZz5gv#H2WcQ(X4 zNR#3WdCFi>cQD%N6nmZ$B!2?-ZDo>LI+5(hiE5C%s_;c1>)SgNdx$n3C+BtfAZ`0x z35*UZ8Nv)>HOw)(y#iPt{}o_&Cwljl237MwI#+4jsqydj3&8LIx@`lXH)e+sxZU2! 
zn-RB<@|;iLUazN35Fi&{QurFf#&~=QE&mW&HhbFQr(1#q(v%(?9^At>H$4a?@gkbh z6)1u#iA)0xFC1{(t0rIDEiNzxd$0jjwyW?fc=QPger$$1mQ}N$1VW9SXi-FsomC&s z1J}6*MEhk&u-$>y0oZw4Ly$o{+#ZkfMNJ4Su~`@FZ$H`yBn?<+$2A}vROX7|pakKfWQZMJ;XD7sjU!)tt)=FU3`$d+XWyj5yZ~2Lt_Z zN`UHTLB4$-p8}fo$u@h?j6n_y3Hu+Grs1+S1jC!$Im{Nf_mCj7g7jdGAL_9~Z5lvc zX*urSLU4_T7LoQUyUQka;aB=%lgWbR;1G1Qy(x^?w2ur>OXy_L_e}drM?8uylwN8C z`7|4%kIk%8mv=5Tpkh$Z;t{pUuF;2+gEd%W@w*{mpV&ezTwwB^UUuWKl+;mMb$1MD zCGJ+urWe216{O#o$`>2<-b^y$iLD^}z^L!V?75PGz2r8{ofg#RPl9(42f{Qb+rZQO zOy>`)!t{l`gFhY@zpe8*rT}Z+wqeb%m-dGf&H(;rzyp)A5MLbSCj=2Pel z|3M)|$<|}oVT5*T=AQIc?~=7^wmQ~N-gxBSF)T^{=j~Vj@7#Sg{XZ})E(>J~06|*O z)?YAc3kjYU*J@;`DbBBC=*eI zVMf?eWpk9#3{=HlrXUAg(i~-05=ZaXeDur^j>omus67}7%^P>P+8#QEY{#}z|b zCh;};U^H1Hchs%J9nDdO%4K*x@L5PVoL*%xj6c`;q8fQQ8DD`XPTCZ=o2$^+r!iNq zgEPEVAI#<{VR63FkWrBK2Ef5R;V$y6@qrjP~gy~73&!|tA zSD>Z5^gg(=b2vu&UV763ZvEDRghP#Qpvf@D%gRu7nc-*i zO*mtzg*UW=0^=Qu(aSJ~r)avhmp&^lB1p(=d*i>YhJ86$qIl98q%h24nqb_K3g~?b z_*6KFR=zhF2PMw=N|Q>-rk#&=H25ouy#vkkbqyt_!`bhxdmt!rxTX$7AJiJL>gS~9 zq=E9vmzA-qK!X++nc|1EZO;zIjJSHr;?T0_RrUw9@wmEP_u~^)`%xMC8;HMSH2~GL zrEiYcW0xBN)d2=a2;uLtqwKLLzufS_(_VEbRi|oyjB_qDeV_?N5*|@Ca$9hGU*sW? zA3<3V-OTvCwnvJDF8PW;r}~ELXN4sWh9`|5RbAa+9+UVDO7e0zE{wiGTGN1wto_~b zgdEG;+{=%b(HpPamq9Af+a_w50SWDoj39kcp{07&-L^|K7r>kHgS0T)T^B9D`Ys8y z(A*yjKe}CN(50$s1M9Onc>lW~QfDi!^IolSsO=Y@=i&;d3kWd;Ee|_O=JUOpX#8F0 z@73%ThB%gXARrH%8;)NctFrsMWh{^K147iD?8ZghN4t9YA03KdJL^zI zrk`)g?O)4DQZKtj3-VAKOdi4;b-Y&oH8}5^TpnALb3j(WzL1Y6`DCsB<6e?8q}ew! 
zhx6T;^OC}jY{(F5_CDVD@4BFxaZ8s-%d#WlJb zaDK;IY+FbAgGW{Q_k5n>C3`X%r`gUKPZHEMxL+*n6ZR#2IQCpWAsSvut%~~WM;TB) z1r1~r$c|nygXH}_KhhnqINzlAWru3R$>oOAr!&Ys+}W9GH(u-?On+M!4|F%!Gw_VC zaj;=bbEp1pubDx5py$B9`e2Aub%L}DV`DRj zu)A#GoNNtM9~=BqBdnyXzK#XO%S!zMZvR{-&A^tnhKp?t=U}oga{T3eIOPeeKD*zJ zB_*?Eio0KrA6uZQ4FNXkZ!H}2I|A_}U^h9nOYGOWhYw!IHa6GV`ckB#_zV-7K5_?tgH=} zQ#9v8NZ);2x$E5Lo8t);Sq{6!BZ3^Dy3SAQ3ZvHud|N@~_wWlRiGC(LNMrZee!qHE z5mN(UJ7PR-&+Kwii}pmO5Y=PWaJqH~MvkITGzmXN!zpu3+Ru~OLL5)~hT|pEM}{~b z2n(1#`w}0g?5^`4^ocv5u&)*~()K))Dh1tk9YvEP+6k(y%y)Y0`?~f$GAcPr(G+o& zQx}eCT5mVUQ)o{PVZUpy)U4I-O%%;H`QHcDqn3SyjR&1#IeY2i<4`y!sf{ZPK7C4< zz?luiPpFN#3WpomYq;4LUts{d(sr_xug9*l{N5$&c}r4M@;XJhoZcleyS{h&;?4}B zgeeM;5q+U>E1I07IGNuA!lKBSa4~=k7A{PPB{CwhoWfP>fLFz8;8le)hKhLv zM5}8dCCK#S|DoT`Q~U}rL2;s1JWts)DYI3zdRV+lTZ_I5?kRI%8toSkJbFdZrf0BC zL&Y*>qZlw2#&Z>SS`OY;tA@ZL(w{l*R7^i1*>afnxGPglSP9$G%JiNyskVW`W=Yr1;rhR0WKGsqe+avAE-53)pxA%G}8^LylNT6 zP;P^RPExb!bd_{c{z2wv24aguiTI z4d&e|7RU(@k9N9EzYUl6k|`of3`8r)EYU`EUXEk(rTJ{7G1=zNDil*2YAm&gjMz_f zA#{xb>|aXDo#sH<+2&ZB9BBL5 z*(O_`6%pD&{IEsapmnjPm(NjxcqQyQl*ATX&M`!E#*qy_e#i<7T*Q1gh)Mp!;zF~r z18jMi#5h%T^KLOt8l+I(F^^w`@l* zq1&b-&!Bnjgd$r8@a#mDf8O+1I}5)ANx~Jd9dN>9wA@t<u_XR+F5x_&t0LFg-v84aO&^f&!Z9pzu6Dzrkg9H|QMu3^@c!jVeh2}Rf;hHv$1(UZhx3Ye;&;4)Wmi0(%TzBMKiMoL`0vCuhiJr zdV}h%W_{2l2=}TX7F1M^hihU1eZw&}9XsDM*{24%9%r7?c&Ew#%b^Kan*4;ILEm51 z=S6nMzx_qb$S@ond^N;NtGt4CZQ7*HH}00|%zmfYIo`j=D^Se@;2`7jV2$=d6u;=f zC7UVZY5Akx`d@pAirX|p{W~itOKY%1_o2G{vm<&3Y7PYH@vY9R`(!OBdUgIsUtH-o z*?~Pl@gs2XMW$D+pQCCw0vHdzX%~B;E{DejXr47qKO!T(ZPWFtlhpv)&o71;yVIjk z1KU#%ijvYZjj))jG;KO#Z=JuYuuFI+%bqE_Tg}{%U|naSNhcJDB6-G5Kf!bz37hy4JXQhIt)YbFfA;OZ@Biy&CWG@4tV7d zlXYATq3BSJ|J4am4K6!gbq{jG^fN=II<#V7NtQls@wZHNkW2FfcHKeA=PVzCKDWN?N^qiW|niV6OfBejFPHpVkwW0GU?&7e`*gwWfFjU%zR6 zhCE!RP|6*u2UZ7WfN$OtgLq)rhavGYbaEIzYs8+B#rN3G zUmWpt3uf+GJ&3n6_-{e8l$P^NcSe@HG02`80>4q>55OyELq&dP$V=wQS(;Y)au+)C zRUpSc@qAs_{s0cUbl%q+Ru46VeT^Q_;R)T)^HS);rj2@o<@Y%x7ru24E{}xp)9ab| z0Tn!F$vm$`ogkM4t91G!tLCpy{yNr4 
zN7l;A0amjqvuhFM*UPOeh|*+!7)We|%nw`C;j*>=zSU~-uUoAq+5eK#^_NEUL(&4I&l5(-MY&Z*CHM3AQ-&`53Ju zQ;=DPpYEm04K2>=h?60F%4k-dsJ7^5jc9RNMX{6&p42&BmCg$~W1yl9Jb)DM0UEc1 zP&!8$%HOT|kp^*fkoCUFgL{ywZlVqw9gP0YaF2+V6V5lOj2D&{VT7NL5t@zKV>t!lVZ+hU#Q=TWcTLKW5gPe8kY9e1 z>flYX6IF_*43yA@C(#~C8oQL{PeE)Wc94(QeuA^8?POLge9Ibx@>{+9x0Y0~)hW=K zD9hgm7M;+U2~B(T5M_-p{W%5g;7p2>EXn~nypzQR^bK%Z=WyGDR+pN58)LYbq&cx= zpRI`F@LBl>##oUZ2oriBNEhs9KdcTH$Ej4Elt(ZC;%XeH(sRRnQ^jT57y&?&f|w!7 zTsF@P%GCyKY(+aJP}@<0*)X)Kv$;v(TykUfty)l?X!#kWUEvLA_`f z5mXm9DZPy{Akf{}NjL$1wy4WlpcF@a(8?RwUh&xaK-iUzWRN_AaMU}u0H`hPCbP$8 zb&XZ!$(>+fev>N>)l_zZitgf9^xIDxtY1$tmGb%E7_Jmvkx2{OiZ3@%23Ai2+AtbT zYi~wd12KM`&dNGTce|fTJ~8+Sz{Wc?*M3yR@r^D)pIpJVg(ZazCf9gB*|8v3x0^LnqJ+}-esE?uhaog6SORLfs`*$pz$&i;rtox0Ghzt?BmGT3E5 zRhJw*n0;6i9~;6tosJv5{17b!0||RyCa>_54K)BNK3cbdkU)vSGFR;@I-%`2SKYfj z;Pd8Cm7BwE?Pvs1;5rMX$F$Gt!s|}AX=)JtSFmYX5G_;cEBeH#Bs?%SZG#`z4Mgs; z$pBIg#(lOpvDIW>FoSq!1C$y?${xLbbhRNIqiL_A z<>+66ez?sAs)`Lk*=cJ=4g>2Pu6fa!R%gSxT~Ra9@eM!nbg$Z17jK(t=hC-L{yz8O z-^yWYtwy!Sm-~MDe$76NR!nQwmvLqLKOI^`9e>}HKR=S-aJo(VT+y%Q9QrbXy zsmkPZtLnf$J=oyi9Ry7^VF7{^UfdO7_MGC{tNj^ynp;k&h1FMt%N!@bA^nzX*(A{f zo&0x!JHFWEM~`%iK!Z5>DHLAL9DZLzJfynpCN`E9XXEb>m=oR20s0k@MFu~+i%kt= zjCT>l%n3t&-U&jTLwTh?)`RT+2H&)wH3uY}_Ol>ECyZYgdljM&#JF|;^yN_+wFT<5r3$e5%w`1O53XI`0Q(j)a2swv0u!;X)=6tTn zx|x`3YV6rAN~v|`nDnDlu#J6%%-_n(_R_{b32TWAPxZX<$O@SQzjn>sv;6Gj>$F$t(8La2bgh0!$ zRg+D#QhfZoj6CCprkJL6*>p@>d*K}>DnOIV6171II4}?K zhT8hvD*D;Kd5_82gb>0NUY>T3K38#8$Ygf`NideAFDZT=sS_DZG=dXj+ME?nPi#tT zfqn<5C1lt4)I|^dz|M-=tfC#a)xl(rB9nNk7ZsEwn<;+rk}^>OC0tuK#*6lO3I@9> z(=~c@xT0%y6edzikD=69lw6cQ{W+$ml>r=Y$4oyH%k(Z-|L_;O)GzGw} zqsRB@@(P-3i(l(ZtH!6(eoD_+{1d~=!K$y~>*+oy@~gYFFvqj(BeY$LC|2g*pu@sX z*TVQasu)Nj34pp*hbQZ@zvR&0{9<=qyjbsItAW64RcDeGqD5VqzW-;Z zz8!F3vgTT+S~K#hsk2{eVft%L*rWEWyuu3sfMMmqXL#s2vn3GwT!-l(Pi!O&^A~7)VB|}d8V?Hsi$ey2Q}PrWsL|qj19+S_v5J)n>}0$ ziyL&8?V~r|ZE~%Tc3g8_GwI-wPB)-ke}ALF#hzsO_#crRt;UCwh2D9Qhsgn2(_BaU zalKpP=W#C|reDHQz2YSvq*n;OjpM~ZrxT`hT86UQ?J)i+=yZ({&|zE;3z=pGuFv|I 
z^9mQ&x?T25;G7ziMfFWMvGQXPWT#mC7EQd_Y z$zmfYF5-S}JM0c}*E+?ssU*8j4=WwCzOw_>@~Pt-(d|`{MH7@3ow8Kr+Y2xGJ(nq_ z)wv2Bo*%tvdg&gzOu$bmc@$^e)y*c$ieVS`1;wQXqzK#FmD8uUp;x1)6_2RoKPg8& z?;0=4H3q*sV7JP&wE>s}qJ@jNxXpvS_%1XMt#kRH#EQiB{{KCMAZ8fcBrjl<(b zPOC~zvBG4ufi^bL5I^Mnbu-Lv*P8Y+C7{%{R|dUefQiR2NZ)P_Cx+qtq&F-<^6V)q zz+wH(N%lRqTr`uv&4c8|O z%67Z6&%BHe(Ne>ik+Q|g+GtEa0ThF5-03APMx=?kauw~@B^L?G_44i@1&)Rz6=Dp_Pv^^^?w{O!W*(ER~M=adlSN z#@z=&V%AFX&D!*o<`;3N(Go@}EFsdo>IwSY8z{QsRmH1ds#e$ZR(o}AEQ;3E22%{F zCn&A{WKhC}aQtu{a$%lVC9=d%mf^Bj6w08c46*>Y$6r=x6Cz55EKgL&<8*stAbP*n z0)TZ^WF4=L#Tot9(W4lPcSw8V#Z;Jsz4G5^w7;XwFgaI|%VdD3GN2(dxsO+=Y&56o z!1z-GzU2{Ii8tpLnKC6G)xmzF$%Mz>;2wn~GdpqpSQ5o5rG*RnF%mp4p&jDm^Axt3 zT$O{OAep4qUsUYQem4WryKt2dNRu=#Qw9?vi`!HJ;70<&te4AF> zZ72&Uv8Q^1QYT=y>4bEWCf2ILzJbjuld261d;<#smclwf8&0oh5?;@PI^PTm^jwp_ zWKP88Vx!?jYt^wdR=`;BKjlyLCW8t`e{(*ldHJn!+GQYLM_SGyFAi%kncT%HScB_W z{c^|d#LuQj?M{(%u+5g4vWlZaG+N?d@TICP7r28A6=SQkROY`yKg)>LML!VRP)_Q# zx_rwu=!knU!LDa=>1K7QVfX1!0_53)=q>K!f==Hmpjjy`>hos(36;s! z9BQjNTtm@1^!{}^wq#MFn8;)V%fat|< zXvybytl{`~b>}GC%N|ubiH-N#xHoM56hJBXpMHAiHm&Y4E4OGh6#Wj+L{WWg)WtrG z7JK9D+~P+xyizq}nnnIWZKAAxXEj)hbrKEXlB>@gKf2WLDu-fBAjFrHS!I>JZc=5T zN~I5;)fM16&V<;PHNE&=+kHjh+Zy6tyGhAp)f$5q(Fq7)8<MR(?u!5x;4+7K}F8D8Z^VPKH(hV zC{ZaHhv6Kkkv;VBQs!W;F}!$9=Or7xLB5C1ckQKBTY|2-U4sQ!zZ94fv2T}wx6Ti5 z-3||PnZfsWWwPB|5(kf|xakkLoFGo-b~b=LfyxfG{i<7sYizcn4Kx(ofpM3p0u055 zdxGNGF1ZqNX1&RdZ3M+Fu3!CEFGxwSPo1Fny~#nIw*cBs!p+)Sa_3H37A#&-RQok^ zxV(;5tcY{^o9r|IeV@)Jy4hEIB`i!LLBTh*=J5ro?hoy7+wTmrIpot>p`)l>6gE9#h*ZcbNYi~ z8T9t`I;9b~_EXk)i2_bDizXl@KLkW}xxtUJ!{kJ$$Af!@ap)YXh0wgHJz5}qIrJS& z*W0|athex8=MaruB~!vFVJt&OoHrO29tPUE%5Vm_IneXc7ZjKw9qyk>oR4HKKhwYp z=#wR-7ogrKbB*FhndK)}IkaC5Onw`XlUakst(raBAScF>R;OfD&FK4d>8r-NI? 
zI@;hVyPolW22JX=(ru0LD!N9;MP^8mE6R>Q|NmSU`$mtLqCubT--9#o;IhtCiWq2i zyER;WrI%Kx3s^wAtdCdmqv~LB9TO*7E~Q+EU3+WJ@;+|hHi=*62;9MD z8_cL;=+W~{`eATF=v~Bc_4`TuKZV!>_tSjb?=cHsCT?=!!f&^645~b3vUsQ^>;-)| z#2e?kq0eC0+Wk{eFhzqbVh?sx_}=_%13*`mBumAAuSoPM3rjLoXUm2q3s=oPcHmmz zQfGZ)hp5&k#xGn$%M=N-j-^N#6;jsw{LW7|N_4}Sv{OXq@{xyEtWT()FDdQ+v5oz| z&Kbu4OHS5bYGwIP>Z)YI5dDbsKZt8O{fZ*3Kpc&#l`W~kGz8qUm>E#4=rp&$IA zX)(0 z=P2}J(2Le@$`!ppKY}unCJUtNpPtgj(eaAfKsgIuMg9W4MjL7l(MJ1}pC4v&o#eOG zARX35=>xpv{aR-lY9;Ms;ujCWsEn)bbXo=I4$}rC#6J)v(<%3gPN7XK^5ZBIR)@aeBtjRe+ZOkSgRjvm>>)fpdD)^7F6iq_vl-@QRlhV?}kIQh$lE)C4hR`H01TriU>uVx}`zMw2zi zaKh{w4Ey(MV`V&H@q=vkyeX$oDM{@U$4`h+9#viTC_LI)%LizLQ8xqEBLD#NlTNX33WLZpG z%w#b$Gj1_6gT>6u%*@Qp%*?RGjPW_CQ%U}dKVMS0NljHx-_6upt5^3s)4iU@Ss(y} znfARTCO$W7TKq4c@O?kY7- za{N217HE%sM|(})A0_wF?B}8mr_SQ1(xt*TPj6xD^0>L*j3Zp@!!?o`{>0 zx@X)MqMgjZv2A)m>^*?vb34*}4hAy6F>u;-B%X&Ea}!nbf@6#Cta_2h)yv?gaml5j#kR@VfO zZVj1eHu4e&E1lCYG}BAXE{>F-YPF({YZn%nhw~LD$j|BQ!`7<(*|>#N_U+--#1m%$ zQ|zaw@MkrjhxjUqbu;3utAp>7>iiZ4A{(c$_XT@H#VRNmqA+j#Pq8YOzNhc`<8XC) z=I>*I^pCBARvV3FoMuz`dbJreT#Q0$p*I1}wwTZdMSkE5X}$h*G|_)CE34~g#^GOr2TLCU zxyH9eyv#^z_PaWIHV@J5>fEON)@U=sl5l7GXB@&j`Fl*Co##v0*R$!x zS#)g+6Ub!p&d&GFqJ@u-!|l@4IB;p_JtPm$5UDCB&1p>e1pFM1Y)*sKoL@vK+{=gD z`fi6*f|pdBE-{yPErZ+EL92MEM-OU?ljO_M%7ayH<6Hg!uh9Z<4S4N$34cT$75I7S z$)FV0!cjef`Fnp}zK?n+$!ENCvDeE5`*#`zP;dcHeH=!j6^!9L-PA=~xT0Yq;aMQ* zD~YB#cf(^B6YrYdx7Jd*ampN~aE*isXnwL4Ts)bo-e}7ZL7oxL zmHU5S6J64k+JxpDj+sLWF_|8VJ%<&bOw1fKeK@fnJ>i88GdisntyPsGNpR-N((8=U zG_D2LC@AyKV%#7#=Q%vre31Py?OHzU$ir@8xQb(36M%RGCl)on^Zbigu?2!48CwsG zu#5ji&*yCOICk+XdNAax4O#WE8%6Ek+4-Ist)8sNx;ik1$kT+12HqB7521*J#aA~G zjfN9q0q!uxxB=;7(?Yq zg)mCTLxh2HPEFYXw7al%H1VqoZlX~0-=dN`Rw1jm!uI>FV>>9QWn`F`{AISM;ow{d zZ~|rqvkHmAnMMiEA5&dgqv2u6m2sMqa(#~~Zknl>=2Ng?8`F#9Y=K^%jUwGd+_A&9 z`wi*yYEk8s%fEgO>=)XWALL9KbpI4M(s~8{{sspA_v7|ws@9)3iSL5`x2!1)ugGMj zHjA+hYS82|wOuE_tOA-i3S~HW$amVc?+&eH(Y_=lUP;H5S)L20BqXieg%z7@U4-dc zHL>6Q%Y~u&V&hEon^_E7-XkGOPiFT+I8fTg-Nr=BaBW-Nj#ckZgqvA1D4SYDYy=46 
zWD6;?AGl^}_LbEYGRj>Sv)oP21ceFfM-_fl2Xkieh;1?~J*u1hP34Pr9uV_qFXiSxVxSLMiyH+fuCm+ z0>ic8M?zoLLeC%s4qW4NCnYu~MOKwV%jWdF?9PO|X@BXI`AF)+)Tb%iNn|=UsO5%{ z=&?;>U*})!2>YvVPph(pt|psJttBi6nTQvA4*+)~%`!v}gbW=rNe~9Bt#90y!=i$=ymV--8qVxaTx8Wb6BH=}|0jO&NvadG$m4 zL1g~^=~NB}tJ?t)GTbXr`NJA4!S=B2fyudq#_^5(8jPMb5jMKTzNvCiFhQ3 zsJpLhhXnbRw>BlO6>4Rie9N6ERa!wkx=J{N{s*jqz=CY#zxm_KWe@4vZ%b3w6sOz0 z;pBU@x+o0}+^4DUO5;4WgFdQ=`G?ZUv)u8+Cn%m+qgEPl9V&-x^JHlDkjQmTZkwgp zd6G8nvr$p)Ez$p2Y(jtaS7K_^zPk89qftQ?U&T;+bhBsD)$XZE!DB~vzdk+ax;K#j`NeMw zKttGgAAiyBWvXT-hxSVT6LE1r2bR`wo^9vL`H4Eex} z-Eq#us~f3=gl#SDfRd7@l5sauqZ@j-m|+|AD+Mjs5-)d8?49^ACBOAOi%O9Zday zf?>v++n$2@FUZ@dToFv2fAn`k`2PTTYxv(OKdB;ZiYtnim71vy8dEvUZgEayR|c*X zj#Xd*~koaB-Q&*5#b)pOlaz1ksAx+bl9G2I=lvu&rl*~nCA_In(e3)h<$-LqXB zqPMHi0^z6s$aP_wQ}@7$f!yl zQBmw+ph5@fIO0*Yh4!Z)jxG+vIaK#dtXHX)0$Odj(5D@97AK86b0nD+O;#AWv0f^T z>$f=?I53I^OztX9u?uA)#!ScQRF(Yd#?o%+FG)U%qIVIX4B3&3%YoJo)R41cy^R(! zvqte?@3bVC9gXOys8Yh1_><`f!zfw@J$n6IT1SM+@0Y%E7A-B6TaM)H%_;y@6+08W z+&{355~04Zfq@5T_>=DS|=cGS+wKe}*sWZSFoK}$64Z9g-(2uRho8tNl(^KpL8-1SW_=ZbduWil-`Y$sN)UY~h){fuz?|W15+nae17*Y{`~6W7 z)5PclZetLe%N zrjai#B||Eg*W5jOIfU@D;!*KDyRWTNGCERj}}IJj-=(gU$^Q( zvU1E28F|#1K#H%`AAH-&id{UN9a>w5ui_SRKc?!R4U8{(eaXt&%Zz?Vhb}Or^OBBV z4lqP%$uoE!hst;fybz%RjmO%5(0FkXM=_Ooo+IOSSC|psM`|>oLv7In4zTt)c!aUD z$IiHa87M1{fQie897LPptj0iw$eQ)Q`);TZJu$JWfsaTpSsw}!8G)q?M8t+jREa9j z2|?=OCyq9z@Xe@&+Rzdu?`ODKOV~OR1UCPxDs+{p|7PV=d~RafZ`J9xc%JerggMrx zJ{n>70&;W+L5x5aPqI5p>@_@4#a=;HARUBl>LxQ8kksu)Hq#woH52`c6Fyw;cE9FG z6|EyJdpO%#r{8*KO2P8)92v@nSNAt_+wsH#$N*3X zx%*+HbLSbTsWTnq-q-{+dk%6pTwZ$^qPyG5Hwm}U-6g!9e(t)y>!(Ne@m8M-T;IRl z8}RwR&KJD173^*Qvex*2lC_Nf+pMK3ZHX(4mX-bBzHArIDsDE%TxzF?OhGji3L-Ht zkwl#rl0GaU;`nuD334*3y%A}*NC-hX^j&~Op@6d66`%(2*p>*O;5Df z*K%O~c?hZ+MC^LA=UGsGxhviqAIzS0!`h@3F_B)UOvD7al^>j7G@OM744i1e<$udl za0heKoi*^BFybS0wZU~e{8#&d?sGM0X!1S7Sjo{t!=*t03~bO+ur-B5A3D~jbI%Mv zcOl=s!+ok8Gek6E6eS6HjhLP6Kua=L3rd1rjj0!0V*Xf*k>_yn&P*p9nv5duQA`>N 
zB=GXH2n#i$+*qz{UfFP?5N97yzs1UzrPvIgxw{H@kEJ{n(XZ1wuAZxT+ZB9>mFJ5n-u1Dg5Q>(X#U!^=ykF`HT7N-#xZ(Z>+xV1e)Ba)inxdT{7%B-d zN=M@A^Eq6WE3w9AIAV38Pvp(1nO9Jjt(7IB9Yxw2 zp5&Hi4%k>*9he(VR)6K)mJjvGCm=ivvSBa1AQ^3!m_`ps|?ljG*!DPrq<+u_`?G0v-~B>eHK5PQAq&WmT@-(mK?i=qjWYAS*MLwW2dE0aDQ z5S+D*^y5^yx#)j0KB-HVyY+3fpqCj9;D+Nz6lB37bJ6H?#4N)5E>5OtWi9FyxN-Z9 zG##YJssBi(XpK4h?qQ$_=-9saTU~jHy5?Ioq_X1|)4-J?eozWQh zAb-20?b|a$RP9oYF5jvg_mYdobytgCw=X7WuB?IwrDvq4O;vaK^9^P1zWun@{gCr~ z8)nm%s%w^SL(W+^!WFBb@1+RepmWaD&F(T1eh0;mEhT>amvtNhb0z-Jw{PFV|E;14 zzTthd2in@unOPf|83G;t#~tWO%LrfuFwoJ{>FNUiyNvihSJ0P=X&GtB=H@#Y*_tuB zX~}zOstO6I8WZ1s@}Qi~SL0Rg$5DJZa?vt_2*411xTV}W{B-yYNWp%d@SEwPy(GLF zmAJA57#s-1(v408hBXVAHc?@8b}*Lwdh;ncf>M&a3n%Ks+m(AUv~@#`DFjj?>C32> zctV8hXb>#O$&^UbX!`!8Bt=5VCSAZT{B@qY_Oi_1sF~l|`i<%fr7RTJNC7;6_!_*7 z^5>+q+&MT;P8tAjCguG{SrUJ#;u_360B?#bJ9)j!%{W(h&|^-c2qVf#34lB!6j!T*+}5QVIpuwYMg(I z+RwH{J0}ex$BfDs>aOMw?ICZ@Kdd1b%`d?^h5zEcNSzf7{)aZcKmLQrG5NpKW=cLI zEi3(Bv`qB$^ehp_@_vlXasoo34R8GXM~vw?x_v;Oi2qZq);RvHZ{ zdW8v=HM2L@DJvcIq4;L@77fv3{Jl{%*P>e@?9m9!5wt8mXB|@YK_T(w9Zdo%I$66ytr>G@i;)`A#>B+zZ=xC&+{!WTN zJ=WLRF+sV|EPt=k@3aB@r@j>I zfBDh@ssEoC)6)N8OzZsqUm8=P2Qm5||7FNd_^ShLguZ>dclZy&((J#J9@OchHviQ6 z<&*Vv{viWJ6o4cEMKpwD7{M@vMJ!UlFccD!@VhWUL4vX{g3%~JVXP!SL0*E=@PBXA zjWZ3WIF6s$kGYeg9j4s4365{r|3`=OnA`i`#WOXHzkbKD{M6R9{Pg`FLm}W!VPC}hvSLW*UmrOprKys-VOCi#%$k8DPn6;?K zTJn_ltmxS95m}{RwoeZqyPg|O(DN--(qa63L8H^Y*%PNweR6P9$zJKek-E&9@-NSb z+1pQ-n)t_koP%{+^S>CKr%)nd?YOL!RNd_D3My%94tMRgKmD{zwxl2C*}*~dQ#014 zuoHnxYrYStoPm;9Ne2WC=!;5ioTZO|D1&dG00yvR6REk<&7-5f_<}MxH0S&c&yd^O zJJ4rL*+}VSL4|_<3Unt$70ZQ?msw(`oENgyKPuAq2;$}jPdutayBD-g!7XGdv7)#T z8x*FJDlQo?)6{?^AnPG>`$Kc?7up#^%fc2_cQI!$9eP*T-v@hr-lib3>8kbsm@j^vqS~e+{>ws8flKp3dnD5W z>^3&m6_MT*zgQ)m{jtegvoZ%n*v&9!TMer1@-_mGi0aWNkH%Zm58RG zN{==#?VChB&841~m7VD~fNFRnr^nyo1DaJqpncO3pf0@Apxu=3*X4D?xbq+02wW0Z z9>JDSU;S=JaQ8Q-6WO>+z^Kl&_y|TO+k$^impZ~jXgXGLpLLobq87NvHKn;rj}78O z{yhu=ktJcUF$VdduYDX_RWQCW(%78=r2W+_G~V&FjuCBvTz>7_ 
zF29>^o8H4gxN#%nG0Hs`#eM0?krhJXC2F1~V8t(ej{;n@bYe{pHA!}+2{F<`{1WxW z^My!<>YG>V9SrF> z>hiXNU>Y(7=|K4ejF?i_6}vX!Q&I<&i~^w;7GxQlz82@;Q=WE4Hb5_E-IQCrmwy zQ8pb>@hu&#f(wE%E9fUV&3t2&tPcw8q9zR|b++}|d$OWgdo+&{PSI`WF&RZ=N)N6d^jq=$s-ROY99!JmOGSdOwKE#mg61(jZeOTd!(ds2pi zsNcpeMX0$_BWL8O7HSm%*&9?=UbrL5Sq8_2|9HZp`E^h^^|bACD8N)>u`;mTR=ZS5 z6fs0+Bi}G(&o`!Av~Owfn-=J(VTyx5JQw&OXRK*# z3wH~^6OxO9V4mXP24OS6+1x-0i^w~r+}G_R1sl5}O*RXQXS0V98!f}t7PXdT7Vj`$ z_5n&Pi%bpiFiNUK*%**HWlWd*z3p}oyyY$#06)UfxN^u* zIcr7^8|c*&XFPs=XH1CIsjVgCU3m{gfz3#Dn5;f^EGG%UE)UGaZ2@VED18WFs$phV z{Ep{2hCwrN%rLk)`hW4fMzjWtC~u|HR9{zACS46gc2q^|nctg)HX-!$_*sV6X%C6l zM%^H7wV-Z8-(eG zF@qf>YRC8fi_d;Mlrm)79~{EL`s@f!h~Y9~)yX--?b@l&%b{SPevkJ+yQq4r| z%?@yF7WI!}WBv+T)@gnE&bw**%#lriy+0P#w)ek$u+$$|5l}Fs)zX{e$&^k;R5&5@ zcqLhvNGeOKI7EGr+jnlS0>#IvtS(HD2?w<8QpM`@7o4Fp!|v>bDyPE3piBF$ zxnyGr7A!PqHcb<`EcF@u&N2(Qr+K%-!}w2zkww)ZN7W|swusNHCOyQ4G0-PD+hsFQ zv$z(#aRkdUq1Xt78{N_T{4a=;Jg(U-9L^tRk7U_hnQ2};N}~-I7K#=hQMUgIT6g;I zfjMS8A7i7rhe4{vEC$c8QX=%PDDX(iU}V8UMa5S4Qcd$4;)#@cX}XcuWo%(Ml@>+V zv_B*ks}0x>+oI6Y;urNmz_eHm4k*k_@0(QufJ(Sn7?^`O+@apcPTGRO9(cm`i$AZh z=EcaRb1Z)h&Cjd`3bKjp3fQ#A(FVpOfur#JO;im1?q+-0z@laIPtT=GaQbyGnCLQ9s2)e;GQN?%LqCFJN7i?Bp zP7(6oG&qV(%+dgBlkt%`ALHU@t^}HD*w7*vbL@W4THGg2 zKjrqvYU4AEQ+1j)Ce{>a7VXK%ErCmo*6P`=7Z>6>`t}hx=pf?QJLf52&N%O41-_P! 
z>X=^WudYYUFIZZ&3ajScYT^vbE0RcoI!*a{#Mx~yhh+(YR>RJx4UP$d(mbKpZfYI| z^9^OzM9pzdRMX@u196S)7Er5g@^!~}f0&;b6FwQ91E*FjZK5>ab*^}$ ziWIMvW)%@MaNTu~V_8`lc1$;McQ=`e+5RC2VPf9Mt55JDcnp${<&&x}p{+Z7%wGc4 zW(seS{OyDaG9RFEW;%QL!`2tW5anAhaOD(UbQ{k;KxLoB(EvLJU8;gUla&}fR!n-% zYYe_5_wc<$6f2JQ&~tETQ!4+vM>sbakna{UR&_Se@{2{NG2iqFTq*5UafHrxLjLqu zU!CPsl2-i2F@s>6qV4@q$6ZMafMyI}kb{9gHa^GOumtU*#ugNY5nVrkUOFQJsI${( zr|iIF*-bOmUlOK|wN*h!cgbn(7b<3uWl(Z@cA4R?i#CXwGDv7M)l{BOaEt|CDrC(h zx5#zo`pZ;djq~JvvS3_EcA_9%56J#u@({uL`IM7+F+0k34jV&)IrH!@u?zq?q|i4t)@ z#QIeS$W?dvwwd6BL%P*r=&0Dz*?K%%7hlyIYqQ~hA>I}xH^?D0By@)kQKg5eoOB2e z`>4@Aud7>92N_L|-6pN!ksnvsP}y;fPbiIH$qX$&^JV{D(d!^p5sDH_ReLCJkb zHBD%jr=MCgXLgOuvTf-t$|gN$NQ%jqQ^^njg3#9TSm&9mdrg$H&<#s6P!5SrXV5P7 zclB0pk}?m+SyTBAXP;_uwfI498JH}E`c9QZa`K0AEUrT*tYjrkgy@0&2e!|zBPsJ$ zczL&ql@TPgw@WYHMrM^ zR=6GFv@7Q?idmlsWF9!qWSaD7f0S;p&;-dtv7nPnW%5zbgLkv(z zsjvFUV5+DE$X2lK7(WHuAyBCi*+u0jKE@HCk5d@7*brT(t)o~a?S|=Rs-&r=ZnHPS zyu76QXZ6>@n&iyJ&X|2nJ@A^@#(Rf55I~QyF$BFP7`e`|kH$H{LIbBEEzYg{7f%5I2ZDxJ%pm(Caw@DyD3>$PolEo1ePydBzg_^Hl`L*Mz^lAk3<=j$Uo&d#+420 zKVTe4$M%~qP7x_6RuTGE{&C6JKl#>gC%Q#jddmpRH25UQmE6PQ#Cepk0-D&y)Ga== zeblhX{wY>aO~jDS>){D909I*$C>K}RZ9J4SxPnVq=9Vrwz`UA7@)5(rHZr`C=`+yx zng7;2v0ouu?2?zz;|Y$Mu`cq2n6-^|5EBZ_e>0R(2UZE|UgH1CWoS|ga4NGKTecKc zR6b?CjQ~ZI5(zrI=%f8y`-?i_0ujjC*TpsJW)36WUO;q8W)_L@?=P#F^Eh_b7J>O~ z$`rI}FaKC`^5R;=6m-fh=Zitw>kby*pqdF>RzE2+f@+DjHWtQkZ-_VYWZ8I4MzZk< z_f=gTZd4L+UTbYmPb;F+=0pPi;AJ>Ho37uftCSmSY$L5Dhrp59lI$kHJ97SD;uz#G z76ht*)t-UOj0({NIb8f*ya3EEnxwYN^G3$Qj&wj>g-)c$xki1IB8L^eiINdYY0JQAJy5^h4&pyeIEV zAqqC!Q5PITV8h;w5Q%}Y9(Y8=Eq^Wn8@^`QCHAFL3ae02Vw2|Uv|6IIr!-^li7)H= z*SZ+qiCq-I`n=sU)e%Xp_ySjeG0spc!vy{$8oOlmYb0Dtw$wIcXQf3#V6pXK9~&1V z^`ux5ffl*$4IB%4V%7nQ*wm^_p2)Ka2PAl7-l_s;Z#buUx&WSgqqZZaM zC6mBbrdeTbWuX_7-GnJPz+%>~3>Z4VcMPC%Jkqn}3zi&J&k|@URWypUvm`K~ggsCL z}1R@_4Ex#oBW13t|8RB?|F4S z`nOL^rmI{C;br4?I#rGzCM5lM3yNqbC5@MHPP{M7_|)@vHp_i6(g#2{OFbu$IxhP#Q0)GctMx{Qg{PuAuvm>WKCnbtQm}_;1V@U 
z4$;IDWH(D0-OEZ_)`ngm$H5IA%;1j4*{{r;ViQ|?gAy=TKFJ_uw)o{9`~YR{pzpAPk$#siyit6Gy$NcI$e_H;6yHh{SZkj_X1%(0LC*0M)* zC}TPgY(p*Di!ZTvzptd7(n@TzWfQvpBA}-D+RVY`+sb>z1L`5?OAD=t4 zhfB`WY#Uoa-*nC>XYj%7TUQni-d(!z=V+cfQ&q2jO_fqEG_Jh)eW8uyB9AtEUI!t+ z0_j2?aIt{(u={=jCBQP6Bw1tkt+bC*9w?vH6V;N(c_%ZN(J}$>@eTI?*tvuw3xR~S zjR*xUbpmKq=!b8=gXC0qLRaCss?QodYdwW%!Vce z`r(bZ=;U>>&wdJyS?A7cMzX~me*AP3)^O=+Z|dJE!QG$fB6*TCoJC@}j0Cy1GLYHl z_V1OYBZ>eo@EbevBqN@A7KT&vcxQgq^KfJ$$OBvcRCb27F~-xl)GRW? z7j+`B40)H(Mura1gHP32Mxov+uMv^iiBp*;KZR-KTgM#S^;NWqP7#G2I5*t$llHN# zgewK*cQgXuPB;TKrJSFnPwd&(2kR<@2Y+XoI4(R_OW;Emq;X7y&EtI|Hti4zD%&7P zJF?6jYdi`^&0q@`s_h)jS^OEcQYx^XBH9X|G#}&g`wrvcYV&7*`)D60r#ENrimdr8 zt50bW+ryAN2sNK3&_yoG#tFV3+XGYouSD5}fE)PbuVg=t;=uMEomspQ>!iC|*zvoG zM-jzNYL|5iBTe$P=5ct@ojVRh$w5`hmEa+0DO2l^Jmh;Qi&A^U4(0U@k!D@zqa+K> zd|>k_i~ywk@vL^>RzU^!ky=V%Z9Lg%em#y+yE|VK=;*exWS9F=82w{0sgvQ0As2Ve=gT9I|O`F@{Y!!;3Xnm$^Osct=vC28D) z%BsXs{sWa}oOofnGltfEH6YQ`aH3sHAUAJDRXx4v%0g>)vmqj*Bs>z4E=@pcL^ zKKM)d%tkZ7P>iRap_n$g3lZ*VNxE826umJJ#~{N0qT zwf>b|PpVA4Ntzr2Tr^un-4rxTn{X-EDOe9{->Ov*C#`ahhi+Q&@IiqkEH7Ee>u(7k zh-`<{)MST{MAOM0?Ug_*qu;*Ztv2GYKquOFeCWH?6r7t=)R9B!518j?K0pk4cLwJA zM07NT9Q{By0{Lng7{;JVkBuNK;T?LLN1qNqV>9zE0+)aq^6H%2WQQiDWUV-n6IZ6akDx1HgwPijj&=*|#T%nwLSJqqLKV_=S z64^>Wvu3PBd!c~~7*L0&ncNMO+ZcB6L>b3ZOG65G@98bD2FXi2lgWv_(7x@;=C zFmL%xRFx=-a+f*qsBTtW55zj0w9Yr6myB?%C95*9)IKtAQ_Gw|hEdo`(M8^^G_=29 zNXdkgin(#Mbvef+I;I3ZutC_eTFWG>b3@}W<3OWWGvgp)++U1YxW+|G{VhX%UMRUq1Z9t}ecC0L#P!k~(vT#g zs~j!na2XMr9Yg@`1BG#CW`t6i5MJZT{ERP8Hx5C1A;H{>9(k2XN*ol4Wdh3XrnYcl zOU&p@p(A{>g@e{SZ6geAxM;9BazCB7B{6gYp zBJTmM(_NjJ*m28fp`0jp?x@X9hiHdm*D6t5nnIa%Iy`R~DV4L&3@YTZJq8z44;Fh& z<7_nw56qP}F%1zpo++YQh)^^_IWyD)UztpXeY4WxFv7=-GK`_Ne>CKhmMd-4Wc2!t zaQ(WGJ8ue~=nV7lEe+pg)H7b$DM+IsSx&jyhN(0U&vH0m9^|4U8dd+vnz0(#hAeOQ zO}(DN?x!FvWI(^>0v(87Ja^eve6>rFDzUiN7#k?V`2)GC1zeuX4`W=uP7@){FnzSB zE94?9iNMqm1O&@ivKR}XSfcv(4FO^CSE+G`1@2HHZpZ2B!)6V_NHT}GG2t%J8^uBn zrKxkPd0EC5%5gE)q11i}cEEI-|Bu@3Skps+2lhZ3r(S&p1ef>iyvP6%P*QgYK^e5i8=kw$3s}K0k 
z?9X?fM`RaSGnOu#h$BR!eRk9NSH2;V$9ohqeW_!;eFwy*Em&5(MVFLBxA1bonJtrCx=$F^ z*Y5PLe|MTh=R<@i({$3bLFI^e9w60FlX-p<=^yai-IQC6a;~q>9-#B5)2$Z4(lwu^$2pC1v!(l4)oQ_<8%&oNZ2tDJXVgre+>_L)ovNx|chzoM5X~Mn(yS-7>_AHA zRo_FeJV;(!p*mNHTTS8BwlA6-27B>j)sF42p01F9A7&-(@#rB#EGBu|UJ8u&;`FkF zGz6>ZRDc@_bSGFS*2QcZWOYZ3E(v!--A=9BXyQH+($5anjXT3y09LKXRXC7f-eh*L z!Mbv?^5~`%S4iquYbIZ4=1AV$7{PI$9=tIvuijI?6u)JSH7~Cytg!V@pD<$9xKyZB zN$U9c7%rb1VzPB5nQ4BhSx&D$w}H2%2(mk-y5m;U0C#T_$TEA69j=96W-*$+d$=6k zqIFrrTv@|&B}Bo;Sl-zdTOgyCuS`9}ljX75n?YO&80 z6tbe-w2JsJsB7VGfK^X}8N9~G9%267MQC@ntUO%0#Fw53w^dk=K!GOhR1F+mqz2S8&^v=54q6SFu=oF#VTTWS=TBLC!TK+9x5@7Xh!s z%mWU_7r~tl>bUoB#@}~K4IPXTs#4tr-Br(@b(&ZZ}l)dHc`GHa`vD7u!vR?D}jG)epPz%!CZ&%2-cqj--{tG zc_AX&h~>2m6)Yk+o$rCmdJ-l9R#d02`qB)=Y$g>tqu|T-A_)7NE<#Q2@OxE`JxtZ# zH!!>&$F$?nSgjSlxRMrmaVn&IjJ(9%F$>on&nfG5y0P?yn=(M27fnd^M>e&ezsw=z zCA*q;mXdkgA<`qNcvo2I(gVWcvbR`$46hYwc>kK{7Wnik&cQgn?JB?X*k^42_36FJecNNZ<^*~LaG!_aFkI2* z%6*|1rZ?9i3pSsU6lBsL9i7wdC-$XD&q`yEG{o;(RWLnI=97?__O?@$aRwa{=DA9f z-?Dz9^ZC*}q_5=Q^`#Z54rY&%jRzx(!9@MOB*qh_mq`w?5c{whar-Q}hkXbC$|QKf zN~NR=ghQmu0|us-d?=#jJP7MocS6-W>POb_LK6Av zwx6hXn3##yJ29grKEIas?RMVihsTCU-pN3{0~Gu(;J&6K%(#u>pe7ninF3YTGuuEX z#NAzZBy7_@0-VU((qg|WN%i*Jr3>PL*{Oo!Tk(b-2E+ z&kwc4M{x&i0CKSvJ29P2W|kVb?N{-}KypJ~YB}DmaO%)r1JFlG1J|v|3Ly6>>~q67 z$9owf)t%5TTz%AXv#a+x&#vu|(`Y3sb5!LkQ}2?HQRHd#w`a3){_|l3Dj(Sdy9+`` z>kAFk)YFT<^_;H`YhxUzY@g04#eFVxc}A>@_T~a zJC*KBA58Zkd2{UIB^8Qo55~PCQc#e4wJs)J50c)+Cxd5gX5bTrTV;q)Fw`NMRUubs z9DE`KQt$`APqg8%J6 z)1HV=wBx$*vYf~BZ`(p`1~Z*|nPXl2IVQpS%vz|~Ox&HzeOdYuvoRi*4C>o^>dAHd zS-LdhlJgGcEdnlHTyw#~Ad~iDQe!k|XcrEbyd#z=aQ=k!fq*azUr_jv4|YUWe=1of z@u8#ECoIY6m}UZ7hY!v-6jzKW@WNI-i>ThhMEL4|-$BX!byXnTxE+I0IRh6i>6&}r zoI%ZcHQX0`REgIoIrW@O`?{%SD^ynb0l!yuP5ng>eNiOLaUWe%zQ6VD0~_b`S6p(s zw^**t-0j(&tGA%{QZ!Qc#8%)Q-JoQdi+&73SOBoPuC8;V=_OW`8^LOf%UkE0qJ`by5v|(NoHCx_*!cMS5+}h|R^|B5cGu{w;qmG2@6~0}Yb`}JxR9}D zU7>L_sNUf&LL$Kr#S}9bAAgxZSiiTHZwwDeia#)9dAm*pSGtrMZOt~vB2ZnMn)XWN 
zQjbFsuNApr`o(F!zKL59Y+0)4pJN^6URsT{LZ$T$8>LU+N;Xg1R!GhCEsJ*(`MX#noD*{qeX-RJ+zhn4S|XHo_cyUjMHHpu zH6UU$N`4|YzUgaVsg--X|J(bs_>Vx#CwL*cLr{LwNBx_La`TnJ^X@3zu#YWbh^>Z< zmfS!GoB0#9^oVwz&p_!bLp>j;$k$ad-iXz>U$XA6`-|jWz1REMn`&xaGBQz?6whcv z;HJ%BQBYgWs)3B=8M_s=*$6+ess>S{JF<8lL&)OG1f2xoA1n9K-|;dM8`pizOHX=(u)2ec zBX)MXe2R27suE!yP*o*Yq$cKa7SwQFKg%Cg#j0`(@3K78iAF2EB2lb&OBwwiUM$Pg z=cO+dkHQ8=v6mMAH|pLg$kwJ?)T~vjY}>YN+qP}nwr$(CR@vq%+qSy)K7Dq?{yXA7 z-|py}zL*ztUS*7YXXeN;@_n9O;RgG;bETG5^Qc~YhmbMpAzMC-{*JqrRNV%kW%J{c zVB7vXDRI`SX^+}H<72w(_m8fr2~zyt;LWB7W+(+0k>Q4t?Rvpu z;@EsN5nfZghz`hH7V^525i(|OiyDNAi(37m)SrFHcc_)%ykPXZ87=slSFZXKyE_tj zm{(9gmP@t6=#a`KoSy3yKoeK*{I`|2Uo~vq$ymNS>_JN0cOPt1q(HNJ^BipnH_&f-kIK zb(eZZFEhWILL$oJFLt%gqh2;=%a3uhDoHjQKZI;x*+@s49E>eal%Q3KkPOzbAZe-W1X; z(^=Gu%9X#NvrNNE+!RvFlR+RGiT2LwyGoRXraw#oN;GceI-{~wbjCXc)zP;dr3#}$LTWzlh3U$O( z3gY4s-&bHyGycEcX;II^&3eJ^yYp_3hzJ2PBg6Qr#@Ih{F z`~`d15;*+};q(zwdgD@F4({`BdKK3CULij5zQTJEk1g&$`6r#C_Q#B#6O}Tl&Y(## zwbtcg618#Dhd~s6b?)vV85#<;`3wn9k@|R-88J7b@k2J65(pw;y5!yQ(5Sum0D-#9 z{8F&W&UXlt1Oy*u}H>{s($yDn5Zmm-qLxlDT=}F=ZI_g3a z_ks~b@i?g+Qw18x5s|tENlOq@ZOj;f!wsa==)tv#Vp+1 zhB-XjhRSA%;sFG6rLKJ{;HCm4LuFwTQo5zf;u_X;(Do*dA{i8-sR=eK97^Jl+6^@F zl}D2aW>LOOGRBJH@j~iuqd>}dH0(6yl-Ak}c>07*5rfH*^W-x{vyut!k!n&E#P;`D z3496Dmj+>yCD93Sj}(K%R8rZk5)xCHp-i8G_7vF2Y5@c3__T9Vs*=*>HWKjQ=n%#Q zNY2BOIQ(yulkOpHR1#xx&}U7=v6hTGZUYFkBeYSX4(kV zF{92e^Hrw?%?yI6)k(yO@-ea+ElmFNN|Y#rlgcfQ3lpI`JonZ$Qc<q?s4kR0#bGn-Wg+!SNSYYOiYQ?TZ$9Ou-1wY?tD;~j zgNU}Ux|x~9(Ydw+w?qh$+a{6hxD*7_B^pV2i(=_}WQBqv#5nQ(icB&4TXYnz^m?K- zGeY*d;WE22;wnLdF9XvEIsP%Q)+!D3)E<#T$$1ir>M;(*f~rW@Vd5|-_xwbvbPU$1 z#8h--bc@Z8q9KED0YchcZ0F(QjnRft>xH3nVI2rU6%B;|sdYYuc#Uc1DOok6mKsC~ z?FNNb{c@2;JoBLtC&h|NJbazZ*8+`OUs=stj7vmZgX~+;mz0yClu{70;ExI?L=WMU zbRMRj1Mk%7M+za;VirmmMVQ_-5~Uw7pME97=NUWl~$2 z+OE+@f6f<6Ezt)cznFpBITU9tg{XWA7+o-u?ml zF3|2uhpi2@ey*f^p{vyIf#o-Dsr5D}4UBI&w_U?pL(pYvTeS)aUv~^h6msYoej?li zO5YIDCxK6loR*r5-3!j^@S&~OyZc^$OIC6GnF;$AFW228(?lG~yQ>IiwdOdv3&FR$ 
zm`6#~tl`4+2}B)@<+tB-Y1}BJrXbnd{Qmde8ary{6+Xx&tsQq`<9?mHpmJ2RL(J6P z?xcuYs_ddXIu3}m&PC@0kV=s%no_j{JbE`?0xLTnS9m3Px?G^|suVK*QMCkf7SUYR z+yO1#5BLfwDb|tAn;srLH7=j?Z>r`nm|MioGV2=y^60DpORRdr1`}WnTgL+hJ#Uq1 z->A3rZ2PU>K@vSM><^^tFv^xh4Ls95d(cF~o>;N&k87y6UJdxSm~~W)-q5)CX?Mk? zy6VqPpGT|)d>zc~EymgHsNO(=xLW5bRxch^#1hR6j2YcZyHg5WcRZOj@<%}jHk4n4 zQ;}at8NUuhQeEv{wTd0N5mVz~qHj64#qw6&bRb_s20sAcZMCDG+KtnE8Sp7 zjsF1E@E?;=bEoUxD7S=R_wHhMb{&%GotHQTrfVfXSnaG6308RO^LO9rr0M#Yme_rS zbt70mf!1Ti=a5pXCZ#g_P;_a7e4$R%P7Kv%`ep?%qv`}w}`x>BH+I7c76Y}*NI4>tA ze+NyH`iv@lw%VC=V)}}`%~Na6$Kc(g#eN^gf}K+|bf}{yZIAGCC3W|E1Vo0k#_@(k ze}_%R@L809Z@9_REVy-p-Ut*Qg_dn}l#w_BtCnvPK{+lr)k z2O)a+(E1&nVC-7Z+S)ClRWze0H2aplw_tFHcSxp-Fh}c~SFVfZ)A}#`_0F@==%`zR zL%loe(z}jZEb)@q^$sGNMPqO9AGi5ivku*N*au0j!$<2E5_exR6qj2>qU(Jo+pvjd z-@;t|v?Sdyb%-s{i=n}{`xAuTz?X@*{IeRUTyh3+cf40~)UlI~ zd%Zw$7w|3z2L{?$#U(tA{5=lsyF_crkWV9r<@__+I)p1x7N7FwUm6N-xdv?ORl?kP z3yMj1+|(QCcOz|g1JqunG+<+2rK={i7$)2)m*q7(Kcu%1A483rl!BLAl51s@e2`;p zC#4^~qfpLm<8R2<6^?6TE(RM%#;lj9lppZCVYYhv?rH7bnm10b>1-vcqB+S1|G*+| z2fV;e<1CTqm}Brc?1q*})~qGPr`#qOX}G0){pyr^F?h!&;|=fz0}VR=Nh(T+w1(He zW3-y>ELgqPK7S#_#+y!cZ5tr?^6i0?UZ%n+L$XfB8^X*`@$|XUSZQQ6UD?KW<5nvGl%=~xkRY7{4Qqk9%CV|#V7W*pPx9^XRJO8`QWKofmu-0_N#@> zH11t{!z`lkHn%6pQWDhsCk_8`_z^KKD*~OX;gXQ-xHGDsyxn|F zP&2n1%DZg|brt-WoXY#5lF9c}`vZJgHvWMnr6z{R2lw3{d1(g8DvYb+lKLZwndTB; zB=sIt3=4m)^^wC}GFQ>K%35qQy0#%o8tfD7Yry&TBbMDJm_DDaiGJFsx%c-j8_{RD z0OS!dU|>x9^}$e`B)Y<_xNNA{P|YPw0@} zNs7i3Z!U?v@7axa>E~+MLf?-otWw{D85h~YWQUwP$Z?BS#mN)RkT(dj_o|#!v6mZtChm(<#yy7-}N1V~mY_F41_}NAlv%t*mLN{t7 zCGqn`I#SN?oPTuF;n04ayahWZ`*@C8RNyxnqUvc(n=jc2^jsb7@_3rmw>wJc8_b$> zVA%;GzT`C4f%-ht7O$G&nGa{}YB@%Gy&&^|y7!Gte#2W^74yc^4p6$s2@}b|>MNS- zX)nx3n|S>`jX|~1q-WWxgC zF&t-k0o6Ojw7|?F24|1~1pDP?!5*7aYS6lQbha)%;YdvO>hE*rjDo^FHj=tjD0a|M zHIHwm@QXsF=iZrwu&dpQoo@%ZLHeOzr&JqeSgu72Tq}rH6whvG%Ed^?M~f%@VHiED%}`$;E%(CrMwGp`_*a^TTO>N9>bp@ zaETf~x`fv*BF8N8z9lq+jS`&PWizj?`2f5A20+DEWzTm`Y}M65ZLGWeBc_#^Z^Zft z?ox~I~o@-D;F}DV)s?)Q; 
zx9+`937PXm2Gpya`Rn+uk(I0DCSz@|5;1C8`7`S^BX4DlL*84W-a*;31hNvqrNQq3 zOi>kq^YA_mk0el@LV94E^JE4-n{K_hZ^-a9cTxi`igzb6^IrHt6fc1 zJ2iI&NZE)bC4IvBw49{OQ4-C>*-Q|!OZF_L(vMV{%ijKV)tTI-iuS184o}byhg-Is zJ^*o~iA26|R(B3TT5PKh@d37JuRL(GL3sl`Q*Ags0 zYxqWoo{~)Q7<|V=KGfT+EEFtzNUev<9NMVNbF&SVOwKK;`Vv&Rdl9|^IGP5DP;*Q z3!fD*61bpgk||BOOJp63*$r}BZWHX2E3{zek5ZTT%d>gtGYxu(x%B@zU1zEMs2aO1 zf>S&}V1DMre0F$fMsbpCwb$rZjCCf-$rrOuE~%qmmg8n!UYgD7#I4X)C_ZPo*dX52 zs+zl@kpX*kJtZ3YsS5i$v<67dQ|>`XgbbSh#S^SAHHm1Fun(b0yasSLEn1He_gemX zM$s<7PT*Lcdq(Woo^}N+ZdGsgSk0^?b@MorpAO9)xWq!-+>4V#KPqatbZEnMaYe<3 zM=TnzM(jiuCVnc>%*vM?%thR&ASoNq?+Q4RHFoWX9(>!^5prj>>2qc?iIHi#j&@9; zpe?Y_eDL|+=s3r+ab{QD#YvQj5Ffvtz|po8-GMo;>>RS~CbC6sKR$uGYOUgy^mGsS z({)prRT2ZHa$MQA?ZV0AQ+&Svv z5D*TMITPpTTP1pdZU6k09$+)Qa>f1-roY~@e;-n3x_V6+_-9c6?B{+g^c@zd`yZxp z18kk+vKB8;3sX5AcX){v_oS0fwoZ*wGSZMt8+x8gB%ieAT_1_ba7LugCd{7+lRr7mX5i`aM!4sm@-F6R*lqu6}sEx zhGByjW+Sg0-WQ0DFL2u8_r56}Fn_C}&hfu>g`NnX<~2b!d@F{zINtMY0X^c_`NZQ9PJGX^zq6D<2XDrF_!#Ts-vp&pS3tG8ibSiT*0&%?w-sw@yU!P@sc24#0l z)Q6NsVIR9kr;b@sQePYGt7b)GThB_X7w&Xf=tG2>W;B78i#DiZ)G`z#rEGOAXDUr@)*$Jq$qflM*rnbh?Z+$q&3%f?SmE1)-h88(VauGu|yY@Er zFcNohLU2`8TsWOUZJm|L)rpam~NpID!hwKM6 zXel76OmE5$FcN4P5r;a+LTu})ugVRg;YknB#*ka2^hMs*3i5fsrjBvZi*+eqbpJc4 zP5^1G%l(HSqXhI1FeVoN4r5}R$SFG@2Ox4?3~h!g&IFGLY8Zt-jHYx`tpp5opqxkD zktPxeImfRa&D%HC-@CszXtAf~j}zc!G=?& z&4RT@;8q^%3G|!)$}4sVtnXKD{`<*g=1veUiB8YIaA~s6Q8kDDDN4%n-%aTM5VXbf zU!W~D&b6L~va?Q(gc(U$Q%0 zKM2@V1MtvWz=uH?TPJuzNfJbv9;^}_kP+7+{snAaz_J-k@cJM=v~wm)gf#i<7@WNf zYuOi9lb_$&@n1madcq1xL`wX6Ozi+z3d3@;`YOI#vHG*0mub!F$Wp&YY;7x`Gvx6Gzy{l?(hO{O> z#Z4xW;55A`+l&l7w^=>0FxW0M1n~p@Li$_(AW*M9)$oK-$Q;Kn#|~f1mH065(#zML z`|OQp@8eGAXWPRsYFXAhlc6Y(XAM#rjc5sd2GyBg>oTxd-w!L-+ZX6&e|vPiO5Ct7 zN-Db@9(OyGnIRX58@0-$k1d?*AzLmrw1U8_0K}|idOMjNEN^SE?e3U;*DA1Pma8l_ zd3Pm)T;D#LJ(`u+WkkP(=4D4ls4AsIA1(eA6$UJ^sjvv{&_oCg2ijUKLa6v?vLVU# zl{29+2AI=fK$iCR8`Nk``09Z*!%^iZJq%Nc{qr>kGpL+#1EFLt)Md_#NIj zn+-Dhh%Tn6f&db<4b1cPJM3xjHM=!^0HkTV|XP2X!Tk6`me?u%BrUu8oL>rYqPr~ 
zU=}CN8E*IPJ8DkXMkN1@;V+4J$#ol4ZVK;j5oX(lsl5-+zU1m$4s!vZaH9C#CiO#8 zacCLE5XZ+`0?>>u8zGv^raMuG!;m%i?D}>4W|1Fgq+ew&A!?jep%NchOPa{@*scbv z<(lf0MYWMK-ID70&3m$?bkAeT3_OrFY_$h6CxTq2?+yeBRT&v<)VZADUrS_{bT`vbScL#jQx}a#jxcO;rwsYo`L3txF;77WPQo(cCw%oWRuG z896cEiSh{Ceo#lN;}T?jou8 zA{K-Ess{A!a{U+qO%$*sKuqZIC}pyfTPG$=@l8_dk*w%+GAss-Qwa7y?Ga5D9s3h~y>s<$ntslA(yyqlFWb!^1>5arn=ep~! z(1c9DnsA#a%9&Bxw`_ zIB|<=7wIu>?FZNr|T0NwCNO-5m7xra0mm{SaH3WA<*f$ZV;T z_jn!BQ4XG@Sc5G;0l}W)&R=MF!Gy!!#BL(hz}Q#lP;zoi z8xJcJ3oqgh{B{wkaMT6$agB*AUD16*{qK2D0C{^)z|YPw5Bm>x2J3&jGpI`b5PQ*b z(y?n@5s&A%!Xy*+I`|R-2Hu)=M3dU0!J`*hESZS~@TgXFL{VZ`?+Td!EAa5`%_n}9 zUj~6y5UYz@XB<`X!pFy2q`GJb#5x?dc) z2K)CAE=9>R5sW9bw;8EKt_gJ{fDmOr(r9UYpGjr5zMVg}BeuWmB}CyI6~?MgCd6k? zV+LEfb7;|Q~Gi#S&999dN z?Gq|Oz>tjjIVS|(`Y4!8L1phA_=>#V(;fCsKaL!1gzALz+oTGasW*GJ<}+Ffsu!SVA$2Qwk5*r}nD3R`Kx6YN7o0M%g-HDd@^ zKnWMBLB^$^@MlCA8u`HjzTpE{<{}E-%x8ka@Zllgn~4C312p1;fw{Dd(J%xgoQA5C z1FBw!!j;Yk2XHF0R2E0=N+_qKLSa3r@=3I&}v#Qm1#xRD3t!6$F|HN?gUv@FgvmZCQ}w5f`(XSv|^hYyJ}r(TeIl2AjD zd)n->8z$8p8V_C-AnrngNPP3ee|CWo!iWLVLtW&VNV$n%0fruIZpuYIh^fp>gup{= z13DfV8D8U(h?`03uZ~me0;Pay9G#~jx;A?}d?LbU49E#Cy(r_IuWQ<~znc z47?zNJK`VlX;?c?>2`W2Rw^#yh+7`WVMG^ix1kEt%O|bB700&fUv;I^acEnE)*-dy zsM-F2;6PxOd*A}ymHa1!&;sX8trOW`X z(}zxh&I71AWA&bi=M!6$j6+zRy_mTa)*m?eiI!V+G{}3c8NRo5Hc4o-Hdp2Zb{z~k zhRK7e8-GHVT!a_wcdJ*EE8DfcnEqzK;`Z5a1)cc^yO8a=P_Ed>`>$3GdzE@vQ@Vaq z41;yxXx^`fvUOA9H2bfe3wdQFyYgiJjcv@k=t;gQe2yngF6=Hh@R%c;`2IuVn4^B( z>+;U%VFGRaSZe&;$CRT(NeTB0mhY-r(DWN3aQo@cAm`UQ$i3`~!1_H;FYZ9`(e^uN&5)>$m3)%WK54z^J}qyk1q(r^`;LA+Z+UoJd=Dm@LdjRm zg)Q4w65gv(>+S1F9rLq`YBz(mIYSX|`;X-`)~^pXayMpUKQWT_a9ikk@aCo)h9l!>HQ&kry) zcY*6)j&nw~!sn7drxgpte=vI6e>eKev{P74aP^UmQ|5(JX3=XG4(n1^&hBf^b@Ni! 
z(rlU9^sPwChKR8a_yLGvvZ`?jD3EY_XcN^azD4M(9(<8aKLe>74WKnMgN8LSAl4TF zF}%GQ)UI3GoBKC+9WL4&(%kEXuXRS3^N03#Mc!AX`dQy=?Jl1Czlt+YwY=WST)Txa zxbM-dy$}4|`|aOe=iHgkT|b`kL-u>OYwg?b4_z*kLJw?z@@^S@?X%b;Z3oM9q;ZIW zQ5J2`YCX;*=dBC@#1y>_zc3F|x;XaMM*DL&HWLHv1PE>mT) zjp^1u(0cn-Z?v1`X{@J}%>;_Q3Ul4GA|>x3E;JUWBW2-xe6N$-CR$?CoH$TRuDHo& zq}QF;Q4KaHPwf~5*sa_aB1Dz+<#|)WnSVES`9#-+mtpK*M#E)fDRLP^0v@%4ajV_+ zcnP^ifcNN#1!4tg63P1umd$`y^o@olpEK7Ilu>Aa_4lrg<*iw{(qP%LR7#$f3^d8} zjFIc6A=)nHUc?;(077P6xVP{k3gA6oBGO;|b(e9LtTlbZL*KAbT-TI3{KVzNg*t1k zM99~~GfDf|a@k>b<(1ItD-A-BfU&Cq5bnJ9T^sUdl=~dubnhW~NRO zTT{bf{yr%y2)g2Fq(26=!v1bEWmrs^7V)ZFs7#6ta$X;@i)=NQgp4sFN?49j6)b3* zv+(9_PHH)?>I_zr^>|wgDoWRZltZAi(ppw31TUx&*u;`qD(1?zSJs-iKpMzT#wZvD zisD;{6n3W8Y;JLas46eU+%K&{o)&p>TU&^F}O-gfR5{LVc*QUi3)7d<)kv zEAJpR4_sma%odz+Vm-S)(@@CBDt5^#Luw5+(}xQ|He)98R7$&X7q!^Q(0#(WV`2jK~nb z5!c?iSiPm|W`je3Yk#Tzv7DvaGxuHnsmB8SW?AzX&cf>OwpLM_Vzma%4g~a7WNePl zUjX4-30s*~)E@H_?LCzcCEeqSsuXDqsbkRgalZtjOrYQ1qNTW9nq?%(RIB438BPk8 z=iE|Bn$GmTf|{F;fR45BAK94P9^O71yJaNS_bKEr$!n)p(d5CW|0XTM@x(cpxE#yB zV%ThD{Kn^_kwGp-Lqzd#23 z*uYb7F$q5pUE2#9a7`iY$%q>-CwYINZIG5T*H4O4;_I6~%IFp9Pt>TXl+%ZPoF+zg z6WWgs*OvK=+mi)%v)W@O-PC=xv^yGx*E5O!n*P&)mF=FpRV||VRiawO5NkZzJlAr^S7b}&opb6k_=s$fQXU1U9>A-KqW%o z-1z#OkP@Tj64?aOEThIDgN=Jbx=7#?&Zr5$*p>ggL@72dC4`u|pvo~%46gEAN23!Q zYTz${2p}qD#Di9EDDRxUf!&SNY0S?*El5loUj@k zk*9%As?uc9&z8TP$9UcMV@vf}xH&Fx7!R#El9QLs%*r9F=GvTb@Y2(?h&+UiNo$5E zIn!$ubh_n_&yd*+L}^4PFxH8`m3|YX-0Rgqw{lDK;AE^1978KB4B74G$%X|>X+vk! 
z!qj8xSR*^0ZPD(0a9mkB`s7mxRNCnm=XiwNMinDQWWk&U7;`xwprGj57t5DitJ?3dX2b3n{oMSj{yUx9gsF5x%JxT;ur)q@O2FdSP8g82r0% zPkbO|LatR6aOKFzP`|=xBG#bGcAR7S#Ol_zxJ-rA5NZW6 z8t*x$yPW$rWxu%-Y87>?6!Yw2HPaU}a~e&v%H}K8B*u-KFT<30Q+662cVcH~wMY_t zKp*`ico92Ua{>Mzh|z$jh$j_pC4a{RHKmbDBVlsjmWcrkxcYMqDk_oGBe-m>kcb6*Qk;1j?E!vmt*SgRPCE zRcdBLuGJ(5d`2^NgvXTv1c$|t>Y}ZJ4X3D|?{fgwF%+aAqeGH{z%iKx*Y#@7$adK= z)7Xig`>%F`>KS+znazR9P5(w*%!)M0<(fgh7>TWwyQuyx+F9Dgi<8)Qe@lwxv4?$2 z=5??Cu7~ZqU8a5i-fOI-y}b2%m^tWV_>`@ztw^u+`r6QaxnDodb$_GGrOu)3ggwxm z)bx>fsnqp-pNQ`1 z+uGNbwD)1Xw(EWQ9=8!YV(Ogp+Ups%=LXl8>1`=Du;=mV$o3ldGv+gS=w8?BJ2M5f zXZ`f!rrcHYMc4OQk@outw>SOc?tW*xH{bK@vG;xRI4bwTXYcEQ7sdCyc*XI$)Q89X zyZ9T%`{-5o<7PqD_cC#^7G25h=KW`^j@!yk;oonUwx2t}GoR->AD68kJ*u$VFE6ON z?q9*Ne|zrD&t<({K4~>{GkhDr0=GSJpQd2JXHa}UOfh=x-e!AyKb9j~echw?zaD^t ze3pb(zEnp%&cM*cG5Cwd`F)ZyX{iM@c@hpL<>c+1I}bT*mr?b15ym^l!0;-EthLSg zZwHw>732Hq)f|)tcX>85+$9ba%1dKA0jleS?w+6c4TF*NoGU z0$+4-r?M44at1dKgBpHB9pp1!W@M$iA>C;Sy|C2OYD(fGQc}4{GeC3zw=PjsV1~1( zs{BSGep5DYX}5b1#Mmg_JLJfOEG4ZA;SoPq99!ddKQpOWn_kq{Q|8b)A56jP(=4*HX%H^VkRUGY^Mp=&RuB$R2AUqK{}- zR>m~34Oc$}wBA+MH@Ztc z#&GCUTO?;68Dtjnu^u#uTF>qS&J}j`^VHNkGZG-3YRm8okFMr-aHW|vZ<^61-y6(@ zY%0mAWZDcN_pb>i`lm<^7O9%fq>xX6aW5w3d%>0T~z2oZ;ksRc;DK{6m;G67!B-7$veScJU4s%b$UHV6NuTi z{K<^y6Toa`=*lide~t2}r7Bd#WegKd5<+M@7Rq%cmfJMtDgX7$ys9Ayl&9T#?J8}Y zt{t)0Re2JbkF>HHc#<4|h$eqWwm%%pI*V)ySHgywmgSIZuO%{r%mh9b;5d`zJd5e> zqq_p(zv-OMZOjb|st_h%ndZ@_5ot)A8|E^&4%*_g{nH}e(EE=sCio7GEFw)xpkogg z2o;VQRmhPK$`18c|4Qf( zSYeljm~!^=YoPtQ$*)JFY>MTh=3f|2-ZDG9}S&m)$ud$7SVImqzz#?Lr% z=SHFW?bQTPvRkgre||8aFLaUwfMU60_AfQ|y#=)FfHqHw5TM(_{CW|H2XLwtHsXd` z-?^OEyQ%iPmX&;5!oXHYVe$)PzwVLtc?7)4c0!Ij!Lu9N8Bqm8U2c zS5KwIRAom{pBR~^+jVIi1sBFX%&LetxESHe5DI;KrXhn$Wu0q6#x-lZTVldWT)sds zQBj3t#ZLii$mViumMV1^E%d}Jja4gpj*Srv8v)m-&I>4IhA#-;qjN#+D{+KOsYN#} z2I09IaIKC2pN`pbo{ z+3o67TAYWY?%*tAz(d9=N_x3LtNrlW~Gq%{&-6ZJ|r;`?=$T{TAU;}ZmR%$7l zgfvu#Wu7YNLXxxeqczg@ta)w-%R-&VEfj)}5p(<@`j28H2m}~&*O?XlLMufr?}3mk z$P;Y{o|j1#q39s<;r>frWK1r1OG1|Y({2Vu^#Urc-pzpW%YeB15jsUR$i4a? 
zy(Ua*m3?4gKYlGYCCB4{}X#qFgl~AS3|yzJqc&n56?SfzWjfpwFMe_ zl>YTEWj)2jsDCbQa{T`)^j84?C2pGh|3~Q4%i;ec^!5KQL$B6`{q(PYNYO6O$EV)_ z018n5prY#Z??V4$6#h>)C+~HvVFpW$xJ1Gp2ZcaDONopz0{jqDe^~&YDzqIzWi@vU z6sGN(cA?ORFg$_+@Pi-R-aKN~uAj&5K(IrN5;&0%<)fSjWdxfYrL7BhW^?2DgIrNm z^WEgebC3J=)W-AFhwbvot}3VV){$uhD{GtxlP7CxB%BB`AC&lz6|a9 zBdb*9pUtAlF(WC=p%86U4Ee6iQkf};F7WgQ$eha?k>;W55KU(7TF6LO?9n3~UHk6yjCOXWY!*tK+-w8^ZTYZpW z`Q(+haaF1a9zm^S%?3Jl$(UN^kQt7Lm^^912-=x7zl+1UYHl>?8o_QgZY4jU1oB;N zdwS4>g^YmDT5mm74q%YQe;Y-d97HX=INk{jh+X9+cIM?AhgxW0dODdQGh`alEVQn% zSN+EAX)v3~cyoEto@<1S-5Lcyw_AUcn75!oW&%sw0e^7d&%Xs0CZ&t2PB%g zvWbMFw_B#-I=Rp3>9*Ss{0(?7C%Y{}^jv2C^!#^Zj?NX$Ms6dv4$CktzmCvV-jpa4 zClxSQpIBWw20dAyS){sQu~H;4-;OK~t<9iRjBE&gljj~)K=z=hA##X9aN8ETENsZ{ z3RDSb43|)ms<%Zubw?Z|ON;)U;FLg{adr!`t5irzwz z%w?D{r}ws)Lo{{%&%m=E^pIT;Wta)!GjhGmZUyfgYRFyNof2`~Y=m$(35f);U5lM8^R+QV;siP! z$w0S5xn_s15ah|1=5`dch2GIHjx!u^rY0!0bK{SrQSR2QtRtPu!`--Dt{+#fA1qJYU5>v5;6HEJ6(vmm zF3;lu1gRrCn`?A zW!o(X8md-bGwQ5@rh(p1ru6K+{l1l%o;Y*Lzt2!tCdq6z3tm{`ySuI>pERlHc-P?O zOT`*^UF>a1OCU8S>}e`YAz}ZeIOU*@Wl+h?py8Sfie&(&kZLE73vMNCejWuG3wgo$ zx0<9WPF_JM4>Yg*Pd9H;u~2#8FYK0)?EOu1E@g=WraZt!Jte6VxS1Nj6e-F^Yo#3o zJ^jsNQ|`^hrO#C*pqlHtW`1UC^mNzz1kizk_SG7M+}~EOT1N0xq9)?fzxe&&*l4Pw ziel4M1uK>VD?&OlK$BZ>xg?62d>2~~5#x&&8BGAsBvSD9 zM$E4%y~MPF!`oO_uDYnEuxow0I_XP*z+@w^?*!=M(#e>;Hrf778+PvX{c?HuC?>Le zy{-5H{O|2k+x9Htg`YXh|0i?k`tRoOKOgXmq{Q4TY=xu@j0BA&&5QrtG+q4Hrm5X) z5=rS%aR$a9=IZZe`cB|bfaH1xI*2gaX1Eut;pV?Vug&m~_l7|nA2-AWIiPJ4c&?}C4$q1wcNHKLxdu$i7( z(%%kX>l1H|63$Py1ScAcXvp4ZMrrkT}mrRcXxMpBPre8-O{Nb=(D}<6MD}5 zy5x8N_rR>#vlfeaGsUc#HS_r{&5V_j1e8RlKIZUTy+Nm%3$ASzw&jcjSeUE~c>220 z4hvwxFdLTe{AH*WL+eC`>g9hlcLk!ad1T*-|IrvJvyA{2(tqT<2O~={H~F(?i|S9vdAGmI`I(eA zQIzv8Yd))9eEp-fMx5q`?~~muVJM!N4RSI}vldfQ#*)0Gw+tdl)PEL7qr9`12DUY7 z?li`;4-W89uX=~R(rg~eyLnM+W?rx^;Pq|gZ1i#LZtSq)^A}`qmHdZ!LCd>MruO9$ ziO{nKi<@D^j*E-HObDwlZJEeRjo-g0tnI&v_6?Fw1vp+ur&!|(Z8;&93S zSjY6`^tj;n*>NG8wa?q*+I2(512+%BH|6m=t`f8paBBc1xccr!xS8(f$~5D3-M-KD 
zh%V%^X-4#E(flPYl2H7E-^zt_oAMWk8v%%>bC?Muag~#T`;C>g2qGzSs&>kKO$mpt zsMKk5SnW_J!}#%rt^%ei3gZfic8kO)r!MbPcT^KUKdeTsd3_nxP^;|Zd&Iy~+m>a+ zu+rR2XMvj?iayLy>Hv)H)CbH;JqA2Dw%r6Se9ZC_^>#~nj7f=ivr86T1z^0^DN)JmO{E2(NL)~b!bvE%4d#$3$E7O<<-CoRkXFX9tD&I`*^eO z8Dp@_K!(DEDAw}lHM76&q5pXgT69=$?38h(+Iy+qqGHkL5M@;fGR)ipw4YwgPfvv` z{zfrWEi7dy22i8)^jo_O(eTT-Fck|5 z@)huOvBX-exeMT8?A+RrHKt8+RtiDsttyO7fvEl`K2{t=Q)YJn%kW_|oceHd3YkFZ z<>)pruCjLm^XpJW*?uCIRK41sO>qL%ou>_m^Lky2JTZ}c^(20Zsv|^2JJd}YJIHy& zyhSO`L~vSyjPNR1;paIo?G#JqVVqRIXTdaCx?UpcVbZvB_0LmTaj;cK$xw4PXfpI% zGdKw<$x)18xv&)Jpr2mIPw%hOP;h84NY2%(df-!0GFo(St{VNKEk&djSAA{t;BZ0) zxAAC=`{f;4Mwlct&>b+LmXHTC)fgQ1IrVZWl~u*RhMi-qB79zL%otH6fJ6Ta*O@ol zo^+qHVZx9p6VMwld9bae94xZz~72P}3xZ(XWO@E$$!(a!~mhEc`2|{WEC(SFoC9Y%4Bki(8m7 zwJXv%0W4g~VC zpk*lWXzD>IL!dz$szFNhuS~O9HbQV4w0+;|1X{vMM3h0Q1e&&W3KA{j; zkPI+Dga#aAmKozD@DeT5Ipn2*&-8S)^g}g7!z%QpRrH-Tw?j2_bQSasGLmyzKz$G> zUP66N6(|G3jE7d6QvhGs^g@LCVFl77 z=~y=|7*$UL_{f63BDT`I>JT(?8&$6v`}InO%44N6G-y4b>o7C`)9ScE4t-ley~?2` zScMF?S`Ta<47b4J#6#2R-o3J3p3o~SYdmJ5oYx-X-T6H^YmGt>|8pP--kihK!F1WM5Io z8T7T<>MU%g`wGhC`BHB=S8d!EJcSKXy*VFkU=|SSa8213%z{RhQjIu;%$i1FQ=>U^ zttFSks&QYljhVHLLZxOhPnyMynx^h@(phUPmDK=kYWvK}^(4h7a_Zc8yoY#GZ#eO- zIhU+!7OiW0%_2u_Q=uJ0;oxns7U>&+#7)cBX!9jwq>)3 zQQcHG&MO<&C6Fi#aXcq+dDXC4=_qIF2`8Vm?cz!UkF8VpUS3#rH*M-Pr+HqC&DdgE zW3#za|6au?W-ic@%+!`SyqhN+abXO%s}Nk2ojop4*MM9&Am_W~Yk12_!-iAav)sc0JgR zdrA3FS9f=IcvRJERNBKCc!I=7aLY$vzp!03zlnV8#C>2tU{u({7`Te$M7YN@@+n6>D0bwv!I6|aQQMu_Dy3L&8B(&DBXnKW^#{HAT|;c;VJiuOVhr+0o?j#_K3Cr zE2K!m3~v5-+RdrH{Bl{oL?T9k-AqyQL`FtEy@D1wCt8=}Q`R0(!7zQJT#!hEzdl^8 zo_q7P>*4m)e14{^Pa*^(q#n$mn}08(tZqUJy%X*>>l`R;_-*1#M)X~Pzcgd4UO^Lr zEAAHS3@B;XJ`t6Xe05$K=~BcqZrCD`htWxIuX*GRaHD1d6g}*icne3TtI<;CxOnE+ zxtldin%KkmN;hI?N0a-sW*HPYte@z`*srV6TIK*es!@;;Gx|6TlNipJs;kgs?Yg*C zGYd)`zGbBd3~^Rj*sPfbB@Rm$t4H=TD(Ysm@H>IGA^|u3hh-Dzh-~NxLk|COQ2S%`X0fM5_EU z0tFs&H%WUe{o6(TB9UTfN=6m^{E>G=@cdZr^p{%4)t^j?`&lcG2eQ$6FcUn=&b1D! 
zhfM1G@gnPqj`$-y2ol-`%nqx2ObYuMBUgz$_;20IzAS81D^KNB4Vu*UV?~yOTT+9^ z@zuhaL+5sqrp6AlLUz3(erAK4!>N=&1y6t>2rGDD-zoOitt zwUB#IBZxy01fN+Hv*m@C-?>7ZK=ML4BOG8%@MOsf&%euuTYcX0;u`h|HWM?ehfM5%?tHDv~Ts;j?#w5MGe?(AUT~C~=rP^qM$0%rxM2%Te#LA$pz%yakGymN%gy!=Km^37AbbHPX-OjrW zm$)84aw!BhBqxkD+yX%rrQz$&hIc0re9txLju93xtN1pgIw#+mK=eZ@K&8bE5?4tY zDR!>C^Md#Wi3CM}z>Q(gy&>5tA6#b7u+bt%nM@BcOgSAh{7xNBmyt=#v1ptM$ObtF zU531XVo$fxFp?0JP1++G5NNG|gUZTksyk2=HA!kB+840$vI*Z_5l+N)`rQi%WJn@t zex$c(=jJ(d^<^Y-h8mX7)eZV#RAu%h7so_9YR2S({H~2qS=&qCPKtNQuqYr7QSi%Q- zL?BiTzo|dJdJ-lppQ*yYN|YPvl^B*(gD92198&ZkKc%VFKwZ>S)FCOGs9AvZ%SDRm zS7^Ca45pxglBikI*E=y$<)q7Et{;vOh9d~@s`QOC2Tr2|NIk@E{g07(P#l?elm?6j z<2+qqdGH+Rcis-%MZuF|i!u7^A~mDc$YV*0lkGI9xeu&Hd60e)Bl^IP;D&Lj6`-^!J)laP z8H9vNNX{$a9C3g)$&sUT%{yciDEcLc2vvaGUE(?dhk%8`*y2-F&?YJ$h$oCDJc)>f z+}P|>SobTMD06gFH|R$rN6t1#{y&FdA2O-vz;=Vw2m7uY@&W zI`D0&eA*47L;Z$PA*mbMM8v{ntpDjD2pSch97}>e!h1pbQFeG!kt#bzIV3)&@jJO& z4$Z1*!LgsX&{i z!JTWbv6nrbh;S=7yPr*NRO2mS&ySTc zk7oqvsk@h6t2HyZWSuJsO4GJ=nE=0HWB~Y8-!hiwb;|(Tq z>9NXUdCew#J>TWKjhr3|YtX z0G!nKis@B>6*Z!u{GoYZzN6-D#)xM83jmk8ZSjgmbFQOAKAfg{MX|1CbM~e9ZrYGr zJUjqfow2x5r!`_dZg9J1NG$#p;F~&95rG;{o}<=o(2#O`34tn4g`?s4+pP>^UOisBrrErQZw8ZZg{H z&odkv51s?dl74O>uNr95)(2Hy7L8483 zw^It||$W*J5q zRv88v78xcPHW_{}%!xB3V#cG3wIk_L+;zLB~)!-FKn-L zE_JSTE`F|dE_bebE_$BnMr%>-BxMh_d||Igf@rbg5rX1gldF- z1N8>_%TO5X+FpBH?M8N1xPnf)EANR5?}78oVVA}3Q&QwQolgTA)@`Y;DUC!ya zylgHlN7oaF#nZBdjS9r+?Yz?Whu8DCS!_z?QDcVbq`YD-0fz|l(k!Cpd1Ex`le{L* zeV@aodGejK_rvsE#W$k&%EoxouXwSY89(bzHJ3R_?3au&q$7las^@_vwQ}aoV{qy5 zyrs?>M`e?=X60kN={LMwE)|Em^Uc{#;`>EoEFZy@^}{wVwX?$KkqcoH-86CRR5Z#t73}dA~TH9<>carU&vyI)k5Sqg{HkK8n2XBZ4VM6wC`3(eZPG z5kPduz2-bHnV8SY6!Gzk#4%)^FpTOl^h<`RMd-r`(}O696UL4MVqVIcF>L7?Pc}NA*of{j@uP-OM9jeD z=W3#~XZRrCpqUCop^nu6W0+Uq7tEcp%*3 z9kVUi*>`Sab&>k@sEC^_qKb4fS(>oUnx;q>4?4%Tp=v2^$2)I#KP*GbZV>H&ejB8^nw~ zo9Hu6A9RmJq{5fyOt$7+G_IL7O&#Y=PrD%m2CmoY5(QiZ41Co@~5K`;)?yzD_4)lahY?8X|=09R*DzT zq*?6feN&=-1unayD9y+2$fMcfc+GnEk4v+cuhomS^WDSU>#b%+Dphl>^e;?COOks^ 
zlEo_HQ;WeaJ+)Ym&-~$HDzbz5^N#1g-Ja*?f4N=H&;N4!o}d5e@OC`^;XF50b9Maz zyH`W;rD~;~;82NXkyf^qH;p+Uu2Q8~@_R<6da(w1rGkFE)y-weT$xIt<7^vVed&(o zaHV#=~~sw;Q`-k8qS6YjzMB8z{GWNKd=y-|hI`Z4uLB z`M&byRQ%9U z*ArS55E7U^y}pe|oh=(XUT>=S#`gKNrM9A_g3j8I*fXl?y*vffa}j0=xR6lN_tuwN zV!l+c-DtTlZDClynQj?$|pbU(w2t{vx3OqVd1e7xvby;kc}C*;$N$4 z>~N(;s^Jf-RlrG9;0tj(&9e-A^Olv6D!mDzd*Uo{Af94u*`s63xmY6cMm(`sJP}$v zQEVCnoB(YdvD&!ssOTwZ>A`3x?j1O8+{f&!#(a@Va6NbdHd^2*UgI3;EqZA=Xih0> zcZf}45a%gfSu|&@v3IaxyH>Tfbtqf-$v2juL;g z*igrXfXjmHG*g;oL{Ln&8csm@2lr1nwAmT#b8$)RQB?RGvlRtPvudp=R+_3ORwa7! z^`(QZi+h-AYQYcwOm^B5EIw1Yis}sY4O&*=)ibQd#p<0Zlrt*LwzIEfjx~%Wg2iOY zXdSeJon&;?wpb`lg&9;2+2RHN=;aGGoLJ3T(WHl$pvk!OsE9@wy4U))L?^Renf-7F zKf9(VvR>K+NHt#|qD==I1+W}opexcJAq;c*X#_~5iZJxRjYfGQlrsChz?oAIfHKuu z#nJ7mx@4vx6&p1>e<>DjA|>8H_|EBTfuBN!S0X3=;k^eYr{s$P;|BP59*gDASd&+g zI>)V^)IT}t3tEBZ+%`0&qCpFqCDWU5XXi*k4_-=bf)8GU~SK)p;1+?P{yLe^~Rv8`CMeCkXho{_n;3Flec&2 zp`llyq3}I->3;cl7fU5?O-~+JzN5pzsW0^+3o;pGcvfK6U0NAN!ns&QVs%gn9pp}W zAikEE^^5OAr!%IlwXM3xW5vGyWKz#j@aX>;Ni*Piy=N+cj!fZh#xHD#<~XEQO6SlZrAYfvOdex8V@6(CdmP z?0-g;9r?Cp@LQ!4PIG#Wfhlkdz9=Y{M3K)SPI#J{>XXL%UH!Pum?HFr2J^;c1(Z0q z4JWyz()pL@l(H)dmFQok-_iC=Cm*s8ejPQqzkVrVe&Ee3i#*|Xjgp=lc*rjxifw$# z?!^}{-=zLkp3Cl{>4cY|gD~H&T>-fB$*60X_4O!)?F@YFRAh6a_#VWM+~y0iZ)a*r zPzEq7O07H`KkoMW8*H>OTRDajsg3>mer!VzX;d<3ET>dDFia41&Ml=B%3HFSLYf=f zl?yx7qiCX)=SwtEk7;X{0GqbK>o3kX!G?7UJ0z6(t~}bX^AxbeIqlwjd|N_Xc#?&> z=+E^zLFDwdC;2U-wDz<5{G9FK2SVMyTLZ#!T zAU^+uZTjBLt+4Hu7i?m>R~H}rURN~SkDVR9QB>YUtXkUDXF9lB&BB(W7FfF&t|Deg zn7$BYN!FoOt8x#=HSM^?ySQ-es( z?gfOdjQOt2-7>#)*UdVZZIc~vd)Mm!K&<~hP_m7@FcsN&a^L##oB8I8?Z!Uhr7%fH zJMQVS2lu=B{R1(blVr}t2ZY(vd+%rp3Qo3EYm2@LpGrXG;pL;OeEWXWE>>=L`<v2xU6C01gYEl;%&z_MzBY5TrvbSThv^BLf0XhBr z1ifWoWn^V!VPgKr8S~$*$ywEsja*j4$dIOrG%FxkdM-h)olP<+5q`1|JS}fz{7UH{ zls3os{Vq3GwfRE{u^U4PToFx?C_g}$UJG*)8Vy#&$R7pPTcorkrNp0*4huHeRZ_i~ zx?SOQVJKC+R#<)*_56Bsd-FQaI?w*;M^pZm!CDNk<@5E_iazgxc#M~EWbrw^c;^Ay-@yr|*LrDrq7L@O zf*g#`-Ug|s89eRs@R{aJ`in$YqI3fdUbtBijcM0L0%y!wVZPT5M++5OM-BDDM6E>M 
zW6WvZ=j^gR8gg4nB6C*rs~5cCT9|_Dvsa(=8?is(S=D~`}}>ng4*QGx9|7f7Ia_Hr;l~PYza`k zi9_UacI~&Q^<0)1ZHWjg&}`IGy`Y`8&FRjma~9XBj%AeGiUK+z$#gi@tx;peRW#1M#DojsdFeWR z+B3&1R0JlnRKx2p7-wdCW<8i@dn~#87ea3Qaeab`J3mA-9-L3Hnmdm*w(!RMM_-tn zvIRHGP>(Xnn~qV8GAWvlQH^R+vocYRR?3-79;yc889Y<*OFiR}n4aFU5RkYQ9F#2%f|J4t+jR@CcZ`y_!{2TZs!Yn@KSzWQ9fr` z(FKW>(H6-YdaE9rG>sX3fsg4yOK|oh>w8Hyh*6Qoc?|vBd+)X*fOLZIY4_*}Egd46 zjYO&9I=9Uo;EoznBGf~S5IU`P2U^{a(BD$-@l3So5F7E=wUCVq*+bG327!zz zjII?FCe$C5O$tY953QSQcw&+V3FYzmL?08qNXd_zY8MvJ_v(n5{cK`K)4_#!s|OFa8p{4oy6U}~!bRa_g> zzj6cFU6(6hGw7=pw`V34h<16eMm56dLCQkJYb+#354Y|HC+GQT*ILD6dQ2t!lJocK zlqt`Nom2TEV$EA!AECTFKCI35)0q(D4n`T9UUW1#y$Cge%NaMN z`_Ds;C~xAC*xx>q4kCL6kltpg-X5Y3-c?eO+==?`c>b8{{vm&J!;LT;_H40{OP;FwgE= z4XK+uF^bN9hnvfU_x4^bA7!w%L*2wQ=3Crw%~*^GHMKJiNG%%!a@NR>$ABCN&?5W@ z-l{cZ$!OYfOUoIgtX04j|6ufWxrpMmsJzuJYO@0SIEPgo46iHof(vy3kQ){_l_3m) zwLu$9Mu^`%Z($(Twj3R)Q3jPE;T4cK%W@=s%NC@Zm})yOZP)~xtBq2N8LrbXN?DlJ zd5)4>WpA6MFw(w`vWqlsCHhutf2jtXW7_!t>ADGRrgjHW52`W^T+jqa|fGm;cv)nK5#!&eFC>d z<;M@6>J-G-!_?1FZhg5ETqCKM#w{n!G|Da188tsuCHB_hQ5j9fI<}f% zKwS`O&>2_bHVhw=y!fF;1QZA?bU6kLD;6y0*V!h|mf(*oDK!p6UEpHmxTL8yo@IUq zpiAeF2<4Mi&bQkHh3u)>@Mde%HR!@t8w$cnb_@r+gw^U?MpRh*?066#i^4$!D;C#z z!WsL0U4LO$BEJA-zhLnq}EFg0N@S}R_pj`*GdlAKz7(&5Lu z@$w4;*U`RpAUc{Xoy`ejhb;B=23M|pzbXIn;=|g6HKaKhGE*}edLYyXe4EHFv^s*&Arvz zi?CXyR^Sph8Dw~~3$+sbDx3=W$Vl=4e{hkQkYRrpMdLFuhxi=0OLcsHl5rr!R(m<Ukr`8O94zs91E}YmsE4CxLp=WZu@e2c-AbCTkVf%kE^Mz6&f{Z5R*Uk+On?u1dWG zNX}kg5dbCM(rXkE{TaM?1FEo9yA!&XYk$;N;md6_bm=!YGBICRe>`5sJA3zhx-yi&cX)dZ1lvg0;D8hhrxoP2bUM*n~=etw^8S@B0+#iO(?zZAbA@hIoDT3;&4oDFQ#(HK=kxn-kO6nC65Hbh8cY;lr(CV zNS2(hEw&1_!@a7qUDERwuO5%yH_4Erw%eMnoM^_+0?ycAZ3{cg-~ z`Q;q)g8;jW>h=4hxQ0W+iu%0O@;zat8`^pQL{km?eNa!iV8Rl9;39Z$ZI9vC`9*lG zM%3{69HsC(as!#VA}EH*-39;ZIE%DbD?%g06? 
zb0NM-i~TRh^Z!xkDJ024bl^e{03R6tEcE|}0@jNZQ0Ngx@t1%9q3M|bCP{BMqvcm7 zG*U`bk(^r2wU{)1p z)oxS%IGBVnvO`Use?IX2WhRvX=@s<>iybPVCh18VKb~4;`g$j23@-VS*&DK-YH&Lur)za4&akB|A39?D}^JunIGO#gGvhmb^4aAO$ z*3N#NXP#&NwKl(rqMBlsfqQ0gk%PMleSi)S%rrP6uMV0CyQ;!Wn#f2=^raw~nbC_e zh)F^>NQ>5rQ}MJAb+BZenAoaQX;SjgHa1hRM4c$e?C>E5@8|S|fSV)K;=u+_Yd}0F zf`kL7@!2y%YdfjG0{`%!pW*x?g!1e}wiV{T01oGX9PbFxPL?izV}()dm*s@&lwXCc%RtQ;sgKEzrGNE`_odv*Z&#cUo$ZO zmnd<6i*oBfqx^MJ=l>Et{%_H9fHUH^zxdbanVJ3`@H4Yv|0*pGTtl)Z7;ta!mw4^9+Z$`YsrvFbl1+~BA6nD*>q2Si=dztfqfqz@(+)rh^2sVfO-_xOj`v(TkQyI9%^JJth z&z{|9{bxEx@&3RtdMblEJQwfZn|psP56P%MFpQtdh{%@2)(5u@V#NPUM`!dO7$#3; z2u_x9w}2VL|HW`k_yfcAsSG6ca>9QrV*Q#|LEygVZ|fE4sf_0ru{EFm*ScGf_6LU9 zQyG-5D#kw<@4zF9-`hub`X3nPPi4559{r+(^lM(NW&D9*@l?i$9}r|G{_NSc(|@J| z2yQWdTOO8AW&CbP?bmdW?EZlPHmmluzV>(9UcWLf_Wr;CTl#t$<98ETzcP>x{=fhm z%z7H*cVjERGH6c!zyOf=zyd&|y;r-VX*3)RePlx?V+mQSR?f)3_ l@AEys#tfADN6b&oA!zWJ{Mj=G@V6Q&SQ%6rJp6d}{{c4Xo+AJN literal 0 HcmV?d00001 From 2390eb5ed4f0e3af725f34d651dfaf268ba3ec40 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 15 Mar 2016 14:07:20 +0100 Subject: [PATCH 242/320] Add 2.2.1 to the version lookup table --- core/src/main/java/org/elasticsearch/Version.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index 799beb707f58..8b65adf170dc 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -83,6 +83,8 @@ public class Version { return V_5_0_0; case V_2_3_0_ID: return V_2_3_0; + case V_2_2_1_ID: + return V_2_2_1; case V_2_2_0_ID: return V_2_2_0; case V_2_1_2_ID: From 52c283cdeed8c7cc1974d010348f1b240e069131 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 15 Mar 2016 14:15:41 +0100 Subject: [PATCH 243/320] [TEST] Add test that ensures we never bump the minor version of lucene in a bugfix release --- .../java/org/elasticsearch/VersionTests.java | 16 ++++++++++++++++ 1 
file changed, 16 insertions(+) diff --git a/core/src/test/java/org/elasticsearch/VersionTests.java b/core/src/test/java/org/elasticsearch/VersionTests.java index eec912989a74..17a41c302751 100644 --- a/core/src/test/java/org/elasticsearch/VersionTests.java +++ b/core/src/test/java/org/elasticsearch/VersionTests.java @@ -260,4 +260,20 @@ public class VersionTests extends ESTestCase { } } + // this test ensures we never bump the lucene version in a bugfix release + public void testLuceneVersionIsSameOnMinorRelease() { + for (Version version : VersionUtils.allVersions()) { + for (Version other : VersionUtils.allVersions()) { + if (other.onOrAfter(version)) { + assertTrue("lucene versions must be " + other + " >= " + version, + other.luceneVersion.onOrAfter(version.luceneVersion)); + } + if (other.major == version.major && other.minor == version.minor) { + assertEquals(other.luceneVersion.major, version.luceneVersion.major); + assertEquals(other.luceneVersion.minor, version.luceneVersion.minor); + // should we also assert the lucene bugfix version? + } + } + } + } } From fac0f990fcbea296871ac4e6092502fcb62e09b4 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 15 Mar 2016 09:27:01 -0400 Subject: [PATCH 244/320] Rename "daemonize" to "foreground" This commit renames the Bootstrap#init parameter "daemonize" to "foreground" for clarity. 
--- .../org/elasticsearch/bootstrap/Bootstrap.java | 16 ++++++++-------- .../elasticsearch/bootstrap/Elasticsearch.java | 2 +- .../ElasticsearchCommandLineParsingTests.java | 18 +++++++++--------- 3 files changed, 18 insertions(+), 18 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 3ad592af6352..a89bee0f4ac9 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -188,8 +188,8 @@ final class Bootstrap { node = new Node(nodeSettings); } - private static Environment initialSettings(boolean daemonize, String pathHome, String pidFile) { - Terminal terminal = daemonize ? null : Terminal.DEFAULT; + private static Environment initialSettings(boolean foreground, String pathHome, String pidFile) { + Terminal terminal = foreground ? Terminal.DEFAULT : null; Settings.Builder builder = Settings.builder(); builder.put(Environment.PATH_HOME_SETTING.getKey(), pathHome); if (Strings.hasLength(pidFile)) { @@ -223,7 +223,7 @@ final class Bootstrap { * to startup elasticsearch. 
*/ static void init( - final boolean daemonize, + final boolean foreground, final String pathHome, final String pidFile, final Map esSettings) throws Throwable { @@ -234,7 +234,7 @@ final class Bootstrap { INSTANCE = new Bootstrap(); - Environment environment = initialSettings(daemonize, pathHome, pidFile); + Environment environment = initialSettings(foreground, pathHome, pidFile); Settings settings = environment.settings(); LogConfigurator.configure(settings, true); checkForCustomConfFile(); @@ -250,7 +250,7 @@ final class Bootstrap { } try { - if (daemonize) { + if (!foreground) { Loggers.disableConsoleLogging(); closeSystOut(); } @@ -265,12 +265,12 @@ final class Bootstrap { INSTANCE.start(); - if (daemonize) { + if (!foreground) { closeSysError(); } } catch (Throwable e) { // disable console logging, so user does not see the exception twice (jvm will show it already) - if (!daemonize) { + if (foreground) { Loggers.disableConsoleLogging(); } ESLogger logger = Loggers.getLogger(Bootstrap.class); @@ -290,7 +290,7 @@ final class Bootstrap { logger.error("Exception", e); } // re-enable it if appropriate, so they can see any logging during the shutdown process - if (!daemonize) { + if (foreground) { Loggers.enableConsoleLogging(); } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index dfe49c52e98d..f492f06cd5a1 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -109,7 +109,7 @@ class Elasticsearch extends Command { void init(final boolean daemonize, final String pathHome, final String pidFile, final Map esSettings) { try { - Bootstrap.init(daemonize, pathHome, pidFile, esSettings); + Bootstrap.init(!daemonize, pathHome, pidFile, esSettings); } catch (final Throwable t) { // format exceptions to the console in a special way // to avoid 2MB stacktraces from guice, etc. 
diff --git a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCommandLineParsingTests.java b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCommandLineParsingTests.java index 0d70cb8fba55..62603412a2c9 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCommandLineParsingTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCommandLineParsingTests.java @@ -77,7 +77,7 @@ public class ElasticsearchCommandLineParsingTests extends ESTestCase { } private void runTestVersion(int expectedStatus, Consumer outputConsumer, String... args) throws Exception { - runTest(expectedStatus, false, outputConsumer, (daemonize, pathHome, pidFile, esSettings) -> {}, args); + runTest(expectedStatus, false, outputConsumer, (foreground, pathHome, pidFile, esSettings) -> {}, args); } public void testThatPidFileCanBeConfigured() throws Exception { @@ -92,7 +92,7 @@ public class ElasticsearchCommandLineParsingTests extends ESTestCase { expectedStatus, expectedInit, outputConsumer, - (daemonize, pathHome, pidFile, esSettings) -> assertThat(pidFile, equalTo("/tmp/pid")), + (foreground, pathHome, pidFile, esSettings) -> assertThat(pidFile, equalTo("/tmp/pid")), args); } @@ -107,7 +107,7 @@ public class ElasticsearchCommandLineParsingTests extends ESTestCase { ExitCodes.OK, true, output -> {}, - (daemonize, pathHome, pidFile, esSettings) -> assertThat(daemonize, equalTo(expectedDaemonize)), + (foreground, pathHome, pidFile, esSettings) -> assertThat(foreground, equalTo(!expectedDaemonize)), args); } @@ -116,7 +116,7 @@ public class ElasticsearchCommandLineParsingTests extends ESTestCase { ExitCodes.OK, true, output -> {}, - (daemonize, pathHome, pidFile, esSettings) -> { + (foreground, pathHome, pidFile, esSettings) -> { assertThat(esSettings.size(), equalTo(2)); assertThat(esSettings, hasEntry("es.foo", "bar")); assertThat(esSettings, hasEntry("es.baz", "qux")); @@ -136,7 +136,7 @@ public class 
ElasticsearchCommandLineParsingTests extends ESTestCase { ExitCodes.USAGE, false, output -> assertThat(output, containsString("Elasticsearch settings must be prefixed with [es.] but was [")), - (daemonize, pathHome, pidFile, esSettings) -> {}, + (foreground, pathHome, pidFile, esSettings) -> {}, args ); } @@ -146,7 +146,7 @@ public class ElasticsearchCommandLineParsingTests extends ESTestCase { ExitCodes.USAGE, false, output -> assertThat(output, containsString("Elasticsearch setting [es.foo] must not be empty")), - (daemonize, pathHome, pidFile, esSettings) -> {}, + (foreground, pathHome, pidFile, esSettings) -> {}, "-E", "es.foo=" ); } @@ -156,12 +156,12 @@ public class ElasticsearchCommandLineParsingTests extends ESTestCase { ExitCodes.USAGE, false, output -> assertThat(output, containsString("network.host is not a recognized option")), - (daemonize, pathHome, pidFile, esSettings) -> {}, + (foreground, pathHome, pidFile, esSettings) -> {}, "--network.host"); } private interface InitConsumer { - void accept(final boolean daemonize, final String pathHome, final String pidFile, final Map esSettings); + void accept(final boolean foreground, final String pathHome, final String pidFile, final Map esSettings); } private void runTest( @@ -177,7 +177,7 @@ public class ElasticsearchCommandLineParsingTests extends ESTestCase { @Override void init(final boolean daemonize, final String pathHome, final String pidFile, final Map esSettings) { init.set(true); - initConsumer.accept(daemonize, pathHome, pidFile, esSettings); + initConsumer.accept(!daemonize, pathHome, pidFile, esSettings); } }, terminal); assertThat(status, equalTo(expectedStatus)); From 088dea8863339639710431a44cd7f7b15f8e79e8 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 15 Mar 2016 09:35:17 -0400 Subject: [PATCH 245/320] Fix javadoc comment on Elasticsearch#init This commit fixes a stale javadoc comment on Elasticsearch#init; the method is now visible for testing. 
--- .../main/java/org/elasticsearch/bootstrap/Elasticsearch.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index f492f06cd5a1..d15f72e86552 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -45,7 +45,7 @@ class Elasticsearch extends Command { private final OptionSpec pidfileOption; private final OptionSpec propertyOption; - /** no instantiation */ + // visible for testing Elasticsearch() { super("starts elasticsearch"); // TODO: in jopt-simple 5.0, make this mutually exclusive with all other options From d14ae5f8b642888793e7777748304ef7126661ea Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Tue, 15 Mar 2016 15:02:24 +0100 Subject: [PATCH 246/320] Remove Python and Javascript Benchmark classes --- .../script/javascript/SimpleBench.java | 75 ------------------ .../script/python/SimpleBench.java | 76 ------------------- 2 files changed, 151 deletions(-) delete mode 100644 plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/SimpleBench.java delete mode 100644 plugins/lang-python/src/test/java/org/elasticsearch/script/python/SimpleBench.java diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/SimpleBench.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/SimpleBench.java deleted file mode 100644 index 3445c1160576..000000000000 --- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/SimpleBench.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.script.javascript; - -import org.elasticsearch.common.StopWatch; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.script.CompiledScript; -import org.elasticsearch.script.ExecutableScript; -import org.elasticsearch.script.ScriptService; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -/** - * - */ -public class SimpleBench { - - public static void main(String[] args) { - JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(Settings.Builder.EMPTY_SETTINGS); - Object compiled = se.compile("x + y", Collections.emptyMap()); - CompiledScript compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "testExecutableNoRuntimeParams", "js", compiled); - - Map vars = new HashMap(); - // warm up - for (int i = 0; i < 1000; i++) { - vars.put("x", i); - vars.put("y", i + 1); - se.executable(compiledScript, vars).run(); - } - - final long ITER = 100000; - - StopWatch stopWatch = new StopWatch().start(); - for (long i = 0; i < ITER; i++) { - se.executable(compiledScript, vars).run(); - } - System.out.println("Execute Took: " + stopWatch.stop().lastTaskTime()); - - stopWatch = new StopWatch().start(); - ExecutableScript executableScript = se.executable(compiledScript, vars); - for (long i = 0; i < ITER; i++) { - executableScript.run(); - } - System.out.println("Executable 
Took: " + stopWatch.stop().lastTaskTime()); - - stopWatch = new StopWatch().start(); - executableScript = se.executable(compiledScript, vars); - for (long i = 0; i < ITER; i++) { - for (Map.Entry entry : vars.entrySet()) { - executableScript.setNextVar(entry.getKey(), entry.getValue()); - } - executableScript.run(); - } - System.out.println("Executable (vars) Took: " + stopWatch.stop().lastTaskTime()); - } -} diff --git a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/SimpleBench.java b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/SimpleBench.java deleted file mode 100644 index d9559aef16ce..000000000000 --- a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/SimpleBench.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.script.python; - -import org.elasticsearch.common.StopWatch; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.script.CompiledScript; -import org.elasticsearch.script.ExecutableScript; -import org.elasticsearch.script.ScriptService; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -/** - * - */ -public class SimpleBench { - - public static void main(String[] args) { - PythonScriptEngineService se = new PythonScriptEngineService(Settings.Builder.EMPTY_SETTINGS); - Object compiled = se.compile("x + y", Collections.emptyMap()); - CompiledScript compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "SimpleBench", "python", compiled); - - - Map vars = new HashMap(); - // warm up - for (int i = 0; i < 1000; i++) { - vars.put("x", i); - vars.put("y", i + 1); - se.executable(compiledScript, vars).run(); - } - - final long ITER = 100000; - - StopWatch stopWatch = new StopWatch().start(); - for (long i = 0; i < ITER; i++) { - se.executable(compiledScript, vars).run(); - } - System.out.println("Execute Took: " + stopWatch.stop().lastTaskTime()); - - stopWatch = new StopWatch().start(); - ExecutableScript executableScript = se.executable(compiledScript, vars); - for (long i = 0; i < ITER; i++) { - executableScript.run(); - } - System.out.println("Executable Took: " + stopWatch.stop().lastTaskTime()); - - stopWatch = new StopWatch().start(); - executableScript = se.executable(compiledScript, vars); - for (long i = 0; i < ITER; i++) { - for (Map.Entry entry : vars.entrySet()) { - executableScript.setNextVar(entry.getKey(), entry.getValue()); - } - executableScript.run(); - } - System.out.println("Executable (vars) Took: " + stopWatch.stop().lastTaskTime()); - } -} From e91fd0969285829ad3f9c474e4e15d73c3e36ff0 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Tue, 15 Mar 2016 15:03:37 +0100 Subject: [PATCH 247/320] Enable jdk-system-out Forbidden API checks on test 
sources --- .../org/elasticsearch/gradle/precommit/PrecommitTasks.groovy | 3 +-- test/build.gradle | 1 - test/framework/build.gradle | 1 - 3 files changed, 1 insertion(+), 4 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy index cbd72f2c7da6..0d4a51f050a3 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy @@ -64,7 +64,7 @@ class PrecommitTasks { project.forbiddenApis { internalRuntimeForbidden = true failOnUnsupportedJava = false - bundledSignatures = ['jdk-unsafe', 'jdk-deprecated'] + bundledSignatures = ['jdk-unsafe', 'jdk-deprecated', 'jdk-system-out'] signaturesURLs = [getClass().getResource('/forbidden/jdk-signatures.txt'), getClass().getResource('/forbidden/es-all-signatures.txt')] suppressAnnotations = ['**.SuppressForbidden'] @@ -72,7 +72,6 @@ class PrecommitTasks { Task mainForbidden = project.tasks.findByName('forbiddenApisMain') if (mainForbidden != null) { mainForbidden.configure { - bundledSignatures += 'jdk-system-out' signaturesURLs += getClass().getResource('/forbidden/es-core-signatures.txt') } } diff --git a/test/build.gradle b/test/build.gradle index fcf4f5bb7617..7feb332b717f 100644 --- a/test/build.gradle +++ b/test/build.gradle @@ -29,7 +29,6 @@ subprojects { // the main files are actually test files, so use the appropriate forbidden api sigs forbiddenApisMain { - bundledSignatures = ['jdk-unsafe', 'jdk-deprecated'] signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'), PrecommitTasks.getResource('/forbidden/es-signatures.txt'), PrecommitTasks.getResource('/forbidden/es-test-signatures.txt')] diff --git a/test/framework/build.gradle b/test/framework/build.gradle index 8ee5fbfe81a0..af65c9ff7c9d 100644 --- a/test/framework/build.gradle +++ 
b/test/framework/build.gradle @@ -38,7 +38,6 @@ compileTestJava.options.compilerArgs << '-Xlint:-rawtypes' // the main files are actually test files, so use the appropriate forbidden api sigs forbiddenApisMain { - bundledSignatures = ['jdk-unsafe', 'jdk-deprecated'] signaturesURLs = [PrecommitTasks.getResource('/forbidden/all-signatures.txt'), PrecommitTasks.getResource('/forbidden/test-signatures.txt')] } From 2f7e181318192966ca7ebab8c6e36ba942533d3f Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 15 Mar 2016 10:05:28 -0400 Subject: [PATCH 248/320] Fix typo inadvertently introduced --- docs/reference/migration/migrate_5_0/settings.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/migration/migrate_5_0/settings.asciidoc b/docs/reference/migration/migrate_5_0/settings.asciidoc index 5d39f87773db..87ea356ec7a0 100644 --- a/docs/reference/migration/migrate_5_0/settings.asciidoc +++ b/docs/reference/migration/migrate_5_0/settings.asciidoc @@ -153,7 +153,7 @@ on startup if it is set too low. ==== Removed es.netty.gathering -Disabling Netty from using NIO gathring could be done via the escape +Disabling Netty from using NIO gathering could be done via the escape hatch of setting the system property "es.netty.gathering" to "false". Time has proven enabling gathering by default is a non-issue and this non-documented setting has been removed. 
From cbaa480c160a6c93ae905ea114fd89312213e9eb Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 15 Mar 2016 15:21:56 +0100 Subject: [PATCH 249/320] [TEST] Let the windows machine be slow as hell --- .../org/elasticsearch/backwards/MultiNodeBackwardsIT.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/MultiNodeBackwardsIT.java b/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/MultiNodeBackwardsIT.java index d5094cf09f00..1f3ad15d1bfa 100644 --- a/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/MultiNodeBackwardsIT.java +++ b/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/MultiNodeBackwardsIT.java @@ -17,16 +17,18 @@ * under the License. */ -package org.elasticsearch.smoketest; +package org.elasticsearch.backwards; -import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; +import org.apache.lucene.util.TimeUnits; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; +@TimeoutSuite(millis = 40 * TimeUnits.MINUTE) // some of the windows test VMs are slow as hell public class MultiNodeBackwardsIT extends ESRestTestCase { public MultiNodeBackwardsIT(RestTestCandidate testCandidate) { From f5e6db4090d6bbb1dd9c237e117570fe5959bf7b Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Tue, 15 Mar 2016 15:04:34 +0100 Subject: [PATCH 250/320] Remove System.out.println and Throwable.printStackTrace from tests --- .../builder/XContentBuilderTests.java | 2 +- .../netty/NettyHttpServerPipeliningTests.java | 2 +- .../index/translog/TranslogTests.java | 2 +- .../IndexingMemoryControllerTests.java | 2 +- .../percolator/ConcurrentPercolatorIT.java | 5 +- 
.../BasePipelineAggregationTestCase.java | 2 +- .../search/fetch/FetchSubPhasePluginIT.java | 3 +- .../elasticsearch/search/geo/GeoFilterIT.java | 3 +- .../highlight/HighlightBuilderTests.java | 1 - .../AbstractSimpleTransportTestCase.java | 56 +++++++++--------- .../netty/NettyScheduledPingTests.java | 8 +-- .../ConcurrentDocumentOperationIT.java | 2 +- .../versioning/SimpleVersioningIT.java | 58 +++++++------------ .../messy/tests/RandomScoreFunctionTests.java | 15 +++-- .../deletebyquery/DeleteByQueryTests.java | 14 ++--- .../loggerusage/ESLoggerUsageChecker.java | 3 +- .../test/loggerusage/SuppressForbidden.java | 35 +++++++++++ 17 files changed, 115 insertions(+), 98 deletions(-) create mode 100644 test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/SuppressForbidden.java diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java index 9129e3c05b38..36b16d6a176d 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java @@ -166,7 +166,7 @@ public class XContentBuilderTests extends ESTestCase { byte[] data = bos.bytes().toBytes(); String sData = new String(data, "UTF8"); - System.out.println("DATA: " + sData); + assertThat(sData, equalTo("{\"name\":\"something\", source : { test : \"value\" },\"name2\":\"something2\"}")); } public void testFieldCaseConversion() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java index 6afe8a0aefc4..5d4330ec5c3c 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java +++ b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java @@ -181,7 
+181,7 @@ public class NettyHttpServerPipeliningTests extends ESTestCase { @Override public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) { - e.getCause().printStackTrace(); + logger.info("Caught exception", e.getCause()); e.getChannel().close(); } } diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 337d91356b95..984908ad9d64 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -1491,7 +1491,7 @@ public class TranslogTests extends ESTestCase { if (writtenOperations.size() != snapshot.totalOperations()) { for (int i = 0; i < threadCount; i++) { if (threadExceptions[i] != null) { - threadExceptions[i].printStackTrace(); + logger.info("Translog exception", threadExceptions[i]); } } } diff --git a/core/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java b/core/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java index 4f08c4974439..3316b52be2cb 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java @@ -281,7 +281,7 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { controller.assertNotThrottled(shard0); controller.assertThrottled(shard1); - System.out.println("TEST: now index more"); + logger.info("--> Indexing more data"); // More indexing to shard0 controller.simulateIndexing(shard0); diff --git a/core/src/test/java/org/elasticsearch/percolator/ConcurrentPercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/ConcurrentPercolatorIT.java index f2493d85e861..1cf2ef035b9d 100644 --- a/core/src/test/java/org/elasticsearch/percolator/ConcurrentPercolatorIT.java +++ 
b/core/src/test/java/org/elasticsearch/percolator/ConcurrentPercolatorIT.java @@ -54,7 +54,7 @@ import static org.hamcrest.Matchers.nullValue; * */ public class ConcurrentPercolatorIT extends ESIntegTestCase { - public void testSimpleConcurrentPercolator() throws Exception { + public void testSimpleConcurrentPercolator() throws Throwable { // We need to index a document / define mapping, otherwise field1 doesn't get recognized as number field. // If we don't do this, then 'test2' percolate query gets parsed as a TermQuery and not a RangeQuery. // The percolate api doesn't parse the doc if no queries have registered, so it can't lazily create a mapping @@ -143,9 +143,8 @@ public class ConcurrentPercolatorIT extends ESIntegTestCase { Throwable assertionError = exceptionHolder.get(); if (assertionError != null) { - assertionError.printStackTrace(); + throw assertionError; } - assertThat(assertionError + " should be null", assertionError, nullValue()); } public void testConcurrentAddingAndPercolating() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java index f180ab571629..667cc9008a0f 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java @@ -218,7 +218,7 @@ public abstract class BasePipelineAggregationTestCase use random simple ids"); ids = new IDSource() { @Override public String next() { @@ -366,9 +364,7 @@ public class SimpleVersioningIT extends ESIntegTestCase { break; case 1: // random realistic unicode - if (VERBOSE) { - System.out.println("TEST: use random realistic unicode ids"); - } + logger.info("--> use random realistic unicode ids"); ids = new IDSource() { @Override public String next() { @@ -378,9 +374,7 @@ public class SimpleVersioningIT 
extends ESIntegTestCase { break; case 2: // sequential - if (VERBOSE) { - System.out.println("TEST: use seuquential ids"); - } + logger.info("--> use sequential ids"); ids = new IDSource() { int upto; @@ -392,9 +386,7 @@ public class SimpleVersioningIT extends ESIntegTestCase { break; case 3: // zero-pad sequential - if (VERBOSE) { - System.out.println("TEST: use zero-pad seuquential ids"); - } + logger.info("--> use zero-padded sequential ids"); ids = new IDSource() { final int radix = TestUtil.nextInt(random, Character.MIN_RADIX, Character.MAX_RADIX); final String zeroPad = String.format(Locale.ROOT, "%0" + TestUtil.nextInt(random, 4, 20) + "d", 0); @@ -409,9 +401,7 @@ public class SimpleVersioningIT extends ESIntegTestCase { break; case 4: // random long - if (VERBOSE) { - System.out.println("TEST: use random long ids"); - } + logger.info("--> use random long ids"); ids = new IDSource() { final int radix = TestUtil.nextInt(random, Character.MIN_RADIX, Character.MAX_RADIX); int upto; @@ -424,9 +414,7 @@ public class SimpleVersioningIT extends ESIntegTestCase { break; case 5: // zero-pad random long - if (VERBOSE) { - System.out.println("TEST: use zero-pad random long ids"); - } + logger.info("--> use zero-padded random long ids"); ids = new IDSource() { final int radix = TestUtil.nextInt(random, Character.MIN_RADIX, Character.MAX_RADIX); final String zeroPad = String.format(Locale.ROOT, "%015d", 0); @@ -539,9 +527,7 @@ public class SimpleVersioningIT extends ESIntegTestCase { idPrefix = ""; } else { idPrefix = TestUtil.randomSimpleString(random); - if (VERBOSE) { - System.out.println("TEST: use id prefix: " + idPrefix); - } + logger.debug("--> use id prefix {}", idPrefix); } int numIDs; @@ -564,9 +550,7 @@ public class SimpleVersioningIT extends ESIntegTestCase { final IDAndVersion[] idVersions = new IDAndVersion[TestUtil.nextInt(random, numIDs / 2, numIDs * (TEST_NIGHTLY ? 
8 : 2))]; final Map truth = new HashMap<>(); - if (VERBOSE) { - System.out.println("TEST: use " + numIDs + " ids; " + idVersions.length + " operations"); - } + logger.debug("--> use {} ids; {} operations", numIDs, idVersions.length); for (int i = 0; i < idVersions.length; i++) { @@ -596,10 +580,9 @@ public class SimpleVersioningIT extends ESIntegTestCase { idVersions[i] = x; } - if (VERBOSE) { - for (IDAndVersion idVersion : idVersions) { - System.out.println("id=" + idVersion.id + " version=" + idVersion.version + " delete?=" + idVersion.delete + " truth?=" + (truth.get(idVersion.id) == idVersion)); - } + for (IDAndVersion idVersion : idVersions) { + logger.debug("--> id={} version={} delete?={} truth?={}", idVersion.id, idVersion.version, idVersion.delete, + truth.get(idVersion.id) == idVersion); } final AtomicInteger upto = new AtomicInteger(); @@ -623,8 +606,8 @@ public class SimpleVersioningIT extends ESIntegTestCase { if (index >= idVersions.length) { break; } - if (VERBOSE && index % 100 == 0) { - System.out.println(Thread.currentThread().getName() + ": index=" + index); + if (index % 100 == 0) { + logger.trace("{}: index={}", Thread.currentThread().getName(), index); } IDAndVersion idVersion = idVersions[index]; @@ -657,18 +640,18 @@ public class SimpleVersioningIT extends ESIntegTestCase { idVersion.indexFinishTime = System.nanoTime() - startTime; if (threadRandom.nextInt(100) == 7) { - System.out.println(threadID + ": TEST: now refresh at " + (System.nanoTime() - startTime)); + logger.trace("--> {}: TEST: now refresh at {}", threadID, System.nanoTime() - startTime); refresh(); - System.out.println(threadID + ": TEST: refresh done at " + (System.nanoTime() - startTime)); + logger.trace("--> {}: TEST: refresh done at {}", threadID, System.nanoTime() - startTime); } if (threadRandom.nextInt(100) == 7) { - System.out.println(threadID + ": TEST: now flush at " + (System.nanoTime() - startTime)); + logger.trace("--> {}: TEST: now flush at {}", threadID, 
System.nanoTime() - startTime); try { flush(); } catch (FlushNotAllowedEngineException fnaee) { // OK } - System.out.println(threadID + ": TEST: flush done at " + (System.nanoTime() - startTime)); + logger.trace("--> {}: TEST: flush done at {}", threadID, System.nanoTime() - startTime); } } } catch (Exception e) { @@ -696,16 +679,17 @@ public class SimpleVersioningIT extends ESIntegTestCase { } long actualVersion = client().prepareGet("test", "type", id).execute().actionGet().getVersion(); if (actualVersion != expected) { - System.out.println("FAILED: idVersion=" + idVersion + " actualVersion=" + actualVersion); + logger.error("--> FAILED: idVersion={} actualVersion= {}", idVersion, actualVersion); failed = true; } } if (failed) { - System.out.println("All versions:"); + StringBuilder sb = new StringBuilder(); for (int i = 0; i < idVersions.length; i++) { - System.out.println("i=" + i + " " + idVersions[i]); + sb.append("i=").append(i).append(" ").append(idVersions[i]).append(System.lineSeparator()); } + logger.error("All versions: {}", sb); fail("wrong versions for some IDs"); } } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java index 67f7d6ff0daf..592fb362bc29 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java @@ -316,10 +316,9 @@ public class RandomScoreFunctionTests extends ESIntegTestCase { } } - System.out.println(); - System.out.println("max repeat: " + maxRepeat); - System.out.println("avg repeat: " + sumRepeat / (double) filled); - System.out.println("distribution: " + filled / (double) count); + logger.info("max repeat: {}", maxRepeat); + logger.info("avg repeat: {}", sumRepeat / (double) filled); + logger.info("distribution: {}", filled / (double) 
count); int percentile50 = filled / 2; int percentile25 = (filled / 4); @@ -333,18 +332,18 @@ public class RandomScoreFunctionTests extends ESIntegTestCase { } sum += i * matrix[i]; if (percentile50 == 0) { - System.out.println("median: " + i); + logger.info("median: {}", i); } else if (percentile25 == 0) { - System.out.println("percentile_25: " + i); + logger.info("percentile_25: {}", i); } else if (percentile75 == 0) { - System.out.println("percentile_75: " + i); + logger.info("percentile_75: {}", i); } percentile50--; percentile25--; percentile75--; } - System.out.println("mean: " + sum / (double) count); + logger.info("mean: {}", sum / (double) count); } } diff --git a/plugins/delete-by-query/src/test/java/org/elasticsearch/plugin/deletebyquery/DeleteByQueryTests.java b/plugins/delete-by-query/src/test/java/org/elasticsearch/plugin/deletebyquery/DeleteByQueryTests.java index 232b056535c5..3c595b1ab16f 100644 --- a/plugins/delete-by-query/src/test/java/org/elasticsearch/plugin/deletebyquery/DeleteByQueryTests.java +++ b/plugins/delete-by-query/src/test/java/org/elasticsearch/plugin/deletebyquery/DeleteByQueryTests.java @@ -276,7 +276,7 @@ public class DeleteByQueryTests extends ESIntegTestCase { assertSearchContextsClosed(); } - public void testConcurrentDeleteByQueriesOnDifferentDocs() throws Exception { + public void testConcurrentDeleteByQueriesOnDifferentDocs() throws Throwable { createIndex("test"); ensureGreen(); @@ -324,18 +324,17 @@ public class DeleteByQueryTests extends ESIntegTestCase { Throwable assertionError = exceptionHolder.get(); if (assertionError != null) { - assertionError.printStackTrace(); + throw assertionError; } - assertThat(assertionError + " should be null", assertionError, nullValue()); - refresh(); + refresh(); for (int i = 0; i < threads.length; i++) { assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.termQuery("field", i)).get(), 0); } assertSearchContextsClosed(); } - public void 
testConcurrentDeleteByQueriesOnSameDocs() throws Exception { + public void testConcurrentDeleteByQueriesOnSameDocs() throws Throwable { assertAcked(prepareCreate("test").setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1))); ensureGreen(); @@ -386,9 +385,8 @@ public class DeleteByQueryTests extends ESIntegTestCase { Throwable assertionError = exceptionHolder.get(); if (assertionError != null) { - assertionError.printStackTrace(); + throw assertionError; } - assertThat(assertionError + " should be null", assertionError, nullValue()); assertHitCount(client().prepareSearch("test").setSize(0).get(), 0L); assertThat(deleted.get(), equalTo(docs)); assertSearchContextsClosed(); @@ -445,4 +443,4 @@ public class DeleteByQueryTests extends ESIntegTestCase { } }); } -} \ No newline at end of file +} diff --git a/test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/ESLoggerUsageChecker.java b/test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/ESLoggerUsageChecker.java index 57ec37cb695d..25d4052c162f 100644 --- a/test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/ESLoggerUsageChecker.java +++ b/test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/ESLoggerUsageChecker.java @@ -38,8 +38,6 @@ import org.objectweb.asm.tree.analysis.BasicInterpreter; import org.objectweb.asm.tree.analysis.BasicValue; import org.objectweb.asm.tree.analysis.Frame; -import java.io.File; -import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.nio.file.FileVisitResult; @@ -59,6 +57,7 @@ public class ESLoggerUsageChecker { public static final List LOGGER_METHODS = Arrays.asList("trace", "debug", "info", "warn", "error"); public static final String IGNORE_CHECKS_ANNOTATION = "org.elasticsearch.common.SuppressLoggerChecks"; + @SuppressForbidden(reason = "command line tool") public static void main(String... 
args) throws Exception { System.out.println("checking for wrong usages of ESLogger..."); boolean[] wrongUsageFound = new boolean[1]; diff --git a/test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/SuppressForbidden.java b/test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/SuppressForbidden.java new file mode 100644 index 000000000000..995269e9f02c --- /dev/null +++ b/test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/SuppressForbidden.java @@ -0,0 +1,35 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.test.loggerusage; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Annotation to suppress forbidden-apis errors inside a whole class, a method, or a field. 
+ * Duplicated from core as main sources of logger-usage project have no dependencies on core + */ +@Retention(RetentionPolicy.CLASS) +@Target({ ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD, ElementType.TYPE }) +public @interface SuppressForbidden { + String reason(); +} From 52852bdf39243219e7a1e046f55679b874d9aed0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 15 Mar 2016 15:33:32 +0100 Subject: [PATCH 251/320] Fix a potential parsing problem in GeoDistanceSortParser Test revealed a potential problem in the current GeoDistanceSortParser. For an input like `{ [...], "coerce" = true, "ignore_malformed" = false } the parser will fail to parse the `ignore_malformed` boolean flag and will fall through to the last else-branch where the boolean flag will be parsed as geo-hash and `ignore_malformed` treated as field name. Adding fix and test that will fail with the old parser code. --- .../elasticsearch/search/sort/GeoDistanceSortParser.java | 7 +++++-- .../search/sort/GeoDistanceSortBuilderIT.java | 8 ++++++++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java index b9407b31bf6c..d1eabf89e459 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java +++ b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java @@ -111,8 +111,11 @@ public class GeoDistanceSortParser implements SortParser { if (coerce == true) { ignoreMalformed = true; } - } else if ("ignore_malformed".equals(currentName) && coerce == false) { - ignoreMalformed = parser.booleanValue(); + } else if ("ignore_malformed".equals(currentName)) { + boolean ignoreMalformedFlag = parser.booleanValue(); + if (coerce == false) { + ignoreMalformed = ignoreMalformedFlag; + } } else if ("sort_mode".equals(currentName) || "sortMode".equals(currentName) || 
"mode".equals(currentName)) { sortMode = MultiValueMode.fromString(parser.text()); } else if ("nested_path".equals(currentName) || "nestedPath".equals(currentName)) { diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java index ac9270cbe210..e7f9b167999a 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java @@ -263,6 +263,14 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort("location", 2.0, 2.0) .unit(DistanceUnit.KILOMETERS).geoDistance(GeoDistance.PLANE))).execute().actionGet(); checkCorrectSortOrderForGeoSort(searchResponse); + + searchResponse = client() + .prepareSearch() + .setSource( + new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort("location", 2.0, 2.0) + .unit(DistanceUnit.KILOMETERS).geoDistance(GeoDistance.PLANE) + .ignoreMalformed(true).coerce(true))).execute().actionGet(); + checkCorrectSortOrderForGeoSort(searchResponse); } private static void checkCorrectSortOrderForGeoSort(SearchResponse searchResponse) { From d1b85f69ef469812693b26393c1519618793cf2f Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 15 Mar 2016 11:22:53 -0400 Subject: [PATCH 252/320] Shorter name for test class This commit renames the ElasticsearchCommandLineParsingTests to ElasticsearchCliTests. 
--- ...hCommandLineParsingTests.java => ElasticsearchCliTests.java} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename core/src/test/java/org/elasticsearch/bootstrap/{ElasticsearchCommandLineParsingTests.java => ElasticsearchCliTests.java} (99%) diff --git a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCommandLineParsingTests.java b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java similarity index 99% rename from core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCommandLineParsingTests.java rename to core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java index 62603412a2c9..fe1eeb8b5a6c 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCommandLineParsingTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java @@ -40,7 +40,7 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.hasEntry; -public class ElasticsearchCommandLineParsingTests extends ESTestCase { +public class ElasticsearchCliTests extends ESTestCase { public void testVersion() throws Exception { runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "-d"); From 432f0cc193c3347e1e7e43222de2a85dd7101103 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Tue, 15 Mar 2016 19:03:18 +0100 Subject: [PATCH 253/320] Docs: Added the ingest node to the modules/nodes page Closes #17113 --- docs/reference/ingest.asciidoc | 6 +++--- docs/reference/ingest/ingest-node.asciidoc | 2 +- docs/reference/modules/node.asciidoc | 9 +++++++++ 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/docs/reference/ingest.asciidoc b/docs/reference/ingest.asciidoc index b2486c7f12b2..c565f3b2047e 100644 --- a/docs/reference/ingest.asciidoc +++ b/docs/reference/ingest.asciidoc @@ -15,10 +15,10 @@ on all nodes. 
To disable ingest on a node, configure the following setting in th node.ingest: false -------------------------------------------------- -To pre-process documents before indexing, you <> that specifies +To pre-process documents before indexing, you <> that specifies a series of <>. Each processor transforms the document in some way. For example, you may have a pipeline that consists of one processor that removes a field from -the document followed by another processor that renames a field. +the document followed by another processor that renames a field. To use a pipeline, you simply specify the `pipeline` parameter on an index or bulk request to tell the ingest node which pipeline to use. For example: @@ -32,7 +32,7 @@ PUT /my-index/my-type/my-id?pipeline=my_pipeline_id -------------------------------------------------- // AUTOSENSE -See <> for more information about creating, adding, and deleting pipelines. +See <> for more information about creating, adding, and deleting pipelines. -- diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index b314495b34ae..10b640dbaf13 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -1,4 +1,4 @@ -[[pipe-line]] +[[pipeline]] == Pipeline Definition A pipeline is a definition of a series of <> that are to be executed diff --git a/docs/reference/modules/node.asciidoc b/docs/reference/modules/node.asciidoc index 0117d193043e..799fdcab2f9e 100644 --- a/docs/reference/modules/node.asciidoc +++ b/docs/reference/modules/node.asciidoc @@ -40,11 +40,20 @@ A tribe node, configured via the `tribe.*` settings, is a special type of client node that can connect to multiple clusters and perform search and other operations across all connected clusters. +<>:: + +A node that has `node.ingest` set to `true` (default). Ingest nodes are able +to apply an <> to a document in order to transform +and enrich the document before indexing. 
With a heavy ingest load, it makes +sense to use dedicated ingest nodes and to mark the master and data nodes as +`node.ingest: false`. + By default a node is both a master-eligible node and a data node. This is very convenient for small clusters but, as the cluster grows, it becomes important to consider separating dedicated master-eligible nodes from dedicated data nodes. + [NOTE] [[coordinating-node]] .Coordinating node From 5994e91b084c99fea415713133adeed13d865cad Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 15 Mar 2016 15:47:27 -0400 Subject: [PATCH 254/320] Fix systemd pidfile setting This commit fixes the pidfile setting on systems that used systemd. The issue is that the pidfile can only be set via the command line arguments -p or --pidfile, and is no longer settable via a setting. --- distribution/src/main/packaging/systemd/elasticsearch.service | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distribution/src/main/packaging/systemd/elasticsearch.service b/distribution/src/main/packaging/systemd/elasticsearch.service index 4f643f6a4a4a..6aa6efeadde9 100644 --- a/distribution/src/main/packaging/systemd/elasticsearch.service +++ b/distribution/src/main/packaging/systemd/elasticsearch.service @@ -20,7 +20,7 @@ Group=elasticsearch ExecStartPre=/usr/share/elasticsearch/bin/elasticsearch-systemd-pre-exec ExecStart=/usr/share/elasticsearch/bin/elasticsearch \ - -Ees.pidfile=${PID_DIR}/elasticsearch.pid \ + -p ${PID_DIR}/elasticsearch.pid \ -Ees.default.path.home=${ES_HOME} \ -Ees.default.path.logs=${LOG_DIR} \ -Ees.default.path.data=${DATA_DIR} \ From 4ee90db13dc54744a9eb7b5d56df2ea4238014d8 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 15 Mar 2016 16:29:29 -0400 Subject: [PATCH 255/320] Remove path.home command-line setting --- .../elasticsearch/bootstrap/Bootstrap.java | 6 ++--- .../bootstrap/Elasticsearch.java | 12 ++++------ .../elasticsearch/bootstrap/security.policy | 3 --- .../bootstrap/ElasticsearchCliTests.java | 24 
++++++++----------- .../src/main/packaging/init.d/elasticsearch | 2 +- .../packaging/systemd/elasticsearch.service | 1 - .../src/main/resources/bin/elasticsearch | 8 +++---- .../src/main/resources/bin/service.bat | 2 +- 8 files changed, 22 insertions(+), 36 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index a89bee0f4ac9..2a8984e59d4c 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -188,10 +188,9 @@ final class Bootstrap { node = new Node(nodeSettings); } - private static Environment initialSettings(boolean foreground, String pathHome, String pidFile) { + private static Environment initialSettings(boolean foreground, String pidFile) { Terminal terminal = foreground ? Terminal.DEFAULT : null; Settings.Builder builder = Settings.builder(); - builder.put(Environment.PATH_HOME_SETTING.getKey(), pathHome); if (Strings.hasLength(pidFile)) { builder.put(Environment.PIDFILE_SETTING.getKey(), pidFile); } @@ -224,7 +223,6 @@ final class Bootstrap { */ static void init( final boolean foreground, - final String pathHome, final String pidFile, final Map esSettings) throws Throwable { // Set the system property before anything has a chance to trigger its use @@ -234,7 +232,7 @@ final class Bootstrap { INSTANCE = new Bootstrap(); - Environment environment = initialSettings(foreground, pathHome, pidFile); + Environment environment = initialSettings(foreground, pidFile); Settings settings = environment.settings(); LogConfigurator.configure(settings, true); checkForCustomConfFile(); diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index d15f72e86552..0cc952907c03 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ 
b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -41,7 +41,6 @@ class Elasticsearch extends Command { private final OptionSpec versionOption; private final OptionSpec daemonizeOption; - private final OptionSpec pathHomeOption; private final OptionSpec pidfileOption; private final OptionSpec propertyOption; @@ -54,8 +53,6 @@ class Elasticsearch extends Command { daemonizeOption = parser.acceptsAll(Arrays.asList("d", "daemonize"), "Starts Elasticsearch in the background"); // TODO: in jopt-simple 5.0 this option type can be a Path - pathHomeOption = parser.acceptsAll(Arrays.asList("H", "path.home"), "").withRequiredArg(); - // TODO: in jopt-simple 5.0 this option type can be a Path pidfileOption = parser.acceptsAll(Arrays.asList("p", "pidfile"), "Creates a pid file in the specified path on start") .withRequiredArg(); @@ -80,7 +77,7 @@ class Elasticsearch extends Command { @Override protected void execute(Terminal terminal, OptionSet options) throws Exception { if (options.has(versionOption)) { - if (options.has(daemonizeOption) || options.has(pathHomeOption) || options.has(pidfileOption)) { + if (options.has(daemonizeOption) || options.has(pidfileOption)) { throw new UserError(ExitCodes.USAGE, "Elasticsearch version option is mutually exclusive with any other option"); } terminal.println("Version: " + org.elasticsearch.Version.CURRENT @@ -90,7 +87,6 @@ class Elasticsearch extends Command { } final boolean daemonize = options.has(daemonizeOption); - final String pathHome = pathHomeOption.value(options); final String pidFile = pidfileOption.value(options); final Map esSettings = new HashMap<>(); @@ -104,12 +100,12 @@ class Elasticsearch extends Command { esSettings.put(kvp.key, kvp.value); } - init(daemonize, pathHome, pidFile, esSettings); + init(daemonize, pidFile, esSettings); } - void init(final boolean daemonize, final String pathHome, final String pidFile, final Map esSettings) { + void init(final boolean daemonize, final String pidFile, 
final Map esSettings) { try { - Bootstrap.init(!daemonize, pathHome, pidFile, esSettings); + Bootstrap.init(!daemonize, pidFile, esSettings); } catch (final Throwable t) { // format exceptions to the console in a special way // to avoid 2MB stacktraces from guice, etc. diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index 4909959015b0..10bf8b2a1588 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -72,9 +72,6 @@ grant { // set by ESTestCase to improve test reproducibility // TODO: set this with gradle or some other way that repros with seed? permission java.util.PropertyPermission "es.processors.override", "write"; - // set by CLIToolTestCase - // TODO: do this differently? or test commandline tools differently? - permission java.util.PropertyPermission "es.default.path.home", "write"; // TODO: these simply trigger a noisy warning if its unable to clear the properties // fix that in randomizedtesting diff --git a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java index fe1eeb8b5a6c..51274af9a016 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java @@ -45,14 +45,10 @@ public class ElasticsearchCliTests extends ESTestCase { public void testVersion() throws Exception { runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "-d"); runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "--daemonize"); - runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "-H", "/tmp/home"); - runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "--path.home", "/tmp/home"); runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "-p", "/tmp/pid"); 
runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "--pidfile", "/tmp/pid"); runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "-d"); runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "--daemonize"); - runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "-H", "/tmp/home"); - runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "--path.home", "/tmp/home"); runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "-p", "/tmp/pid"); runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "--pidfile", "/tmp/pid"); runTestThatVersionIsReturned("-V"); @@ -77,7 +73,7 @@ public class ElasticsearchCliTests extends ESTestCase { } private void runTestVersion(int expectedStatus, Consumer outputConsumer, String... args) throws Exception { - runTest(expectedStatus, false, outputConsumer, (foreground, pathHome, pidFile, esSettings) -> {}, args); + runTest(expectedStatus, false, outputConsumer, (foreground, pidFile, esSettings) -> {}, args); } public void testThatPidFileCanBeConfigured() throws Exception { @@ -92,7 +88,7 @@ public class ElasticsearchCliTests extends ESTestCase { expectedStatus, expectedInit, outputConsumer, - (foreground, pathHome, pidFile, esSettings) -> assertThat(pidFile, equalTo("/tmp/pid")), + (foreground, pidFile, esSettings) -> assertThat(pidFile, equalTo("/tmp/pid")), args); } @@ -107,7 +103,7 @@ public class ElasticsearchCliTests extends ESTestCase { ExitCodes.OK, true, output -> {}, - (foreground, pathHome, pidFile, esSettings) -> assertThat(foreground, equalTo(!expectedDaemonize)), + (foreground, pidFile, esSettings) -> assertThat(foreground, equalTo(!expectedDaemonize)), args); } @@ -116,7 +112,7 @@ public class ElasticsearchCliTests extends ESTestCase { ExitCodes.OK, true, output -> {}, - (foreground, pathHome, pidFile, esSettings) -> { + (foreground, pidFile, esSettings) -> { assertThat(esSettings.size(), equalTo(2)); assertThat(esSettings, hasEntry("es.foo", "bar")); 
assertThat(esSettings, hasEntry("es.baz", "qux")); @@ -136,7 +132,7 @@ public class ElasticsearchCliTests extends ESTestCase { ExitCodes.USAGE, false, output -> assertThat(output, containsString("Elasticsearch settings must be prefixed with [es.] but was [")), - (foreground, pathHome, pidFile, esSettings) -> {}, + (foreground, pidFile, esSettings) -> {}, args ); } @@ -146,7 +142,7 @@ public class ElasticsearchCliTests extends ESTestCase { ExitCodes.USAGE, false, output -> assertThat(output, containsString("Elasticsearch setting [es.foo] must not be empty")), - (foreground, pathHome, pidFile, esSettings) -> {}, + (foreground, pidFile, esSettings) -> {}, "-E", "es.foo=" ); } @@ -156,12 +152,12 @@ public class ElasticsearchCliTests extends ESTestCase { ExitCodes.USAGE, false, output -> assertThat(output, containsString("network.host is not a recognized option")), - (foreground, pathHome, pidFile, esSettings) -> {}, + (foreground, pidFile, esSettings) -> {}, "--network.host"); } private interface InitConsumer { - void accept(final boolean foreground, final String pathHome, final String pidFile, final Map esSettings); + void accept(final boolean foreground, final String pidFile, final Map esSettings); } private void runTest( @@ -175,9 +171,9 @@ public class ElasticsearchCliTests extends ESTestCase { final AtomicBoolean init = new AtomicBoolean(); final int status = Elasticsearch.main(args, new Elasticsearch() { @Override - void init(final boolean daemonize, final String pathHome, final String pidFile, final Map esSettings) { + void init(final boolean daemonize, final String pidFile, final Map esSettings) { init.set(true); - initConsumer.accept(!daemonize, pathHome, pidFile, esSettings); + initConsumer.accept(!daemonize, pidFile, esSettings); } }, terminal); assertThat(status, equalTo(expectedStatus)); diff --git a/distribution/deb/src/main/packaging/init.d/elasticsearch b/distribution/deb/src/main/packaging/init.d/elasticsearch index e2d857a7ffe3..1476a520c1d5 100755 
--- a/distribution/deb/src/main/packaging/init.d/elasticsearch +++ b/distribution/deb/src/main/packaging/init.d/elasticsearch @@ -99,7 +99,7 @@ fi # Define other required variables PID_FILE="$PID_DIR/$NAME.pid" DAEMON=$ES_HOME/bin/elasticsearch -DAEMON_OPTS="-d -p $PID_FILE -Ees.default.path.home=$ES_HOME -Ees.default.path.logs=$LOG_DIR -Ees.default.path.data=$DATA_DIR -Ees.default.path.conf=$CONF_DIR" +DAEMON_OPTS="-d -p $PID_FILE -Ees.default.path.logs=$LOG_DIR -Ees.default.path.data=$DATA_DIR -Ees.default.path.conf=$CONF_DIR" export ES_HEAP_SIZE export ES_HEAP_NEWSIZE diff --git a/distribution/src/main/packaging/systemd/elasticsearch.service b/distribution/src/main/packaging/systemd/elasticsearch.service index 6aa6efeadde9..1aed30ac9689 100644 --- a/distribution/src/main/packaging/systemd/elasticsearch.service +++ b/distribution/src/main/packaging/systemd/elasticsearch.service @@ -21,7 +21,6 @@ ExecStartPre=/usr/share/elasticsearch/bin/elasticsearch-systemd-pre-exec ExecStart=/usr/share/elasticsearch/bin/elasticsearch \ -p ${PID_DIR}/elasticsearch.pid \ - -Ees.default.path.home=${ES_HOME} \ -Ees.default.path.logs=${LOG_DIR} \ -Ees.default.path.data=${DATA_DIR} \ -Ees.default.path.conf=${CONF_DIR} diff --git a/distribution/src/main/resources/bin/elasticsearch b/distribution/src/main/resources/bin/elasticsearch index 0d0e0069ae2a..253ee1ee1f53 100755 --- a/distribution/src/main/resources/bin/elasticsearch +++ b/distribution/src/main/resources/bin/elasticsearch @@ -126,11 +126,11 @@ export HOSTNAME # manual parsing to find out, if process should be detached daemonized=`echo $* | egrep -- '(^-d |-d$| -d |--daemonize$|--daemonize )'` if [ -z "$daemonized" ] ; then - exec "$JAVA" $JAVA_OPTS $ES_JAVA_OPTS -cp "$ES_CLASSPATH" \ - org.elasticsearch.bootstrap.Elasticsearch --path.home "$ES_HOME" "$@" + exec "$JAVA" $JAVA_OPTS $ES_JAVA_OPTS -Des.path.home="$ES_HOME" -cp "$ES_CLASSPATH" \ + org.elasticsearch.bootstrap.Elasticsearch "$@" else - exec "$JAVA" $JAVA_OPTS 
$ES_JAVA_OPTS -cp "$ES_CLASSPATH" \ - org.elasticsearch.bootstrap.Elasticsearch --path.home "$ES_HOME" "$@" <&- & + exec "$JAVA" $JAVA_OPTS $ES_JAVA_OPTS -Des.path.home="$ES_HOME" -cp "$ES_CLASSPATH" \ + org.elasticsearch.bootstrap.Elasticsearch "$@" <&- & retval=$? pid=$! [ $retval -eq 0 ] || exit $retval diff --git a/distribution/src/main/resources/bin/service.bat b/distribution/src/main/resources/bin/service.bat index 22242e36ff93..2786c87a6340 100644 --- a/distribution/src/main/resources/bin/service.bat +++ b/distribution/src/main/resources/bin/service.bat @@ -152,7 +152,7 @@ if "%DATA_DIR%" == "" set DATA_DIR=%ES_HOME%\data if "%CONF_DIR%" == "" set CONF_DIR=%ES_HOME%\config -set ES_PARAMS=-Delasticsearch;-Des.path.home="%ES_HOME%";-Des.default.path.home="%ES_HOME%";-Des.default.path.logs="%LOG_DIR%";-Des.default.path.data="%DATA_DIR%";-Des.default.path.conf="%CONF_DIR%" +set ES_PARAMS=-Delasticsearch;-Des.path.home="%ES_HOME%";-Des.default.path.logs="%LOG_DIR%";-Des.default.path.data="%DATA_DIR%";-Des.default.path.conf="%CONF_DIR%" set JVM_OPTS=%JAVA_OPTS: =;% From 66ba044ec569f76b6e2b03bfe7ba0912c44707df Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 15 Mar 2016 17:45:17 -0400 Subject: [PATCH 256/320] Use setting in integration test cluster config --- .../elasticsearch/gradle/test/ClusterConfiguration.groovy | 7 ------- .../groovy/org/elasticsearch/gradle/test/NodeInfo.groovy | 1 - modules/lang-groovy/build.gradle | 4 ++-- modules/lang-mustache/build.gradle | 4 ++-- plugins/lang-javascript/build.gradle | 4 ++-- plugins/lang-python/build.gradle | 4 ++-- qa/smoke-test-ingest-disabled/build.gradle | 2 +- qa/smoke-test-reindex-with-groovy/build.gradle | 2 +- 8 files changed, 10 insertions(+), 18 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy index 2adc59e9e9dd..3e8b62253294 100644 --- 
a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy @@ -73,8 +73,6 @@ class ClusterConfiguration { return tmpFile.exists() } - Map esSettings = new HashMap<>(); - Map systemProperties = new HashMap<>() Map settings = new HashMap<>() @@ -88,11 +86,6 @@ class ClusterConfiguration { LinkedHashMap setupCommands = new LinkedHashMap<>() - @Input - void esSetting(String setting, String value) { - esSettings.put(setting, value); - } - @Input void systemProperty(String property, String value) { systemProperties.put(property, value) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index 168a67a47286..ebeb3d538951 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -130,7 +130,6 @@ class NodeInfo { 'ES_GC_OPTS': config.jvmArgs // we pass these with the undocumented gc opts so the argline can set gc, etc ] args.addAll("-E", "es.node.portsfile=true") - args.addAll(config.esSettings.collectMany { key, value -> ["-E", "${key}=${value}" ] }) env.put('ES_JAVA_OPTS', config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ")) for (Map.Entry property : System.properties.entrySet()) { if (property.getKey().startsWith('es.')) { diff --git a/modules/lang-groovy/build.gradle b/modules/lang-groovy/build.gradle index 340dd620ca64..884fe8b65baf 100644 --- a/modules/lang-groovy/build.gradle +++ b/modules/lang-groovy/build.gradle @@ -28,8 +28,8 @@ dependencies { integTest { cluster { - esSetting 'es.script.inline', 'true' - esSetting 'es.script.indexed', 'true' + setting 'script.inline', 'true' + setting 'script.indexed', 'true' } } diff --git a/modules/lang-mustache/build.gradle b/modules/lang-mustache/build.gradle index 
36b58792d86c..8eed31dd6684 100644 --- a/modules/lang-mustache/build.gradle +++ b/modules/lang-mustache/build.gradle @@ -28,7 +28,7 @@ dependencies { integTest { cluster { - esSetting 'es.script.inline', 'true' - esSetting 'es.script.indexed', 'true' + setting 'script.inline', 'true' + setting 'script.indexed', 'true' } } diff --git a/plugins/lang-javascript/build.gradle b/plugins/lang-javascript/build.gradle index 41d858243189..1f4312418386 100644 --- a/plugins/lang-javascript/build.gradle +++ b/plugins/lang-javascript/build.gradle @@ -28,7 +28,7 @@ dependencies { integTest { cluster { - esSetting 'es.script.inline', 'true' - esSetting 'es.script.indexed', 'true' + setting 'script.inline', 'true' + setting 'script.indexed', 'true' } } diff --git a/plugins/lang-python/build.gradle b/plugins/lang-python/build.gradle index bc9db2a20c25..c7466316806c 100644 --- a/plugins/lang-python/build.gradle +++ b/plugins/lang-python/build.gradle @@ -28,8 +28,8 @@ dependencies { integTest { cluster { - esSetting 'es.script.inline', 'true' - esSetting 'es.script.indexed', 'true' + setting 'script.inline', 'true' + setting 'script.indexed', 'true' } } diff --git a/qa/smoke-test-ingest-disabled/build.gradle b/qa/smoke-test-ingest-disabled/build.gradle index f8ebd6317869..09b2d1409a15 100644 --- a/qa/smoke-test-ingest-disabled/build.gradle +++ b/qa/smoke-test-ingest-disabled/build.gradle @@ -21,6 +21,6 @@ apply plugin: 'elasticsearch.rest-test' integTest { cluster { - esSetting 'es.node.ingest', 'false' + setting 'node.ingest', 'false' } } diff --git a/qa/smoke-test-reindex-with-groovy/build.gradle b/qa/smoke-test-reindex-with-groovy/build.gradle index 749f5c1237c6..c4b462ce45a9 100644 --- a/qa/smoke-test-reindex-with-groovy/build.gradle +++ b/qa/smoke-test-reindex-with-groovy/build.gradle @@ -21,6 +21,6 @@ apply plugin: 'elasticsearch.rest-test' integTest { cluster { - esSetting 'es.script.inline', 'true' + setting 'script.inline', 'true' } } From 
0f00c14afc8428a2a72c0b766d2171029dc8f6e1 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 15 Mar 2016 21:54:25 -0400 Subject: [PATCH 257/320] Remove dead code in FTL#simpleMatchToFullName This commit removes some dead code that resulted from removing the ability for a field to have different names (after enforcing that fields have the same full and index name). Closes #17127 --- .../java/org/elasticsearch/index/mapper/FieldTypeLookup.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java index 5e9378e2f55e..5f6fddf09ef6 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java @@ -154,8 +154,6 @@ class FieldTypeLookup implements Iterable { for (MappedFieldType fieldType : this) { if (Regex.simpleMatch(pattern, fieldType.name())) { fields.add(fieldType.name()); - } else if (Regex.simpleMatch(pattern, fieldType.name())) { - fields.add(fieldType.name()); } } return fields; From a90a2b34fc6fd135b23f6518156078f03804b7ae Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 15 Mar 2016 19:10:14 -0700 Subject: [PATCH 258/320] Build: Switch to maven-publish plugin The build currently uses the old maven support in gradle. This commit switches to use the newer maven-publish plugin. This will allow future changes, for example, easily publishing to artifactory. An additional part of this change makes publishing of build-tools part of the normal publishing, instead of requiring a separate upload step from within buildSrc. That also sets us up for a follow up to enable precomit checks on the buildSrc code itself. 
--- build.gradle | 80 +++++---- buildSrc/.gitignore | 1 + buildSrc/build.gradle | 163 +++++++++++------- .../elasticsearch/gradle/BuildPlugin.groovy | 100 ++++++----- .../gradle/plugin/PluginBuildPlugin.groovy | 37 ++++ .../plugin/PluginPropertiesExtension.groovy | 4 + core/build.gradle | 13 +- distribution/build.gradle | 13 ++ distribution/deb/build.gradle | 9 +- distribution/integ-test-zip/build.gradle | 9 +- distribution/rpm/build.gradle | 9 +- distribution/tar/build.gradle | 9 +- distribution/zip/build.gradle | 9 +- modules/build.gradle | 4 - plugins/build.gradle | 2 + settings.gradle | 3 + test/build.gradle | 5 + 17 files changed, 312 insertions(+), 158 deletions(-) create mode 100644 buildSrc/.gitignore diff --git a/build.gradle b/build.gradle index 6ab00d73881e..8ffc90cd31b3 100644 --- a/build.gradle +++ b/build.gradle @@ -17,54 +17,61 @@ * under the License. */ -import com.bmuschko.gradle.nexus.NexusPlugin import org.gradle.plugins.ide.eclipse.model.SourceFolder // common maven publishing configuration subprojects { + if (path.startsWith(':x-plugins')) { + // don't try to configure publshing for extra plugins attached to this build + return + } group = 'org.elasticsearch' version = org.elasticsearch.gradle.VersionProperties.elasticsearch - plugins.withType(NexusPlugin).whenPluginAdded { - modifyPom { - project { - url 'https://github.com/elastic/elasticsearch' - inceptionYear '2009' + plugins.withType(MavenPublishPlugin).whenPluginAdded { + publishing { + publications { + // add license information to generated poms + all { + pom.withXml { XmlProvider xml -> + Node node = xml.asNode() + node.appendNode('inceptionYear', '2009') - scm { - url 'https://github.com/elastic/elasticsearch' - connection 'scm:https://elastic@github.com/elastic/elasticsearch' - developerConnection 'scm:git://github.com/elastic/elasticsearch.git' - } - - licenses { - license { - name 'The Apache Software License, Version 2.0' - url 'http://www.apache.org/licenses/LICENSE-2.0.txt' - 
distribution 'repo' + Node license = node.appendNode('licenses').appendNode('license') + license.appendNode('name', 'The Apache Software License, Version 2.0') + license.appendNode('url', 'http://www.apache.org/licenses/LICENSE-2.0.txt') + license.appendNode('distribution', 'repo') } } } - } - extraArchive { - javadoc = true - tests = false - } - // we have our own username/password prompts so that they only happen once - // TODO: add gpg signing prompts - project.gradle.taskGraph.whenReady { taskGraph -> - if (taskGraph.allTasks.any { it.name == 'uploadArchives' }) { - Console console = System.console() - if (project.hasProperty('nexusUsername') == false) { - String nexusUsername = console.readLine('\nNexus username: ') - project.rootProject.allprojects.each { - it.ext.nexusUsername = nexusUsername - } + repositories.maven { + name 'sonatype' + if (version.endsWith('-SNAPSHOT')) { + url 'https://oss.sonatype.org/content/repositories/snapshots/' + } else { + url 'https://oss.sonatype.org/service/local/staging/deploy/maven2/' } - if (project.hasProperty('nexusPassword') == false) { - String nexusPassword = new String(console.readPassword('\nNexus password: ')) - project.rootProject.allprojects.each { - it.ext.nexusPassword = nexusPassword + + // It would be nice to pass a custom impl of PasswordCredentials + // that could lazily read username/password from the console if not + // present as properties. However, gradle's credential handling is + // completely broken for custom impls. It checks that the class + // passed in is exactly PasswordCredentials or AwsCredentials. 
+ // So instead, we must rely on heuristics of "are we publishing" + // by inspecting the command line, stash the credentials + // once read in the root project, and set them on each project + if (gradle.startParameter.taskNames.contains('publish')) { + Console console = System.console() + if (project.rootProject.hasProperty('nexusUsername') == false) { + project.rootProject.ext.nexusUsername = console.readLine('\nNexus username: ') + } + if (project.rootProject.hasProperty('nexusPassword') == false) { + project.rootProject.ext.nexusPassword = new String(console.readPassword("\nNexus password: ")) + } + + credentials { + username = project.rootProject.nexusUsername + password = project.rootProject.nexusPassword } } } @@ -72,6 +79,7 @@ subprojects { } } + allprojects { // injecting groovy property variables into all projects project.ext { diff --git a/buildSrc/.gitignore b/buildSrc/.gitignore new file mode 100644 index 000000000000..bfdaf60b97ed --- /dev/null +++ b/buildSrc/.gitignore @@ -0,0 +1 @@ +build-bootstrap/ diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index f8d806795049..30053db683e5 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -1,5 +1,3 @@ -import java.nio.file.Files - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -19,25 +17,21 @@ import java.nio.file.Files * under the License. 
*/ -// we must use buildscript + apply so that an external plugin -// can apply this file, since the plugins directive is not -// supported through file includes -buildscript { - repositories { - jcenter() - } - dependencies { - classpath 'com.bmuschko:gradle-nexus-plugin:2.3.1' - } -} +import java.nio.file.Files + apply plugin: 'groovy' -apply plugin: 'com.bmuschko.nexus' -// TODO: move common IDE configuration to a common file to include -apply plugin: 'idea' -apply plugin: 'eclipse' group = 'org.elasticsearch.gradle' -archivesBaseName = 'build-tools' + +if (project == rootProject) { + // change the build dir used during build init, so that doing a clean + // won't wipe out the buildscript jar + buildDir = 'build-bootstrap' +} + +/***************************************************************************** + * Propagating version.properties to the rest of the build * + *****************************************************************************/ Properties props = new Properties() props.load(project.file('version.properties').newDataInputStream()) @@ -51,32 +45,6 @@ if (snapshot) { props.put("elasticsearch", version); } - -repositories { - mavenCentral() - maven { - name 'sonatype-snapshots' - url "https://oss.sonatype.org/content/repositories/snapshots/" - } - jcenter() -} - -dependencies { - compile gradleApi() - compile localGroovy() - compile "com.carrotsearch.randomizedtesting:junit4-ant:${props.getProperty('randomizedrunner')}" - compile("junit:junit:${props.getProperty('junit')}") { - transitive = false - } - compile 'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3' - compile 'com.netflix.nebula:gradle-info-plugin:3.0.3' - compile 'org.eclipse.jgit:org.eclipse.jgit:3.2.0.201312181205-r' - compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE.... 
- compile 'de.thetaphi:forbiddenapis:2.0' - compile 'com.bmuschko:gradle-nexus-plugin:2.3.1' - compile 'org.apache.rat:apache-rat:0.11' -} - File tempPropertiesFile = new File(project.buildDir, "version.properties") task writeVersionProperties { inputs.properties(props) @@ -95,31 +63,92 @@ processResources { from tempPropertiesFile } -extraArchive { - javadoc = false - tests = false +/***************************************************************************** + * Dependencies used by the entire build * + *****************************************************************************/ + +repositories { + jcenter() } -idea { - module { - inheritOutputDirs = false - outputDir = file('build-idea/classes/main') - testOutputDir = file('build-idea/classes/test') +dependencies { + compile gradleApi() + compile localGroovy() + compile "com.carrotsearch.randomizedtesting:junit4-ant:${props.getProperty('randomizedrunner')}" + compile("junit:junit:${props.getProperty('junit')}") { + transitive = false + } + compile 'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3' + compile 'com.netflix.nebula:nebula-publishing-plugin:4.4.4' + compile 'com.netflix.nebula:gradle-info-plugin:3.0.3' + compile 'org.eclipse.jgit:org.eclipse.jgit:3.2.0.201312181205-r' + compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE.... 
+ compile 'de.thetaphi:forbiddenapis:2.0' + compile 'com.bmuschko:gradle-nexus-plugin:2.3.1' + compile 'org.apache.rat:apache-rat:0.11' +} + + +/***************************************************************************** + * Bootstrap repositories and IDE setup * + *****************************************************************************/ +// this will only happen when buildSrc is built on its own during build init +if (project == rootProject) { + + repositories { + mavenCentral() + maven { + name 'sonatype-snapshots' + url "https://oss.sonatype.org/content/repositories/snapshots/" + } + } + + apply plugin: 'idea' + apply plugin: 'eclipse' + + idea { + module { + inheritOutputDirs = false + outputDir = file('build-idea/classes/main') + testOutputDir = file('build-idea/classes/test') + } + } + + eclipse { + classpath { + defaultOutputDir = file('build-eclipse') + } + } + + task copyEclipseSettings(type: Copy) { + from project.file('src/main/resources/eclipse.settings') + into '.settings' + } + // otherwise .settings is not nuked entirely + tasks.cleanEclipse { + delete '.settings' + } + tasks.eclipse.dependsOn(cleanEclipse, copyEclipseSettings) +} + +/***************************************************************************** + * Normal project checks * + *****************************************************************************/ + +// this happens when included as a normal project in the build, which we do +// to enforce precommit checks like forbidden apis, as well as setup publishing +if (project != rootProject) { + apply plugin: 'nebula.maven-base-publish' + apply plugin: 'nebula.maven-scm' + apply plugin: 'nebula.source-jar' + apply plugin: 'nebula.javadoc-jar' + + publishing { + publications { + nebula { + artifactId 'build-tools' + } + } } } -eclipse { - classpath { - defaultOutputDir = file('build-eclipse') - } -} - -task copyEclipseSettings(type: Copy) { - from project.file('src/main/resources/eclipse.settings') - into '.settings' -} -// 
otherwise .settings is not nuked entirely -tasks.cleanEclipse { - delete '.settings' -} -tasks.eclipse.dependsOn(cleanEclipse, copyEclipseSettings) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index ca78157bcf2e..a167fb0c1a97 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -32,7 +32,8 @@ import org.gradle.api.artifacts.ModuleVersionIdentifier import org.gradle.api.artifacts.ProjectDependency import org.gradle.api.artifacts.ResolvedArtifact import org.gradle.api.artifacts.dsl.RepositoryHandler -import org.gradle.api.artifacts.maven.MavenPom +import org.gradle.api.publish.maven.MavenPublication +import org.gradle.api.publish.maven.plugins.MavenPublishPlugin import org.gradle.api.tasks.bundling.Jar import org.gradle.api.tasks.compile.JavaCompile import org.gradle.internal.jvm.Jvm @@ -60,7 +61,6 @@ class BuildPlugin implements Plugin { project.pluginManager.apply('nebula.info-java') project.pluginManager.apply('nebula.info-scm') project.pluginManager.apply('nebula.info-jar') - project.pluginManager.apply('com.bmuschko.nexus') project.pluginManager.apply(ProvidedBasePlugin) globalBuildInfo(project) @@ -68,6 +68,7 @@ class BuildPlugin implements Plugin { configureConfigurations(project) project.ext.versions = VersionProperties.versions configureCompile(project) + configurePublishing(project) configureTest(project) configurePrecommit(project) @@ -260,48 +261,6 @@ class BuildPlugin implements Plugin { project.configurations.compile.dependencies.all(disableTransitiveDeps) project.configurations.testCompile.dependencies.all(disableTransitiveDeps) project.configurations.provided.dependencies.all(disableTransitiveDeps) - - // add exclusions to the pom directly, for each of the transitive deps of this project's deps - project.modifyPom { MavenPom pom -> - 
pom.withXml { XmlProvider xml -> - // first find if we have dependencies at all, and grab the node - NodeList depsNodes = xml.asNode().get('dependencies') - if (depsNodes.isEmpty()) { - return - } - - // check each dependency for any transitive deps - for (Node depNode : depsNodes.get(0).children()) { - String groupId = depNode.get('groupId').get(0).text() - String artifactId = depNode.get('artifactId').get(0).text() - String version = depNode.get('version').get(0).text() - - // collect the transitive deps now that we know what this dependency is - String depConfig = transitiveDepConfigName(groupId, artifactId, version) - Configuration configuration = project.configurations.findByName(depConfig) - if (configuration == null) { - continue // we did not make this dep non-transitive - } - Set artifacts = configuration.resolvedConfiguration.resolvedArtifacts - if (artifacts.size() <= 1) { - // this dep has no transitive deps (or the only artifact is itself) - continue - } - - // we now know we have something to exclude, so add the exclusion elements - Node exclusions = depNode.appendNode('exclusions') - for (ResolvedArtifact transitiveArtifact : artifacts) { - ModuleVersionIdentifier transitiveDep = transitiveArtifact.moduleVersion.id - if (transitiveDep.group == groupId && transitiveDep.name == artifactId) { - continue; // don't exclude the dependency itself! - } - Node exclusion = exclusions.appendNode('exclusion') - exclusion.appendNode('groupId', transitiveDep.group) - exclusion.appendNode('artifactId', transitiveDep.name) - } - } - } - } } /** Adds repositores used by ES dependencies */ @@ -375,6 +334,59 @@ class BuildPlugin implements Plugin { } } + /** + * Adds a hook to all publications that will effectively make the maven pom transitive dependency free. 
+ */ + private static void configurePublishing(Project project) { + project.plugins.withType(MavenPublishPlugin.class).whenPluginAdded { + project.publishing { + publications { + all { MavenPublication publication -> // we only deal with maven + // add exclusions to the pom directly, for each of the transitive deps of this project's deps + publication.pom.withXml { XmlProvider xml -> + // first find if we have dependencies at all, and grab the node + NodeList depsNodes = xml.asNode().get('dependencies') + if (depsNodes.isEmpty()) { + return + } + + // check each dependency for any transitive deps + for (Node depNode : depsNodes.get(0).children()) { + String groupId = depNode.get('groupId').get(0).text() + String artifactId = depNode.get('artifactId').get(0).text() + String version = depNode.get('version').get(0).text() + + // collect the transitive deps now that we know what this dependency is + String depConfig = transitiveDepConfigName(groupId, artifactId, version) + Configuration configuration = project.configurations.findByName(depConfig) + if (configuration == null) { + continue // we did not make this dep non-transitive + } + Set artifacts = configuration.resolvedConfiguration.resolvedArtifacts + if (artifacts.size() <= 1) { + // this dep has no transitive deps (or the only artifact is itself) + continue + } + + // we now know we have something to exclude, so add the exclusion elements + Node exclusions = depNode.appendNode('exclusions') + for (ResolvedArtifact transitiveArtifact : artifacts) { + ModuleVersionIdentifier transitiveDep = transitiveArtifact.moduleVersion.id + if (transitiveDep.group == groupId && transitiveDep.name == artifactId) { + continue; // don't exclude the dependency itself! 
+ } + Node exclusion = exclusions.appendNode('exclusion') + exclusion.appendNode('groupId', transitiveDep.group) + exclusion.appendNode('artifactId', transitiveDep.name) + } + } + } + } + } + } + } + } + /** Returns a closure of common configuration shared by unit and integration tests. */ static Closure commonTestConfig(Project project) { return { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index b04f959e0681..f1123678eae3 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -18,11 +18,14 @@ */ package org.elasticsearch.gradle.plugin +import nebula.plugin.publishing.maven.MavenManifestPlugin +import nebula.plugin.publishing.maven.MavenScmPlugin import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.test.RunTask import org.gradle.api.Project import org.gradle.api.artifacts.Dependency +import org.gradle.api.publish.maven.MavenPublication import org.gradle.api.tasks.SourceSet import org.gradle.api.tasks.bundling.Zip @@ -34,6 +37,7 @@ public class PluginBuildPlugin extends BuildPlugin { @Override public void apply(Project project) { super.apply(project) + configureDependencies(project) // this afterEvaluate must happen before the afterEvaluate added by integTest creation, // so that the file name resolution for installing the plugin will be setup @@ -50,6 +54,10 @@ public class PluginBuildPlugin extends BuildPlugin { } else { project.integTest.clusterConfig.plugin(name, project.bundlePlugin.outputs.files) project.tasks.run.clusterConfig.plugin(name, project.bundlePlugin.outputs.files) + + if (project.pluginProperties.extension.publish) { + configurePublishing(project) + } } project.namingConventions { @@ -59,6 +67,7 @@ public 
class PluginBuildPlugin extends BuildPlugin { } createIntegTestTask(project) createBundleTask(project) + configurePublishing(project) project.tasks.create('run', RunTask) // allow running ES with this plugin in the foreground of a build } @@ -125,4 +134,32 @@ public class PluginBuildPlugin extends BuildPlugin { project.configurations.getByName('default').extendsFrom = [] project.artifacts.add('default', bundle) } + + /** + * Adds the plugin jar and zip as publications. + */ + private static void configurePublishing(Project project) { + project.plugins.apply(MavenScmPlugin.class) + project.plugins.apply(MavenManifestPlugin.class) + + project.publishing { + publications { + nebula { + artifact project.bundlePlugin + pom.withXml { + // overwrite the name/description in the pom nebula set up + Node root = asNode() + for (Node node : root.children()) { + if (node.name() == 'name') { + node.setValue(name) + } else if (node.name() == 'description') { + node.setValue(project.pluginProperties.extension.description) + } + } + } + } + } + } + + } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy index 7b949b3e1da1..9429b3a4d00b 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy @@ -42,6 +42,10 @@ class PluginPropertiesExtension { @Input boolean isolated = true + /** Whether the plugin should be published to maven. 
*/ + @Input + boolean publish = false + PluginPropertiesExtension(Project project) { name = project.name version = project.version diff --git a/core/build.gradle b/core/build.gradle index ab3754e72ff6..329f255688ad 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -22,10 +22,19 @@ import com.carrotsearch.gradle.junit4.RandomizedTestingTask import org.elasticsearch.gradle.BuildPlugin apply plugin: 'elasticsearch.build' -apply plugin: 'com.bmuschko.nexus' apply plugin: 'nebula.optional-base' +apply plugin: 'nebula.maven-base-publish' +apply plugin: 'nebula.maven-scm' +apply plugin: 'nebula.source-jar' +apply plugin: 'nebula.javadoc-jar' -archivesBaseName = 'elasticsearch' +publishing { + publications { + nebula { + artifactId 'elasticsearch' + } + } +} dependencies { diff --git a/distribution/build.gradle b/distribution/build.gradle index d70f0254f3be..3d5716b9693d 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -157,6 +157,19 @@ subprojects { MavenFilteringHack.filter(it, expansions) } } + + /***************************************************************************** + * Publishing setup * + *****************************************************************************/ + apply plugin: 'nebula.maven-base-publish' + apply plugin: 'nebula.maven-scm' + publishing { + publications { + nebula { + artifactId 'elasticsearch' + } + } + } } /***************************************************************************** diff --git a/distribution/deb/build.gradle b/distribution/deb/build.gradle index d9bd8447ab90..073039c50174 100644 --- a/distribution/deb/build.gradle +++ b/distribution/deb/build.gradle @@ -36,7 +36,14 @@ task buildDeb(type: Deb) { artifacts { 'default' buildDeb - archives buildDeb +} + +publishing { + publications { + nebula { + artifact buildDeb + } + } } integTest { diff --git a/distribution/integ-test-zip/build.gradle b/distribution/integ-test-zip/build.gradle index 23191ff03a4b..8507be0e18e9 100644 --- 
a/distribution/integ-test-zip/build.gradle +++ b/distribution/integ-test-zip/build.gradle @@ -24,7 +24,14 @@ task buildZip(type: Zip) { artifacts { 'default' buildZip - archives buildZip +} + +publishing { + publications { + nebula { + artifact buildZip + } + } } integTest.dependsOn buildZip diff --git a/distribution/rpm/build.gradle b/distribution/rpm/build.gradle index 2ab78fe7e41b..599c1d95ecef 100644 --- a/distribution/rpm/build.gradle +++ b/distribution/rpm/build.gradle @@ -33,7 +33,14 @@ task buildRpm(type: Rpm) { artifacts { 'default' buildRpm - archives buildRpm +} + +publishing { + publications { + nebula { + artifact buildRpm + } + } } integTest { diff --git a/distribution/tar/build.gradle b/distribution/tar/build.gradle index 7230ab50799e..8b3823b35137 100644 --- a/distribution/tar/build.gradle +++ b/distribution/tar/build.gradle @@ -26,5 +26,12 @@ task buildTar(type: Tar) { artifacts { 'default' buildTar - archives buildTar +} + +publishing { + publications { + nebula { + artifact buildTar + } + } } diff --git a/distribution/zip/build.gradle b/distribution/zip/build.gradle index 23191ff03a4b..8507be0e18e9 100644 --- a/distribution/zip/build.gradle +++ b/distribution/zip/build.gradle @@ -24,7 +24,14 @@ task buildZip(type: Zip) { artifacts { 'default' buildZip - archives buildZip +} + +publishing { + publications { + nebula { + artifact buildZip + } + } } integTest.dependsOn buildZip diff --git a/modules/build.gradle b/modules/build.gradle index 3cafe7d903f7..4b88dfd703f6 100644 --- a/modules/build.gradle +++ b/modules/build.gradle @@ -40,8 +40,4 @@ subprojects { throw new InvalidModelException("Modules cannot disable isolation") } } - - // these are implementation details of our build, no need to publish them! 
- install.enabled = false - uploadArchives.enabled = false } diff --git a/plugins/build.gradle b/plugins/build.gradle index e49b08c6015c..4c33260eba59 100644 --- a/plugins/build.gradle +++ b/plugins/build.gradle @@ -27,5 +27,7 @@ configure(subprojects.findAll { it.parent.path == project.path }) { esplugin { // for local ES plugins, the name of the plugin is the same as the directory name project.name + // only publish non examples + publish project.name.contains('example') == false } } diff --git a/settings.gradle b/settings.gradle index b1bb374fff1d..6cb3cd139905 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1,6 +1,7 @@ rootProject.name = 'elasticsearch' List projects = [ + 'build-tools', 'rest-api-spec', 'core', 'distribution:integ-test-zip', @@ -58,6 +59,8 @@ if (isEclipse) { include projects.toArray(new String[0]) +project(':build-tools').projectDir = new File(rootProject.projectDir, 'buildSrc') + if (isEclipse) { project(":core").projectDir = new File(rootProject.projectDir, 'core/src/main') project(":core").buildFileName = 'eclipse-build.gradle' diff --git a/test/build.gradle b/test/build.gradle index fcf4f5bb7617..8605655e72ab 100644 --- a/test/build.gradle +++ b/test/build.gradle @@ -41,4 +41,9 @@ subprojects { // TODO: why is the test framework pulled in... 
forbiddenApisMain.enabled = false jarHell.enabled = false + + apply plugin: 'nebula.maven-base-publish' + apply plugin: 'nebula.maven-scm' + apply plugin: 'nebula.source-jar' + apply plugin: 'nebula.javadoc-jar' } From 302087d686ef2e86d6c05afbb55ac7d18e02c43e Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 16 Mar 2016 09:37:47 +0100 Subject: [PATCH 259/320] [TEST] Wait for ongoing merges in testRenewSyncFlush Now that we also renew on forceMerge we might get a concurrent flush while we are flushing on the test level --- .../org/elasticsearch/index/engine/InternalEngineTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index abe0851c2b62..ab2041baa4a0 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -867,7 +867,7 @@ public class InternalEngineTests extends ESTestCase { assertEquals(engine.getLastWriteNanos(), delete.startTime()); } assertFalse(engine.tryRenewSyncCommit()); - engine.flush(); + engine.flush(false, true); // we might hit a concurrent flush from a finishing merge here - just wait if ongoing... 
assertNull(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID)); assertNull(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID)); } From d2db9cf95fce0cb938ba805adc916ba760ae81c6 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 16 Mar 2016 04:46:25 -0400 Subject: [PATCH 260/320] Fix es.path.home on Windows --- distribution/src/main/resources/bin/elasticsearch-plugin.bat | 2 +- distribution/src/main/resources/bin/elasticsearch.in.bat | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/distribution/src/main/resources/bin/elasticsearch-plugin.bat b/distribution/src/main/resources/bin/elasticsearch-plugin.bat index 9ed797e6308c..6c6be019fc67 100644 --- a/distribution/src/main/resources/bin/elasticsearch-plugin.bat +++ b/distribution/src/main/resources/bin/elasticsearch-plugin.bat @@ -48,7 +48,7 @@ GOTO loop SET HOSTNAME=%COMPUTERNAME% -"%JAVA_HOME%\bin\java" -client -Ees.path.home="%ES_HOME%" !properties! -cp "%ES_HOME%/lib/*;" "org.elasticsearch.plugins.PluginCli" !args! +"%JAVA_HOME%\bin\java" -client -Des.path.home="%ES_HOME%" !properties! -cp "%ES_HOME%/lib/*;" "org.elasticsearch.plugins.PluginCli" !args! 
goto finally diff --git a/distribution/src/main/resources/bin/elasticsearch.in.bat b/distribution/src/main/resources/bin/elasticsearch.in.bat index 80ed7894316b..537df9d4f9f9 100644 --- a/distribution/src/main/resources/bin/elasticsearch.in.bat +++ b/distribution/src/main/resources/bin/elasticsearch.in.bat @@ -104,4 +104,4 @@ ECHO additional elements via the plugin mechanism, or if code must really be 1>& ECHO added to the main classpath, add jars to lib\, unsupported 1>&2 EXIT /B 1 ) -set ES_PARAMS=-Delasticsearch -Ees.path.home="%ES_HOME%" +set ES_PARAMS=-Delasticsearch -Des.path.home="%ES_HOME%" From 79356c8a3bdeec28bb2852694a876b13b4e00d9b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 16 Mar 2016 10:59:48 +0100 Subject: [PATCH 261/320] Query DSL: `constant_score` should throw error on more than one filter When specifying more than one `filter` in a `constant_score` query, the last one will be the only one that will be executed, overwriting previous filters. It should rather raise a ParseException to notify the user that only one filter query is accepted. 
Closes #17126 --- .../index/query/ConstantScoreQueryParser.java | 8 ++- .../query/ConstantScoreQueryBuilderTests.java | 54 +++++++++++++++---- 2 files changed, 50 insertions(+), 12 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryParser.java index 1ad64c42135a..318a0b338058 100644 --- a/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryParser.java @@ -42,7 +42,7 @@ public class ConstantScoreQueryParser implements QueryParser query = null; boolean queryFound = false; String queryName = null; float boost = AbstractQueryBuilder.DEFAULT_BOOST; @@ -56,6 +56,10 @@ public class ConstantScoreQueryParser implements QueryParser Date: Wed, 16 Mar 2016 12:31:00 +0100 Subject: [PATCH 262/320] Docs: Added redirect entries for multicast plugin and the cloud plugins --- docs/plugins/index.asciidoc | 3 +++ docs/plugins/redirects.asciidoc | 40 +++++++++++++++++++++++++++++++++ 2 files changed, 43 insertions(+) create mode 100644 docs/plugins/redirects.asciidoc diff --git a/docs/plugins/index.asciidoc b/docs/plugins/index.asciidoc index 598e7872dcd0..fa05668c1318 100644 --- a/docs/plugins/index.asciidoc +++ b/docs/plugins/index.asciidoc @@ -67,3 +67,6 @@ include::integrations.asciidoc[] include::authors.asciidoc[] +include::redirects.asciidoc[] + + diff --git a/docs/plugins/redirects.asciidoc b/docs/plugins/redirects.asciidoc new file mode 100644 index 000000000000..caf2008e5219 --- /dev/null +++ b/docs/plugins/redirects.asciidoc @@ -0,0 +1,40 @@ +["appendix",role="exclude",id="redirects"] += Deleted pages + +The following pages have moved or been deleted. + +[role="exclude",id="discovery-multicast"] +=== Multicast Discovery Plugin + +The `multicast-discovery` plugin has been removed. 
Instead, configure networking +using unicast (see {ref}/modules-network.html[Network settings]) or using +one of the <>. + +[role="exclude",id="cloud-aws"] +=== AWS Cloud Plugin + +The `cloud-aws` plugin has been split into two separate plugins: + +* <> (`discovery-ec2`) +* <> (`repository-s3`) + + +[role="exclude",id="cloud-azure"] +=== Azure Cloud Plugin + +The `cloud-azure` plugin has been split into two separate plugins: + +* <> (`discovery-azure`) +* <> (`repository-azure`) + + +[role="exclude",id="cloud-gce"] +=== GCE Cloud Plugin + +The `cloud-gce` plugin has been renamed to <> (`discovery-gce`). + + + + + + From 71971720471a6bea34aafdb9773313a0988d1cea Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 15 Mar 2016 18:15:14 -0400 Subject: [PATCH 263/320] [reindex] Properly register status Without this commit fetching the status of a reindex from a node that isn't coordinating the reindex will fail. This commit properly registers reindex's status so this doesn't happen. To do so it moves all task status registration into NetworkModule and creates a method to register other statuses which the reindex plugin calls. 
--- .../common/network/NetworkModule.java | 7 ++ .../transport/TransportService.java | 8 +-- .../node/tasks/TaskManagerTestCase.java | 2 +- .../TransportClientHeadersTests.java | 4 +- .../TransportClientNodesServiceTests.java | 2 +- .../common/network/NetworkModuleTests.java | 67 ++++++++++++++++--- .../discovery/ZenFaultDetectionTests.java | 2 +- .../transport/TransportModuleTests.java | 4 +- .../netty/NettyScheduledPingTests.java | 4 +- .../messy/tests/IndicesRequestTests.java | 4 +- .../index/reindex/ReindexPlugin.java | 7 +- .../index/reindex/CancelTestUtils.java | 22 ++++++ .../test/transport/MockTransportService.java | 8 +-- 13 files changed, 107 insertions(+), 34 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java index 1a54ad2753ac..7e4c1348f8e6 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -22,6 +22,7 @@ package org.elasticsearch.common.network; import java.util.Arrays; import java.util.List; +import org.elasticsearch.action.support.replication.ReplicationTask; import org.elasticsearch.client.transport.TransportClientNodesService; import org.elasticsearch.client.transport.support.TransportProxyClient; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -139,6 +140,7 @@ import org.elasticsearch.rest.action.template.RestPutSearchTemplateAction; import org.elasticsearch.rest.action.termvectors.RestMultiTermVectorsAction; import org.elasticsearch.rest.action.termvectors.RestTermVectorsAction; import org.elasticsearch.rest.action.update.RestUpdateAction; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.local.LocalTransport; @@ -326,6 +328,7 @@ public class NetworkModule extends AbstractModule { 
registerTransportService(NETTY_TRANSPORT, TransportService.class); registerTransport(LOCAL_TRANSPORT, LocalTransport.class); registerTransport(NETTY_TRANSPORT, NettyTransport.class); + registerTaskStatus(ReplicationTask.Status.PROTOTYPE); if (transportClient == false) { registerHttpTransport(NETTY_TRANSPORT, NettyHttpServerTransport.class); @@ -371,6 +374,10 @@ public class NetworkModule extends AbstractModule { } } + public void registerTaskStatus(Task.Status prototype) { + namedWriteableRegistry.registerPrototype(Task.Status.class, prototype); + } + @Override protected void configure() { bind(NetworkService.class).toInstance(networkService); diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index 0faad9003393..2d804bfc7863 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -20,13 +20,11 @@ package org.elasticsearch.transport; import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction; -import org.elasticsearch.action.support.replication.ReplicationTask; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.metrics.MeanMetric; @@ -43,7 +41,6 @@ import org.elasticsearch.common.util.concurrent.ConcurrentMapLong; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.util.concurrent.ThreadContext; -import 
org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; @@ -113,11 +110,11 @@ public class TransportService extends AbstractLifecycleComponent> requests = new HashMap<>(); @Inject - public InterceptingTransportService(Settings settings, Transport transport, ThreadPool threadPool, NamedWriteableRegistry namedWriteableRegistry) { - super(settings, transport, threadPool, namedWriteableRegistry); + public InterceptingTransportService(Settings settings, Transport transport, ThreadPool threadPool) { + super(settings, transport, threadPool); } synchronized List consumeRequests(String action) { diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java index a01c6e3b30e0..9ab025a25272 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java @@ -41,8 +41,9 @@ public class ReindexPlugin extends Plugin { actionModule.registerAction(UpdateByQueryAction.INSTANCE, TransportUpdateByQueryAction.class); } - public void onModule(NetworkModule restModule) { - restModule.registerRestHandler(RestReindexAction.class); - restModule.registerRestHandler(RestUpdateByQueryAction.class); + public void onModule(NetworkModule networkModule) { + networkModule.registerRestHandler(RestReindexAction.class); + networkModule.registerRestHandler(RestUpdateByQueryAction.class); + networkModule.registerTaskStatus(BulkByScrollTask.Status.PROTOTYPE); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTestUtils.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTestUtils.java index d1f6b1ee1714..5117d2781be4 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTestUtils.java +++ 
b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTestUtils.java @@ -21,7 +21,10 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ListenableActionFuture; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo; +import org.elasticsearch.index.reindex.BulkByScrollTask.Status; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.NativeScriptFactory; @@ -41,7 +44,10 @@ import java.util.concurrent.TimeoutException; import static java.util.Collections.emptyMap; import static org.elasticsearch.test.ESIntegTestCase.client; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; /** @@ -76,10 +82,26 @@ public class CancelTestUtils { // Wait until the script is on the second document. barrier.await(30, TimeUnit.SECONDS); + // Status should show running + ListTasksResponse tasksList = client().admin().cluster().prepareListTasks().setActions(actionToCancel).setDetailed(true).get(); + assertThat(tasksList.getNodeFailures(), empty()); + assertThat(tasksList.getTaskFailures(), empty()); + assertThat(tasksList.getTasks(), hasSize(1)); + BulkByScrollTask.Status status = (Status) tasksList.getTasks().get(0).getStatus(); + assertNull(status.getReasonCancelled()); + // Cancel the request while the script is running. This will prevent the request from being sent at all. List cancelledTasks = client().admin().cluster().prepareCancelTasks().setActions(actionToCancel).get().getTasks(); assertThat(cancelledTasks, hasSize(1)); + // The status should now show canceled. 
The request will still be in the list because the script is still blocked. + tasksList = client().admin().cluster().prepareListTasks().setActions(actionToCancel).setDetailed(true).get(); + assertThat(tasksList.getNodeFailures(), empty()); + assertThat(tasksList.getTaskFailures(), empty()); + assertThat(tasksList.getTasks(), hasSize(1)); + status = (Status) tasksList.getTasks().get(0).getStatus(); + assertEquals(CancelTasksRequest.DEFAULT_REASON, status.getReasonCancelled()); + // Now let the next document through. It won't be sent because the request is cancelled but we need to unblock the script. barrier.await(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index 037451832400..f5fd51238477 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -99,22 +99,22 @@ public class MockTransportService extends TransportService { public static MockTransportService local(Settings settings, Version version, ThreadPool threadPool) { NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); Transport transport = new LocalTransport(settings, threadPool, version, namedWriteableRegistry); - return new MockTransportService(settings, transport, threadPool, namedWriteableRegistry); + return new MockTransportService(settings, transport, threadPool); } public static MockTransportService nettyFromThreadPool(Settings settings, Version version, ThreadPool threadPool) { NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); Transport transport = new NettyTransport(settings, threadPool, new NetworkService(settings), BigArrays.NON_RECYCLING_INSTANCE, version, namedWriteableRegistry); - return new MockTransportService(Settings.EMPTY, transport, threadPool, 
namedWriteableRegistry); + return new MockTransportService(Settings.EMPTY, transport, threadPool); } private final Transport original; @Inject - public MockTransportService(Settings settings, Transport transport, ThreadPool threadPool, NamedWriteableRegistry namedWriteableRegistry) { - super(settings, new LookupTestTransport(transport), threadPool, namedWriteableRegistry); + public MockTransportService(Settings settings, Transport transport, ThreadPool threadPool) { + super(settings, new LookupTestTransport(transport), threadPool); this.original = transport; } From e32da555aa56b20f914610282392f51c2d12b8ac Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Wed, 16 Mar 2016 13:42:43 +0100 Subject: [PATCH 264/320] Fix Windows start script to pass parameters in last position Relates to #15320 --- distribution/src/main/resources/bin/elasticsearch.bat | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distribution/src/main/resources/bin/elasticsearch.bat b/distribution/src/main/resources/bin/elasticsearch.bat index a0079fc967c8..4da9a5d032ab 100644 --- a/distribution/src/main/resources/bin/elasticsearch.bat +++ b/distribution/src/main/resources/bin/elasticsearch.bat @@ -43,6 +43,6 @@ IF ERRORLEVEL 1 ( EXIT /B %ERRORLEVEL% ) -"%JAVA_HOME%\bin\java" %JAVA_OPTS% %ES_JAVA_OPTS% %ES_PARAMS% !newparams! -cp "%ES_CLASSPATH%" "org.elasticsearch.bootstrap.Elasticsearch" start +"%JAVA_HOME%\bin\java" %JAVA_OPTS% %ES_JAVA_OPTS% %ES_PARAMS% -cp "%ES_CLASSPATH%" "org.elasticsearch.bootstrap.Elasticsearch" start !newparams! ENDLOCAL From b10db19595b0c638501a7dcd9e25cf860d0f5c98 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Wed, 9 Mar 2016 14:43:00 -0500 Subject: [PATCH 265/320] Bring back tests for missing elements in the diff-serialized cluster state We can add it back now that we improved our compression framework. 
Closes #11257 --- .../src/main/java/org/elasticsearch/test/ESIntegTestCase.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 235ccec4ee06..493efa9021e2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -1100,7 +1100,7 @@ public abstract class ESIntegTestCase extends ESTestCase { // remove local node reference masterClusterState = ClusterState.Builder.fromBytes(masterClusterStateBytes, null); Map masterStateMap = convertToMap(masterClusterState); - int masterClusterStateSize = masterClusterState.toString().length(); + int masterClusterStateSize = ClusterState.Builder.toBytes(masterClusterState).length; String masterId = masterClusterState.nodes().masterNodeId(); for (Client client : cluster().getClients()) { ClusterState localClusterState = client.admin().cluster().prepareState().all().setLocal(true).get().getState(); @@ -1108,7 +1108,7 @@ public abstract class ESIntegTestCase extends ESTestCase { // remove local node reference localClusterState = ClusterState.Builder.fromBytes(localClusterStateBytes, null); final Map localStateMap = convertToMap(localClusterState); - final int localClusterStateSize = localClusterState.toString().length(); + final int localClusterStateSize = ClusterState.Builder.toBytes(localClusterState).length; // Check that the non-master node has the same version of the cluster state as the master and // that the master node matches the master (otherwise there is no requirement for the cluster state to match) if (masterClusterState.version() == localClusterState.version() && masterId.equals(localClusterState.nodes().masterNodeId())) { From 0543d46c1dc9624c9938f249633df064bf32b684 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Wed, 16 Mar 2016 
17:22:00 +0100 Subject: [PATCH 266/320] Fixed regex in cat.recovery REST tes The time column should accept integer ms or floating point seconds --- .../resources/rest-api-spec/test/cat.recovery/10_basic.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml index 820cf6dec4db..effc4c20313d 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml @@ -27,7 +27,7 @@ ( index1 \s+ \d \s+ # shard - \d+ms \s+ # time + (?:\d+ms|\d+(?:\.\d+)?s) \s+ # time in ms or seconds (store|replica|snapshot|relocating) \s+ # type (init|index|verify_index|translog|finalize|done) \s+ # stage [-\w./]+ \s+ # source_host From 80f638b56a1c1551b391ce67ae381ca47ee8a641 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 11 Mar 2016 13:00:47 -0500 Subject: [PATCH 267/320] Support scheduled commands in current context Adds support for scheduling commands to run at a later time on another thread pool in the current thread's context: ```java Runnable someCommand = () -> {System.err.println("Demo");}; someCommand = threadPool.getThreadContext().preserveContext(someCommand); threadPool.schedule(timeValueMinutes(1), Names.GENERAL, someCommand); ``` This happens automatically for calls to `threadPool.execute` but `schedule` and `scheduleWithFixedDelay` don't do that, presumably because scheduled tasks are usually context-less. Rather than preserve the current context on all scheduled tasks this just makes it possible to preserve it using the syntax above. To make this all go it moves the Runnables that wrap the commands from EsThreadPoolExecutor into ThreadContext. This, or something like it, is required to support reindex throttling. 
--- .../resources/checkstyle_suppressions.xml | 1 - .../util/concurrent/EsThreadPoolExecutor.java | 113 +-------------- .../common/util/concurrent/ThreadContext.java | 130 ++++++++++++++++++ .../elasticsearch/threadpool/ThreadPool.java | 29 ++++ .../util/concurrent/ThreadContextTests.java | 69 ++++++++++ 5 files changed, 235 insertions(+), 107 deletions(-) diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index b9039f92659d..a9c73bca1278 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -390,7 +390,6 @@ - diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java index fde8d828295d..2f664679bb44 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java @@ -40,11 +40,14 @@ public class EsThreadPoolExecutor extends ThreadPoolExecutor { */ private final String name; - EsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue workQueue, ThreadFactory threadFactory, ThreadContext contextHolder) { + EsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, + BlockingQueue workQueue, ThreadFactory threadFactory, ThreadContext contextHolder) { this(name, corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, new EsAbortPolicy(), contextHolder); } - EsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue workQueue, ThreadFactory threadFactory, XRejectedExecutionHandler handler, ThreadContext contextHolder) { + EsThreadPoolExecutor(String name, int corePoolSize, 
int maximumPoolSize, long keepAliveTime, TimeUnit unit, + BlockingQueue workQueue, ThreadFactory threadFactory, XRejectedExecutionHandler handler, + ThreadContext contextHolder) { super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, handler); this.name = name; this.contextHolder = contextHolder; @@ -133,112 +136,10 @@ public class EsThreadPoolExecutor extends ThreadPoolExecutor { } protected Runnable wrapRunnable(Runnable command) { - final Runnable wrappedCommand; - if (command instanceof AbstractRunnable) { - wrappedCommand = new FilterAbstractRunnable(contextHolder, (AbstractRunnable) command); - } else { - wrappedCommand = new FilterRunnable(contextHolder, command); - } - return wrappedCommand; + return contextHolder.preserveContext(command); } protected Runnable unwrap(Runnable runnable) { - if (runnable instanceof FilterAbstractRunnable) { - return ((FilterAbstractRunnable) runnable).in; - } else if (runnable instanceof FilterRunnable) { - return ((FilterRunnable) runnable).in; - } - return runnable; + return contextHolder.unwrap(runnable); } - - private class FilterAbstractRunnable extends AbstractRunnable { - private final ThreadContext contextHolder; - private final AbstractRunnable in; - private final ThreadContext.StoredContext ctx; - - FilterAbstractRunnable(ThreadContext contextHolder, AbstractRunnable in) { - this.contextHolder = contextHolder; - ctx = contextHolder.newStoredContext(); - this.in = in; - } - - @Override - public boolean isForceExecution() { - return in.isForceExecution(); - } - - @Override - public void onAfter() { - in.onAfter(); - } - - @Override - public void onFailure(Throwable t) { - in.onFailure(t); - } - - @Override - public void onRejection(Throwable t) { - in.onRejection(t); - } - - @Override - protected void doRun() throws Exception { - boolean whileRunning = false; - try (ThreadContext.StoredContext ingore = contextHolder.stashContext()){ - ctx.restore(); - whileRunning = true; - in.doRun(); - 
whileRunning = false; - } catch (IllegalStateException ex) { - if (whileRunning || isShutdown() == false) { - throw ex; - } - // if we hit an ISE here we have been shutting down - // this comes from the threadcontext and barfs if - // our threadpool has been shutting down - } - } - - @Override - public String toString() { - return in.toString(); - } - - } - - private class FilterRunnable implements Runnable { - private final ThreadContext contextHolder; - private final Runnable in; - private final ThreadContext.StoredContext ctx; - - FilterRunnable(ThreadContext contextHolder, Runnable in) { - this.contextHolder = contextHolder; - ctx = contextHolder.newStoredContext(); - this.in = in; - } - - @Override - public void run() { - boolean whileRunning = false; - try (ThreadContext.StoredContext ingore = contextHolder.stashContext()){ - ctx.restore(); - whileRunning = true; - in.run(); - whileRunning = false; - } catch (IllegalStateException ex) { - if (whileRunning || isShutdown() == false) { - throw ex; - } - // if we hit an ISE here we have been shutting down - // this comes from the threadcontext and barfs if - // our threadpool has been shutting down - } - } - @Override - public String toString() { - return in.toString(); - } - } - } diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index 2ac6082e85da..462b4f539dcf 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -200,6 +200,36 @@ public final class ThreadContext implements Closeable, Writeablecommand has already been passed through this method then it is returned unaltered rather than wrapped twice. 
+ */ + public Runnable preserveContext(Runnable command) { + if (command instanceof ContextPreservingAbstractRunnable) { + return command; + } + if (command instanceof ContextPreservingRunnable) { + return command; + } + if (command instanceof AbstractRunnable) { + return new ContextPreservingAbstractRunnable((AbstractRunnable) command); + } + return new ContextPreservingRunnable(command); + } + + /** + * Unwraps a command that was previously wrapped by {@link #preserveContext(Runnable)}. + */ + public Runnable unwrap(Runnable command) { + if (command instanceof ContextPreservingAbstractRunnable) { + return ((ContextPreservingAbstractRunnable) command).unwrap(); + } + if (command instanceof ContextPreservingRunnable) { + return ((ContextPreservingRunnable) command).unwrap(); + } + return command; + } + public interface StoredContext extends AutoCloseable { @Override void close(); @@ -356,4 +386,104 @@ public final class ThreadContext implements Closeable, Writeable scheduleWithFixedDelay(Runnable command, TimeValue interval) { return scheduler.scheduleWithFixedDelay(new LoggingRunnable(command), interval.millis(), interval.millis(), TimeUnit.MILLISECONDS); } + /** + * Schedules a one-shot command to run after a given delay. The command is not run in the context of the calling thread. To preserve the + * context of the calling thread you may call threadPool.getThreadContext().preserveContext on the runnable before passing + * it to this method. + * + * @param delay delay before the task executes + * @param name the name of the thread pool on which to execute this task. SAME means "execute on the scheduler thread" which changes the + * meaning of the ScheduledFuture returned by this method. In that case the ScheduledFuture will complete only when the command + * completes. 
+ * @param command the command to run + * @return a ScheduledFuture who's get will return when the task is has been added to its target thread pool and throw an exception if + * the task is canceled before it was added to its target thread pool. Once the task has been added to its target thread pool + * the ScheduledFuture will cannot interact with it. + */ public ScheduledFuture schedule(TimeValue delay, String name, Runnable command) { if (!Names.SAME.equals(name)) { command = new ThreadedRunnable(command, executor(name)); diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java index cbf58bf9daab..1a582d48f6b7 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java @@ -27,6 +27,8 @@ import java.io.IOException; import java.util.Collections; import java.util.HashMap; +import static org.hamcrest.Matchers.sameInstance; + public class ThreadContextTests extends ESTestCase { public void testStashContext() { @@ -235,4 +237,71 @@ public class ThreadContextTests extends ESTestCase { } } + public void testPreserveContext() throws IOException { + try (ThreadContext threadContext = new ThreadContext(Settings.EMPTY)) { + Runnable withContext; + + // Create a runnable that should run with some header + try (ThreadContext.StoredContext ignored = threadContext.stashContext()) { + threadContext.putHeader("foo", "bar"); + withContext = threadContext.preserveContext(sometimesAbstractRunnable(() -> { + assertEquals("bar", threadContext.getHeader("foo")); + })); + } + + // We don't see the header outside of the runnable + assertNull(threadContext.getHeader("foo")); + + // But we do inside of it + withContext.run(); + } + } + + public void testPreserveContextKeepsOriginalContextWhenCalledTwice() throws IOException { + try 
(ThreadContext threadContext = new ThreadContext(Settings.EMPTY)) { + Runnable originalWithContext; + Runnable withContext; + + // Create a runnable that should run with some header + try (ThreadContext.StoredContext ignored = threadContext.stashContext()) { + threadContext.putHeader("foo", "bar"); + withContext = threadContext.preserveContext(sometimesAbstractRunnable(() -> { + assertEquals("bar", threadContext.getHeader("foo")); + })); + } + + // Now attempt to rewrap it + originalWithContext = withContext; + try (ThreadContext.StoredContext ignored = threadContext.stashContext()) { + threadContext.putHeader("foo", "zot"); + withContext = threadContext.preserveContext(withContext); + } + + // We get the original context inside the runnable + withContext.run(); + + // In fact the second wrapping didn't even change it + assertThat(withContext, sameInstance(originalWithContext)); + } + } + + /** + * Sometimes wraps a Runnable in an AbstractRunnable. + */ + private Runnable sometimesAbstractRunnable(Runnable r) { + if (random().nextBoolean()) { + return r; + } + return new AbstractRunnable() { + @Override + public void onFailure(Throwable t) { + throw new RuntimeException(t); + } + + @Override + protected void doRun() throws Exception { + r.run(); + } + }; + } } From 8c4aa75b0cc839d4c1ef77d79828f02c38d70199 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Wed, 16 Mar 2016 17:45:42 +0100 Subject: [PATCH 268/320] Added version 2.4.0 to Version --- core/src/main/java/org/elasticsearch/Version.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index 8b65adf170dc..93d38c612380 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -64,6 +64,8 @@ public class Version { public static final Version V_2_2_1 = new Version(V_2_2_1_ID, org.apache.lucene.util.Version.LUCENE_5_4_1); public static final 
int V_2_3_0_ID = 2030099; public static final Version V_2_3_0 = new Version(V_2_3_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0); + public static final int V_2_4_0_ID = 2040099; + public static final Version V_2_4_0 = new Version(V_2_4_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0); public static final int V_5_0_0_ID = 5000099; public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_6_0_0); public static final Version CURRENT = V_5_0_0; @@ -81,6 +83,8 @@ public class Version { switch (id) { case V_5_0_0_ID: return V_5_0_0; + case V_2_4_0_ID: + return V_2_4_0; case V_2_3_0_ID: return V_2_3_0; case V_2_2_1_ID: From 0a12e7bb5b1837fa6000e2f3761fedc37bcfe2af Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 16 Mar 2016 22:17:28 +0100 Subject: [PATCH 269/320] Revert "Added version 2.4.0 to Version" This reverts commit 8c4aa75b0cc839d4c1ef77d79828f02c38d70199. --- core/src/main/java/org/elasticsearch/Version.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index 93d38c612380..8b65adf170dc 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -64,8 +64,6 @@ public class Version { public static final Version V_2_2_1 = new Version(V_2_2_1_ID, org.apache.lucene.util.Version.LUCENE_5_4_1); public static final int V_2_3_0_ID = 2030099; public static final Version V_2_3_0 = new Version(V_2_3_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0); - public static final int V_2_4_0_ID = 2040099; - public static final Version V_2_4_0 = new Version(V_2_4_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0); public static final int V_5_0_0_ID = 5000099; public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_6_0_0); public static final Version CURRENT = V_5_0_0; @@ -83,8 +81,6 @@ public class Version { switch 
(id) { case V_5_0_0_ID: return V_5_0_0; - case V_2_4_0_ID: - return V_2_4_0; case V_2_3_0_ID: return V_2_3_0; case V_2_2_1_ID: From 7aeeb52cf63bab5590bbfe777a4d821f19f55865 Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Tue, 15 Mar 2016 17:43:48 -0400 Subject: [PATCH 270/320] Standardize state format type for global and index level metadata Currently, global and index level state format type can be configured through gateway.format. This commit removes the ability to configure format type for these states. Now we always store these states in SMILE format and ensure we always write them to disk in the most compact way. --- .../cluster/metadata/IndexMetaData.java | 20 +++++ .../cluster/metadata/MetaData.java | 27 ++++++ .../common/util/IndexFolderUpgrader.java | 19 +---- .../gateway/MetaStateService.java | 82 ++----------------- .../common/util/IndexFolderUpgraderTests.java | 29 +------ .../gateway/MetaDataStateFormatTests.java | 37 ++++++--- .../gateway/MetaStateServiceTests.java | 18 ++-- 7 files changed, 88 insertions(+), 144 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index 8c093a72ff3c..20ba36dd9101 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -46,6 +46,8 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.gateway.MetaDataStateFormat; import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.RestStatus; @@ -215,6 +217,7 @@ public class IndexMetaData implements Diffable, FromXContentBuild 
.numberOfShards(1).numberOfReplicas(0).build(); public static final String KEY_ACTIVE_ALLOCATIONS = "active_allocations"; + public static final String INDEX_STATE_FILE_PREFIX = "state-"; private final int numberOfShards; private final int numberOfReplicas; @@ -1023,4 +1026,21 @@ public class IndexMetaData implements Diffable, FromXContentBuild return builder.build(); } + private static final ToXContent.Params FORMAT_PARAMS = new MapParams(Collections.singletonMap("binary", "true")); + + /** + * State format for {@link IndexMetaData} to write to and load from disk + */ + public static final MetaDataStateFormat FORMAT = new MetaDataStateFormat(XContentType.SMILE, INDEX_STATE_FILE_PREFIX) { + + @Override + public void toXContent(XContentBuilder builder, IndexMetaData state) throws IOException { + Builder.toXContent(state, builder, FORMAT_PARAMS); + } + + @Override + public IndexMetaData fromXContent(XContentParser parser) throws IOException { + return Builder.fromXContent(parser); + } + }; } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 0beae6a77e8c..c19346cf74f6 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -51,6 +51,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.discovery.DiscoverySettings; +import org.elasticsearch.gateway.MetaDataStateFormat; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.store.IndexStoreConfig; @@ -153,6 +154,8 @@ public class MetaData implements Iterable, Diffable, Fr public static final String CONTEXT_MODE_GATEWAY = XContentContext.GATEWAY.toString(); + public static final String 
GLOBAL_STATE_FILE_PREFIX = "global-"; + private final String clusterUUID; private final long version; @@ -1160,4 +1163,28 @@ public class MetaData implements Iterable, Diffable, Fr return PROTO.readFrom(in); } } + + private final static ToXContent.Params FORMAT_PARAMS; + static { + Map params = new HashMap<>(2); + params.put("binary", "true"); + params.put(MetaData.CONTEXT_MODE_PARAM, MetaData.CONTEXT_MODE_GATEWAY); + FORMAT_PARAMS = new MapParams(params); + } + + /** + * State format for {@link MetaData} to write to and load from disk + */ + public final static MetaDataStateFormat FORMAT = new MetaDataStateFormat(XContentType.SMILE, GLOBAL_STATE_FILE_PREFIX) { + + @Override + public void toXContent(XContentBuilder builder, MetaData state) throws IOException { + Builder.toXContent(state, builder, FORMAT_PARAMS); + } + + @Override + public MetaData fromXContent(XContentParser parser) throws IOException { + return Builder.fromXContent(parser); + } + }; } diff --git a/core/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java b/core/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java index 54dac7447ebb..3640d3e4bec1 100644 --- a/core/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java +++ b/core/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java @@ -47,7 +47,6 @@ public class IndexFolderUpgrader { private final NodeEnvironment nodeEnv; private final Settings settings; private final ESLogger logger = Loggers.getLogger(IndexFolderUpgrader.class); - private final MetaDataStateFormat indexStateFormat = readOnlyIndexMetaDataStateFormat(); /** * Creates a new upgrader instance @@ -90,7 +89,7 @@ public class IndexFolderUpgrader { void upgrade(final String indexFolderName) throws IOException { for (NodeEnvironment.NodePath nodePath : nodeEnv.nodePaths()) { final Path indexFolderPath = nodePath.indicesPath.resolve(indexFolderName); - final IndexMetaData indexMetaData = indexStateFormat.loadLatestState(logger, 
indexFolderPath); + final IndexMetaData indexMetaData = IndexMetaData.FORMAT.loadLatestState(logger, indexFolderPath); if (indexMetaData != null) { final Index index = indexMetaData.getIndex(); if (needsUpgrade(index, indexFolderName)) { @@ -135,20 +134,4 @@ public class IndexFolderUpgrader { static boolean needsUpgrade(Index index, String indexFolderName) { return indexFolderName.equals(index.getUUID()) == false; } - - static MetaDataStateFormat readOnlyIndexMetaDataStateFormat() { - // NOTE: XContentType param is not used as we use the format read from the serialized index state - return new MetaDataStateFormat(XContentType.SMILE, MetaStateService.INDEX_STATE_FILE_PREFIX) { - - @Override - public void toXContent(XContentBuilder builder, IndexMetaData state) throws IOException { - throw new UnsupportedOperationException(); - } - - @Override - public IndexMetaData fromXContent(XContentParser parser) throws IOException { - return IndexMetaData.Builder.fromXContent(parser); - } - }; - } } diff --git a/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java b/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java index 1f4cc310fdbd..0edfb5631741 100644 --- a/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java +++ b/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java @@ -25,19 +25,13 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import java.io.IOException; import java.nio.file.Path; import java.util.ArrayList; -import java.util.HashMap; import 
java.util.List; -import java.util.Map; import java.util.function.Predicate; /** @@ -45,41 +39,12 @@ import java.util.function.Predicate; */ public class MetaStateService extends AbstractComponent { - static final String FORMAT_SETTING = "gateway.format"; - - static final String GLOBAL_STATE_FILE_PREFIX = "global-"; - public static final String INDEX_STATE_FILE_PREFIX = "state-"; - private final NodeEnvironment nodeEnv; - private final XContentType format; - private final ToXContent.Params formatParams; - private final ToXContent.Params gatewayModeFormatParams; - private final MetaDataStateFormat indexStateFormat; - private final MetaDataStateFormat globalStateFormat; - @Inject public MetaStateService(Settings settings, NodeEnvironment nodeEnv) { super(settings); this.nodeEnv = nodeEnv; - this.format = XContentType.fromMediaTypeOrFormat(settings.get(FORMAT_SETTING, "smile")); - if (this.format == XContentType.SMILE) { - Map params = new HashMap<>(); - params.put("binary", "true"); - formatParams = new ToXContent.MapParams(params); - Map gatewayModeParams = new HashMap<>(); - gatewayModeParams.put("binary", "true"); - gatewayModeParams.put(MetaData.CONTEXT_MODE_PARAM, MetaData.CONTEXT_MODE_GATEWAY); - gatewayModeFormatParams = new ToXContent.MapParams(gatewayModeParams); - } else { - formatParams = ToXContent.EMPTY_PARAMS; - Map gatewayModeParams = new HashMap<>(); - gatewayModeParams.put(MetaData.CONTEXT_MODE_PARAM, MetaData.CONTEXT_MODE_GATEWAY); - gatewayModeFormatParams = new ToXContent.MapParams(gatewayModeParams); - } - indexStateFormat = indexStateFormat(format, formatParams); - globalStateFormat = globalStateFormat(format, gatewayModeFormatParams); - } /** @@ -95,7 +60,7 @@ public class MetaStateService extends AbstractComponent { metaDataBuilder = MetaData.builder(); } for (String indexFolderName : nodeEnv.availableIndexFolders()) { - IndexMetaData indexMetaData = indexStateFormat.loadLatestState(logger, nodeEnv.resolveIndexFolder(indexFolderName)); + 
IndexMetaData indexMetaData = IndexMetaData.FORMAT.loadLatestState(logger, nodeEnv.resolveIndexFolder(indexFolderName)); if (indexMetaData != null) { metaDataBuilder.put(indexMetaData, false); } else { @@ -110,7 +75,7 @@ public class MetaStateService extends AbstractComponent { */ @Nullable IndexMetaData loadIndexState(Index index) throws IOException { - return indexStateFormat.loadLatestState(logger, nodeEnv.indexPaths(index)); + return IndexMetaData.FORMAT.loadLatestState(logger, nodeEnv.indexPaths(index)); } /** @@ -122,7 +87,7 @@ public class MetaStateService extends AbstractComponent { if (excludeIndexPathIdsPredicate.test(indexFolderName)) { continue; } - IndexMetaData indexMetaData = indexStateFormat.loadLatestState(logger, + IndexMetaData indexMetaData = IndexMetaData.FORMAT.loadLatestState(logger, nodeEnv.resolveIndexFolder(indexFolderName)); if (indexMetaData != null) { final String indexPathId = indexMetaData.getIndex().getUUID(); @@ -142,7 +107,7 @@ public class MetaStateService extends AbstractComponent { * Loads the global state, *without* index state, see {@link #loadFullState()} for that. */ MetaData loadGlobalState() throws IOException { - MetaData globalState = globalStateFormat.loadLatestState(logger, nodeEnv.nodeDataPaths()); + MetaData globalState = MetaData.FORMAT.loadLatestState(logger, nodeEnv.nodeDataPaths()); // ES 2.0 now requires units for all time and byte-sized settings, so we add the default unit if it's missing // TODO: can we somehow only do this for pre-2.0 cluster state? 
if (globalState != null) { @@ -167,7 +132,7 @@ public class MetaStateService extends AbstractComponent { final Index index = indexMetaData.getIndex(); logger.trace("[{}] writing state, reason [{}]", index, reason); try { - indexStateFormat.write(indexMetaData, indexMetaData.getVersion(), locations); + IndexMetaData.FORMAT.write(indexMetaData, indexMetaData.getVersion(), locations); } catch (Throwable ex) { logger.warn("[{}]: failed to write index state", ex, index); throw new IOException("failed to write state for [" + index + "]", ex); @@ -180,45 +145,10 @@ public class MetaStateService extends AbstractComponent { void writeGlobalState(String reason, MetaData metaData) throws Exception { logger.trace("[_global] writing state, reason [{}]", reason); try { - globalStateFormat.write(metaData, metaData.version(), nodeEnv.nodeDataPaths()); + MetaData.FORMAT.write(metaData, metaData.version(), nodeEnv.nodeDataPaths()); } catch (Throwable ex) { logger.warn("[_global]: failed to write global state", ex); throw new IOException("failed to write global state", ex); } } - - /** - * Returns a StateFormat that can read and write {@link MetaData} - */ - static MetaDataStateFormat globalStateFormat(XContentType format, final ToXContent.Params formatParams) { - return new MetaDataStateFormat(format, GLOBAL_STATE_FILE_PREFIX) { - - @Override - public void toXContent(XContentBuilder builder, MetaData state) throws IOException { - MetaData.Builder.toXContent(state, builder, formatParams); - } - - @Override - public MetaData fromXContent(XContentParser parser) throws IOException { - return MetaData.Builder.fromXContent(parser); - } - }; - } - - /** - * Returns a StateFormat that can read and write {@link IndexMetaData} - */ - static MetaDataStateFormat indexStateFormat(XContentType format, final ToXContent.Params formatParams) { - return new MetaDataStateFormat(format, INDEX_STATE_FILE_PREFIX) { - - @Override - public void toXContent(XContentBuilder builder, IndexMetaData state) 
throws IOException { - IndexMetaData.Builder.toXContent(state, builder, formatParams); } - - @Override - public IndexMetaData fromXContent(XContentParser parser) throws IOException { - return IndexMetaData.Builder.fromXContent(parser); - } - }; - } } diff --git a/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java b/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java index 01c6ec89c7c7..b158b961d9a0 100644 --- a/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java @@ -30,14 +30,9 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.gateway.MetaDataStateFormat; -import org.elasticsearch.gateway.MetaStateService; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.ShardId; @@ -63,25 +58,9 @@ import java.util.Locale; import java.util.Map; import java.util.Set; -import static org.hamcrest.core.Is.is; - @LuceneTestCase.SuppressFileSystems("ExtrasFS") public class IndexFolderUpgraderTests extends ESTestCase { - private static MetaDataStateFormat indexMetaDataStateFormat = - new MetaDataStateFormat(XContentType.SMILE, MetaStateService.INDEX_STATE_FILE_PREFIX) { - - @Override - public void toXContent(XContentBuilder builder, IndexMetaData state) throws IOException { - IndexMetaData.Builder.toXContent(state, builder, ToXContent.EMPTY_PARAMS); - } - - @Override - public 
IndexMetaData fromXContent(XContentParser parser) throws IOException { - return IndexMetaData.Builder.fromXContent(parser); - } - }; - /** * tests custom data paths are upgraded */ @@ -244,7 +223,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { assertEquals(indexFolders.size(), 1); // ensure index metadata is moved - IndexMetaData indexMetaData = indexMetaDataStateFormat.loadLatestState(logger, + IndexMetaData indexMetaData = IndexMetaData.FORMAT.loadLatestState(logger, nodeEnvironment.resolveIndexFolder(indexFolders.iterator().next())); assertNotNull(indexMetaData); Index index = indexMetaData.getIndex(); @@ -277,7 +256,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { .numberOfReplicas(0) .build(); try (NodeEnvironment nodeEnvironment = newNodeEnvironment()) { - indexMetaDataStateFormat.write(indexState, 1, nodeEnvironment.indexPaths(index)); + IndexMetaData.FORMAT.write(indexState, 1, nodeEnvironment.indexPaths(index)); assertFalse(IndexFolderUpgrader.needsUpgrade(index, index.getUUID())); } } @@ -286,7 +265,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { int numIdxFiles, int numTranslogFiles) throws IOException { final Index index = indexSettings.getIndex(); // ensure index state can be loaded - IndexMetaData loadLatestState = indexMetaDataStateFormat.loadLatestState(logger, nodeEnv.indexPaths(index)); + IndexMetaData loadLatestState = IndexMetaData.FORMAT.loadLatestState(logger, nodeEnv.indexPaths(index)); assertNotNull(loadLatestState); assertEquals(loadLatestState.getIndex(), index); for (int shardId = 0; shardId < indexSettings.getNumberOfShards(); shardId++) { @@ -326,7 +305,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { for (int i = 0; i < nodePaths.length; i++) { oldIndexPaths[i] = nodePaths[i].indicesPath.resolve(indexSettings.getIndex().getName()); } - indexMetaDataStateFormat.write(indexSettings.getIndexMetaData(), 1, oldIndexPaths); + 
IndexMetaData.FORMAT.write(indexSettings.getIndexMetaData(), 1, oldIndexPaths); for (int id = 0; id < indexSettings.getNumberOfShards(); id++) { Path oldIndexPath = randomFrom(oldIndexPaths); ShardId shardId = new ShardId(indexSettings.getIndex(), id); diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java index dfd8ba51a541..ef9fe55c92d3 100644 --- a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java @@ -234,8 +234,7 @@ public class MetaDataStateFormatTests extends ESTestCase { // If the latest version doesn't use the legacy format while previous versions do, then fail hard public void testLatestVersionDoesNotUseLegacy() throws IOException { - final ToXContent.Params params = ToXContent.EMPTY_PARAMS; - MetaDataStateFormat format = MetaStateService.globalStateFormat(randomFrom(XContentType.values()), params); + MetaDataStateFormat format = metaDataFormat(randomFrom(XContentType.values())); final Path[] dirs = new Path[2]; dirs[0] = createTempDir(); dirs[1] = createTempDir(); @@ -252,9 +251,10 @@ public class MetaDataStateFormatTests extends ESTestCase { for (int i = 0; i < numLegacyFiles; ++i) { final Path dir2 = randomFrom(dirs); final int v2 = v1 + 1 + randomInt(10); - try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(format.format(), Files.newOutputStream(dir2.resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve(MetaStateService.GLOBAL_STATE_FILE_PREFIX + v2)))) { + try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(format.format(), + Files.newOutputStream(dir2.resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve(MetaData.GLOBAL_STATE_FILE_PREFIX + v2)))) { xcontentBuilder.startObject(); - MetaData.Builder.toXContent(randomMeta(), xcontentBuilder, params); + format.toXContent(xcontentBuilder, randomMeta()); 
xcontentBuilder.endObject(); } } @@ -279,8 +279,7 @@ public class MetaDataStateFormatTests extends ESTestCase { // If both the legacy and the new format are available for the latest version, prefer the new format public void testPrefersNewerFormat() throws IOException { - final ToXContent.Params params = ToXContent.EMPTY_PARAMS; - MetaDataStateFormat format = MetaStateService.globalStateFormat(randomFrom(XContentType.values()), params); + MetaDataStateFormat format = metaDataFormat(randomFrom(XContentType.values())); final Path[] dirs = new Path[2]; dirs[0] = createTempDir(); dirs[1] = createTempDir(); @@ -296,9 +295,10 @@ public class MetaDataStateFormatTests extends ESTestCase { final Path dir2 = randomFrom(dirs); MetaData meta2 = randomMeta(); assertFalse(meta2.clusterUUID().equals(uuid)); - try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(format.format(), Files.newOutputStream(dir2.resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve(MetaStateService.GLOBAL_STATE_FILE_PREFIX + v)))) { + try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(MetaData.FORMAT.format(), + Files.newOutputStream(dir2.resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve(MetaData.GLOBAL_STATE_FILE_PREFIX + v)))) { xcontentBuilder.startObject(); - MetaData.Builder.toXContent(randomMeta(), xcontentBuilder, params); + format.toXContent(xcontentBuilder, randomMeta()); xcontentBuilder.endObject(); } @@ -312,7 +312,6 @@ public class MetaDataStateFormatTests extends ESTestCase { } public void testLoadState() throws IOException { - final ToXContent.Params params = ToXContent.EMPTY_PARAMS; final Path[] dirs = new Path[randomIntBetween(1, 5)]; int numStates = randomIntBetween(1, 5); int numLegacy = randomIntBetween(0, numStates); @@ -321,7 +320,7 @@ public class MetaDataStateFormatTests extends ESTestCase { meta.add(randomMeta()); } Set corruptedFiles = new HashSet<>(); - MetaDataStateFormat format = 
MetaStateService.globalStateFormat(randomFrom(XContentType.values()), params); + MetaDataStateFormat format = metaDataFormat(randomFrom(XContentType.values())); for (int i = 0; i < dirs.length; i++) { dirs[i] = createTempDir(); Files.createDirectories(dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME)); @@ -331,9 +330,10 @@ public class MetaDataStateFormatTests extends ESTestCase { Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-"+j); Files.createFile(file); // randomly create 0-byte files -- there is extra logic to skip them } else { - try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(type, Files.newOutputStream(dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + j)))) { + try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(MetaData.FORMAT.format(), + Files.newOutputStream(dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + j)))) { xcontentBuilder.startObject(); - MetaData.Builder.toXContent(meta.get(j), xcontentBuilder, params); + format.toXContent(xcontentBuilder, meta.get(j)); xcontentBuilder.endObject(); } } @@ -380,7 +380,20 @@ public class MetaDataStateFormatTests extends ESTestCase { assertThat(ExceptionsHelper.unwrap(ex, CorruptStateException.class), notNullValue()); } } + } + private static MetaDataStateFormat metaDataFormat(XContentType format) { + return new MetaDataStateFormat(format, MetaData.GLOBAL_STATE_FILE_PREFIX) { + @Override + public void toXContent(XContentBuilder builder, MetaData state) throws IOException { + MetaData.Builder.toXContent(state, builder, ToXContent.EMPTY_PARAMS); + } + + @Override + public MetaData fromXContent(XContentParser parser) throws IOException { + return MetaData.Builder.fromXContent(parser); + } + }; } private MetaData randomMeta() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaStateServiceTests.java b/core/src/test/java/org/elasticsearch/gateway/MetaStateServiceTests.java 
index 5f38456d2d11..82c38748a488 100644 --- a/core/src/test/java/org/elasticsearch/gateway/MetaStateServiceTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/MetaStateServiceTests.java @@ -41,7 +41,7 @@ public class MetaStateServiceTests extends ESTestCase { public void testWriteLoadIndex() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { - MetaStateService metaStateService = new MetaStateService(randomSettings(), env); + MetaStateService metaStateService = new MetaStateService(Settings.EMPTY, env); IndexMetaData index = IndexMetaData.builder("test1").settings(indexSettings).build(); metaStateService.writeIndex("test_write", index); @@ -51,14 +51,14 @@ public class MetaStateServiceTests extends ESTestCase { public void testLoadMissingIndex() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { - MetaStateService metaStateService = new MetaStateService(randomSettings(), env); + MetaStateService metaStateService = new MetaStateService(Settings.EMPTY, env); assertThat(metaStateService.loadIndexState(new Index("test1", "test1UUID")), nullValue()); } } public void testWriteLoadGlobal() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { - MetaStateService metaStateService = new MetaStateService(randomSettings(), env); + MetaStateService metaStateService = new MetaStateService(Settings.EMPTY, env); MetaData metaData = MetaData.builder() .persistentSettings(Settings.builder().put("test1", "value1").build()) @@ -70,7 +70,7 @@ public class MetaStateServiceTests extends ESTestCase { public void testWriteGlobalStateWithIndexAndNoIndexIsLoaded() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { - MetaStateService metaStateService = new MetaStateService(randomSettings(), env); + MetaStateService metaStateService = new MetaStateService(Settings.EMPTY, env); MetaData metaData = MetaData.builder() .persistentSettings(Settings.builder().put("test1", "value1").build()) @@ -86,7 +86,7 @@ public 
class MetaStateServiceTests extends ESTestCase { public void testLoadGlobal() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { - MetaStateService metaStateService = new MetaStateService(randomSettings(), env); + MetaStateService metaStateService = new MetaStateService(Settings.EMPTY, env); IndexMetaData index = IndexMetaData.builder("test1").settings(indexSettings).build(); MetaData metaData = MetaData.builder() @@ -103,12 +103,4 @@ public class MetaStateServiceTests extends ESTestCase { assertThat(loadedState.index("test1"), equalTo(index)); } } - - private Settings randomSettings() { - Settings.Builder builder = Settings.builder(); - if (randomBoolean()) { - builder.put(MetaStateService.FORMAT_SETTING, randomFrom(XContentType.values()).shortName()); - } - return builder.build(); - } } From d1eba4baf28456ba729c6aae16bf1edc3d7090be Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Wed, 16 Mar 2016 12:54:09 -0400 Subject: [PATCH 271/320] fix tests --- .../org/elasticsearch/gateway/MetaDataStateFormatTests.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java index ef9fe55c92d3..bac257af2a95 100644 --- a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java @@ -254,7 +254,7 @@ public class MetaDataStateFormatTests extends ESTestCase { try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(format.format(), Files.newOutputStream(dir2.resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve(MetaData.GLOBAL_STATE_FILE_PREFIX + v2)))) { xcontentBuilder.startObject(); - format.toXContent(xcontentBuilder, randomMeta()); + MetaData.Builder.toXContent(randomMeta(), xcontentBuilder, ToXContent.EMPTY_PARAMS); xcontentBuilder.endObject(); } } @@ -298,7 +298,7 @@ public class 
MetaDataStateFormatTests extends ESTestCase { try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(MetaData.FORMAT.format(), Files.newOutputStream(dir2.resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve(MetaData.GLOBAL_STATE_FILE_PREFIX + v)))) { xcontentBuilder.startObject(); - format.toXContent(xcontentBuilder, randomMeta()); + MetaData.Builder.toXContent(randomMeta(), xcontentBuilder, ToXContent.EMPTY_PARAMS); xcontentBuilder.endObject(); } @@ -333,7 +333,7 @@ public class MetaDataStateFormatTests extends ESTestCase { try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(MetaData.FORMAT.format(), Files.newOutputStream(dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + j)))) { xcontentBuilder.startObject(); - format.toXContent(xcontentBuilder, meta.get(j)); + MetaData.Builder.toXContent(meta.get(j), xcontentBuilder, ToXContent.EMPTY_PARAMS); xcontentBuilder.endObject(); } } From da165f425f6fb6f0a3f68ba8b741f7732858219f Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Wed, 16 Mar 2016 12:54:52 -0400 Subject: [PATCH 272/320] update migration doc for removing gateway.format setting --- docs/reference/migration/migrate_5_0/settings.asciidoc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/reference/migration/migrate_5_0/settings.asciidoc b/docs/reference/migration/migrate_5_0/settings.asciidoc index 87ea356ec7a0..6dd15be0ed44 100644 --- a/docs/reference/migration/migrate_5_0/settings.asciidoc +++ b/docs/reference/migration/migrate_5_0/settings.asciidoc @@ -17,6 +17,11 @@ method. The `name` setting has been removed and is replaced by `node.name`. Usage of `-Dname=some_node_name` is not supported anymore. +==== Gateway settings + +The `gateway.format` setting for configuring global and index state serialization +format has been removed. By default, `smile` is used as the format. 
+ ==== Transport Settings All settings with a `netty` infix have been replaced by their already existing From 44b3dc95a0fc19d36e5613bdbe20587fea3ea8cc Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Wed, 16 Mar 2016 13:27:22 -0400 Subject: [PATCH 273/320] remove irrelvant tests --- .../gateway/MetaDataStateFormatTests.java | 83 +------------------ 1 file changed, 1 insertion(+), 82 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java index bac257af2a95..115e5b68ff0a 100644 --- a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java @@ -32,7 +32,6 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -63,7 +62,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.startsWith; @LuceneTestCase.SuppressFileSystems("ExtrasFS") // TODO: fix test to work with ExtrasFS public class MetaDataStateFormatTests extends ESTestCase { @@ -232,85 +230,6 @@ public class MetaDataStateFormatTests extends ESTestCase { } } - // If the latest version doesn't use the legacy format while previous versions do, then fail hard - public void testLatestVersionDoesNotUseLegacy() throws IOException { - MetaDataStateFormat format = metaDataFormat(randomFrom(XContentType.values())); - final Path[] dirs = new Path[2]; - dirs[0] = createTempDir(); - 
dirs[1] = createTempDir(); - for (Path dir : dirs) { - Files.createDirectories(dir.resolve(MetaDataStateFormat.STATE_DIR_NAME)); - } - final Path dir1 = randomFrom(dirs); - final int v1 = randomInt(10); - // write a first state file in the new format - format.write(randomMeta(), v1, dir1); - - // write older state files in the old format but with a newer version - final int numLegacyFiles = randomIntBetween(1, 5); - for (int i = 0; i < numLegacyFiles; ++i) { - final Path dir2 = randomFrom(dirs); - final int v2 = v1 + 1 + randomInt(10); - try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(format.format(), - Files.newOutputStream(dir2.resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve(MetaData.GLOBAL_STATE_FILE_PREFIX + v2)))) { - xcontentBuilder.startObject(); - MetaData.Builder.toXContent(randomMeta(), xcontentBuilder, ToXContent.EMPTY_PARAMS); - xcontentBuilder.endObject(); - } - } - - try { - format.loadLatestState(logger, dirs); - fail("latest version can not be read"); - } catch (IllegalStateException ex) { - assertThat(ex.getMessage(), startsWith("Could not find a state file to recover from among ")); - } - // write the next state file in the new format and ensure it get's a higher ID - final MetaData meta = randomMeta(); - format.write(meta, v1, dirs); - final MetaData metaData = format.loadLatestState(logger, dirs); - assertEquals(meta.clusterUUID(), metaData.clusterUUID()); - final Path path = randomFrom(dirs); - final Path[] files = FileSystemUtils.files(path.resolve("_state")); - assertEquals(1, files.length); - assertEquals("global-" + format.findMaxStateId("global-", dirs) + ".st", files[0].getFileName().toString()); - - } - - // If both the legacy and the new format are available for the latest version, prefer the new format - public void testPrefersNewerFormat() throws IOException { - MetaDataStateFormat format = metaDataFormat(randomFrom(XContentType.values())); - final Path[] dirs = new Path[2]; - dirs[0] = createTempDir(); - 
dirs[1] = createTempDir(); - for (Path dir : dirs) { - Files.createDirectories(dir.resolve(MetaDataStateFormat.STATE_DIR_NAME)); - } - final long v = randomInt(10); - - MetaData meta = randomMeta(); - String uuid = meta.clusterUUID(); - - // write a first state file in the old format - final Path dir2 = randomFrom(dirs); - MetaData meta2 = randomMeta(); - assertFalse(meta2.clusterUUID().equals(uuid)); - try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(MetaData.FORMAT.format(), - Files.newOutputStream(dir2.resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve(MetaData.GLOBAL_STATE_FILE_PREFIX + v)))) { - xcontentBuilder.startObject(); - MetaData.Builder.toXContent(randomMeta(), xcontentBuilder, ToXContent.EMPTY_PARAMS); - xcontentBuilder.endObject(); - } - - // write a second state file in the new format but with the same version - format.write(meta, v, dirs); - - MetaData state = format.loadLatestState(logger, dirs); - final Path path = randomFrom(dirs); - assertTrue(Files.exists(path.resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + (v+1) + ".st"))); - assertEquals(state.clusterUUID(), uuid); - } - public void testLoadState() throws IOException { final Path[] dirs = new Path[randomIntBetween(1, 5)]; int numStates = randomIntBetween(1, 5); @@ -330,7 +249,7 @@ public class MetaDataStateFormatTests extends ESTestCase { Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-"+j); Files.createFile(file); // randomly create 0-byte files -- there is extra logic to skip them } else { - try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(MetaData.FORMAT.format(), + try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(type, Files.newOutputStream(dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + j)))) { xcontentBuilder.startObject(); MetaData.Builder.toXContent(meta.get(j), xcontentBuilder, ToXContent.EMPTY_PARAMS); From 22e12ab7c3296368091b7f2719cb7e60b79a902a 
Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Wed, 16 Mar 2016 17:01:47 -0400 Subject: [PATCH 274/320] cleanup request parsing in RestSearchAction --- .../rest/action/search/RestSearchAction.java | 15 +++++++-------- .../rest/action/support/RestActions.java | 11 ----------- 2 files changed, 7 insertions(+), 19 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 6eedb0aea044..9d533d15ff20 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -92,7 +92,7 @@ public class RestSearchAction extends BaseRestHandler { @Override public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) throws IOException { SearchRequest searchRequest = new SearchRequest(); - RestSearchAction.parseSearchRequest(searchRequest, queryRegistry, request, parseFieldMatcher, aggParsers, suggesters, null); + parseSearchRequest(searchRequest, queryRegistry, request, parseFieldMatcher, aggParsers, suggesters, null); client.search(searchRequest, new RestStatusToXContentListener<>(channel)); } @@ -123,16 +123,15 @@ public class RestSearchAction extends BaseRestHandler { } if (restContent != null) { QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); - if (isTemplateRequest) { - try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) { - context.reset(parser); - context.parseFieldMatcher(parseFieldMatcher); + try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) { + context.reset(parser); + context.parseFieldMatcher(parseFieldMatcher); + if (isTemplateRequest) { Template template = TemplateQueryParser.parse(parser, context.parseFieldMatcher(), "params", "template"); searchRequest.template(template); + 
} else { + searchRequest.source().parseXContent(parser, context, aggParsers, suggesters); } - } else { - RestActions.parseRestSearchSource(searchRequest.source(), restContent, indicesQueriesRegistry, parseFieldMatcher, - aggParsers, suggesters); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java b/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java index 950828639f7b..550636643439 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java @@ -115,17 +115,6 @@ public class RestActions { return queryBuilder; } - public static void parseRestSearchSource(SearchSourceBuilder source, BytesReference sourceBytes, - IndicesQueriesRegistry queryRegistry, ParseFieldMatcher parseFieldMatcher, - AggregatorParsers aggParsers, Suggesters suggesters) - throws IOException { - XContentParser parser = XContentFactory.xContent(sourceBytes).createParser(sourceBytes); - QueryParseContext queryParseContext = new QueryParseContext(queryRegistry); - queryParseContext.reset(parser); - queryParseContext.parseFieldMatcher(parseFieldMatcher); - source.parseXContent(parser, queryParseContext, aggParsers, suggesters); - } - /** * Get Rest content from either payload or source parameter * @param request Rest request From b46dd3f01c32dd81c75255de3d66e6ccf34483cc Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 16 Mar 2016 18:40:44 -0400 Subject: [PATCH 275/320] es.path.conf can not be empty --- .../resources/packaging/scripts/packaging_test_utils.bash | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash index 852d03ea6f6b..64006483f68b 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash +++ 
b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash @@ -285,6 +285,9 @@ run_elasticsearch_service() { if [ -f "/tmp/elasticsearch/bin/elasticsearch" ]; then if [ -z "$CONF_DIR" ]; then local CONF_DIR="" + local ES_PATH_CONF="" + else + local ES_PATH_CONF="-Ees.path.conf=$CONF_DIR" fi # we must capture the exit code to compare so we don't want to start as background process in case we expect something other than 0 local background="" @@ -303,7 +306,7 @@ run_elasticsearch_service() { # This line is attempting to emulate the on login behavior of /usr/share/upstart/sessions/jayatana.conf [ -f /usr/share/java/jayatanaag.jar ] && export JAVA_TOOL_OPTIONS="-javaagent:/usr/share/java/jayatanaag.jar" # And now we can start Elasticsearch normally, in the background (-d) and with a pidfile (-p). -$timeoutCommand/tmp/elasticsearch/bin/elasticsearch $background -p /tmp/elasticsearch/elasticsearch.pid -Ees.path.conf=$CONF_DIR $commandLineArgs +$timeoutCommand/tmp/elasticsearch/bin/elasticsearch $background -p /tmp/elasticsearch/elasticsearch.pid $ES_PATH_CONF $commandLineArgs BASH [ "$status" -eq "$expectedStatus" ] elif is_systemd; then From da24bfe5421972c14874f8420689a480642228d2 Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Wed, 16 Mar 2016 19:40:25 -0400 Subject: [PATCH 276/320] simplify handling top-level suggest results --- .../action/suggest/RestSuggestAction.java | 2 +- .../controller/SearchPhaseController.java | 2 +- .../internal/InternalSearchResponse.java | 2 +- .../search/query/QuerySearchResult.java | 2 +- .../elasticsearch/search/suggest/Suggest.java | 45 +++++++------------ .../search/suggest/SuggestPhase.java | 2 +- 6 files changed, 21 insertions(+), 34 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java b/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java index 291eb69254b6..53d9e668de1a 100644 --- 
a/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java @@ -97,7 +97,7 @@ public class RestSuggestAction extends BaseRestHandler { buildBroadcastShardsHeader(builder, request, response); Suggest suggest = response.getSuggest(); if (suggest != null) { - suggest.toXContent(builder, request); + suggest.toInnerXContent(builder, request); } builder.endObject(); return new BytesRestResponse(restStatus, builder); diff --git a/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java b/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java index 68343ba59592..d5d4607fba97 100644 --- a/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java +++ b/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java @@ -386,7 +386,7 @@ public class SearchPhaseController extends AbstractComponent { Suggest.group(groupedSuggestions, shardResult); } - suggest = hasSuggestions ? new Suggest(Suggest.Fields.SUGGEST, Suggest.reduce(groupedSuggestions)) : null; + suggest = hasSuggestions ? 
new Suggest(Suggest.reduce(groupedSuggestions)) : null; } // merge addAggregation diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java index b8255e0bb526..1a2e1f701914 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java +++ b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java @@ -134,7 +134,7 @@ public class InternalSearchResponse implements Streamable, ToXContent { aggregations = InternalAggregations.readAggregations(in); } if (in.readBoolean()) { - suggest = Suggest.readSuggest(Suggest.Fields.SUGGEST, in); + suggest = Suggest.readSuggest(in); } timedOut = in.readBoolean(); diff --git a/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java b/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java index 9223eb5a82d6..2b82633ebfda 100644 --- a/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java +++ b/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java @@ -207,7 +207,7 @@ public class QuerySearchResult extends QuerySearchResultProvider { this.pipelineAggregators = pipelineAggregators; } if (in.readBoolean()) { - suggest = Suggest.readSuggest(Suggest.Fields.SUGGEST, in); + suggest = Suggest.readSuggest(in); } searchTimedOut = in.readBoolean(); terminatedEarly = in.readOptionalBoolean(); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java b/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java index f2a217b14c81..f9c7092fbf18 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java @@ -46,9 +46,7 @@ import java.util.Map; */ public class Suggest implements Iterable>>, Streamable, ToXContent { - public static class Fields { - public static final XContentBuilderString SUGGEST 
= new XContentBuilderString("suggest"); - } + private static final XContentBuilderString NAME = new XContentBuilderString("suggest"); private static final Comparator

      + * @throws IOException + * if the index cannot be read + */ + public Relation isFieldWithinQuery(String fieldName, Object from, Object to, boolean includeLower, boolean includeUpper, + DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException { + MappedFieldType mappedFieldType = mapperService.fullName(fieldName); + FieldStats fieldStats = get(fieldName); + if (fieldStats == null) { + // No fieldStats for the field so the field doesn't exist on + // this shard, so relation is DISJOINT + return Relation.DISJOINT; + } else { + // Convert the from and to values to Strings so they can be used + // in the IndexConstraints. Since DateTime is represented as a + // Long field in Lucene we need to use the millisecond value of + // the DateTime in that case + String fromString = null; + if (from != null) { + if (mappedFieldType instanceof DateFieldType) { + long millis = ((DateFieldType) mappedFieldType).parseToMilliseconds(from, !includeLower, timeZone, dateMathParser); + fromString = fieldStats.stringValueOf(millis, null); + } else if (mappedFieldType instanceof IpFieldType) { + if (from instanceof BytesRef) { + from = ((BytesRef) from).utf8ToString(); + } + long ipAsLong = ((IpFieldType) mappedFieldType).value(from); + fromString = fieldStats.stringValueOf(ipAsLong, null); + } else { + fromString = fieldStats.stringValueOf(from, null); + } + } + String toString = null; + if (to != null) { + if (mappedFieldType instanceof DateFieldType) { + long millis = ((DateFieldType) mappedFieldType).parseToMilliseconds(to, includeUpper, timeZone, dateMathParser); + toString = fieldStats.stringValueOf(millis, null); + } else if (mappedFieldType instanceof IpFieldType) { + if (to instanceof BytesRef) { + to = ((BytesRef) to).utf8ToString(); + } + long ipAsLong = ((IpFieldType) mappedFieldType).value(to); + toString = fieldStats.stringValueOf(ipAsLong, null); + } else { + toString = fieldStats.stringValueOf(to, null); + } + } + if ((from == null || fieldStats + 
.match(new IndexConstraint(fieldName, Property.MIN, includeLower ? Comparison.GTE : Comparison.GT, fromString))) + && (to == null || fieldStats.match( + new IndexConstraint(fieldName, Property.MAX, includeUpper ? Comparison.LTE : Comparison.LT, toString)))) { + // If the min and max terms for the field are both within + // the query range then all documents will match so relation is + // WITHIN + return Relation.WITHIN; + } else if ((to != null && fieldStats + .match(new IndexConstraint(fieldName, Property.MIN, includeUpper ? Comparison.GT : Comparison.GTE, toString))) + || (from != null && fieldStats.match( + new IndexConstraint(fieldName, Property.MAX, includeLower ? Comparison.LT : Comparison.LTE, fromString)))) { + // If the min and max terms are both outside the query range + // then no document will match so relation is DISJOINT (N.B. + // since from <= to we only need + // to check one bould for each side of the query range) + return Relation.DISJOINT; + } + } + // Range of terms doesn't match any of the constraints so must INTERSECT + return Relation.INTERSECTS; + } + + /** + * An enum used to describe the relation between the range of terms in a + * shard when compared with a query range + */ + public static enum Relation { + WITHIN, INTERSECTS, DISJOINT; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java index 048c58297cba..724c37fcfcd7 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java @@ -417,10 +417,15 @@ public class DateFieldMapper extends NumberFieldMapper { } public long parseToMilliseconds(Object value, boolean inclusive, @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) { + if (value instanceof Long) { + return ((Long) value).longValue(); + } + DateMathParser dateParser = 
dateMathParser(); if (forcedDateParser != null) { dateParser = forcedDateParser; } + String strValue; if (value instanceof BytesRef) { strValue = ((BytesRef) value).utf8ToString(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java index 9a4cf70782bd..2ffb5d4ecf5f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java @@ -22,12 +22,14 @@ package org.elasticsearch.index.mapper.ip; import org.apache.lucene.analysis.LegacyNumericTokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.Terms; import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.Version; +import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Numbers; @@ -262,6 +264,13 @@ public class IpFieldMapper extends NumberFieldMapper { iValue + iSim, true, true); } + + @Override + public FieldStats stats(Terms terms, int maxDoc) throws IOException { + long minValue = LegacyNumericUtils.getMinLong(terms); + long maxValue = LegacyNumericUtils.getMaxLong(terms); + return new FieldStats.Ip(maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue); + } } protected IpFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java index e057aff06b1d..11164659b3f7 100644 --- 
a/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.query; import org.elasticsearch.client.Client; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.fieldstats.FieldStatsProvider; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.ScriptService; @@ -31,6 +32,7 @@ public class QueryRewriteContext { protected final IndexSettings indexSettings; protected final IndicesQueriesRegistry indicesQueriesRegistry; protected final QueryParseContext parseContext; + protected FieldStatsProvider fieldStatsProvider; public QueryRewriteContext(IndexSettings indexSettings, ScriptService scriptService, IndicesQueriesRegistry indicesQueriesRegistry) { this.scriptService = scriptService; @@ -39,6 +41,14 @@ public class QueryRewriteContext { this.parseContext = new QueryParseContext(indicesQueriesRegistry); } + public void setFieldStatsProvider(FieldStatsProvider fieldStatsProvider) { + this.fieldStatsProvider = fieldStatsProvider; + } + + public FieldStatsProvider getFieldStatsProvider() { + return fieldStatsProvider; + } + /** * Returns a clients to fetch resources from local or remove nodes. 
*/ diff --git a/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java index cd99bec0f745..2a627c545905 100644 --- a/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java @@ -22,6 +22,10 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.action.fieldstats.FieldStats; +import org.elasticsearch.action.fieldstats.IndexConstraint; +import org.elasticsearch.action.fieldstats.IndexConstraint.Comparison; +import org.elasticsearch.action.fieldstats.IndexConstraint.Property; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -30,6 +34,7 @@ import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.fieldstats.FieldStatsProvider; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.core.DateFieldMapper; import org.joda.time.DateTimeZone; @@ -253,6 +258,48 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i return NAME; } + @Override + protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException { + FieldStatsProvider fieldStatsProvider = queryShardContext.getFieldStatsProvider(); + // If the fieldStatsProvider is null we are not on the shard and cannot + // rewrite so just return without rewriting + if (fieldStatsProvider != null) { + DateMathParser dateMathParser = format == null ? 
null : new DateMathParser(format); + FieldStatsProvider.Relation relation = fieldStatsProvider.isFieldWithinQuery(fieldName, from, to, includeUpper, includeLower, + timeZone, dateMathParser); + switch (relation) { + case DISJOINT: + return new MatchNoneQueryBuilder(); + case WITHIN: + FieldStats fieldStats = fieldStatsProvider.get(fieldName); + if (!(fieldStats.getMinValue().equals(from) && fieldStats.getMaxValue().equals(to) && includeUpper && includeLower)) { + // Rebuild the range query with the bounds for this shard. + // The includeLower/Upper values are preserved only if the + // bound has not been changed by the rewrite + RangeQueryBuilder newRangeQuery = new RangeQueryBuilder(fieldName); + String dateFormatString = format == null ? null : format.format(); + if (fieldStats.getMinValue().equals(fieldStats.getMaxValue())) { + newRangeQuery.from(fieldStats.getMinValue(), true); + newRangeQuery.to(fieldStats.getMaxValue(), true); + } else { + newRangeQuery.from(fieldStats.getMinValue(), includeLower || fieldStats.match(new IndexConstraint(fieldName, + Property.MIN, Comparison.GT, fieldStats.stringValueOf(from, dateFormatString)))); + newRangeQuery.to(fieldStats.getMaxValue(), includeUpper || fieldStats.match(new IndexConstraint(fieldName, + Property.MAX, Comparison.LT, fieldStats.stringValueOf(to, dateFormatString)))); + } + newRangeQuery.format = format; + newRangeQuery.timeZone = timeZone; + return newRangeQuery; + } else { + return this; + } + case INTERSECTS: + break; + } + } + return this; + } + @Override protected Query doToQuery(QueryShardContext context) throws IOException { Query query = null; diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index b0e1bbdbd2bc..f587e7212cf1 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -542,6 +542,7 @@ public class 
IndicesService extends AbstractLifecycleComponent i * @param indexSettings the shards index settings. * @throws IOException if an IOException occurs */ + @Override public void deleteShardStore(String reason, ShardLock lock, IndexSettings indexSettings) throws IOException { ShardId shardId = lock.getShardId(); logger.trace("{} deleting shard reason [{}]", shardId, reason); @@ -654,6 +655,7 @@ public class IndicesService extends AbstractLifecycleComponent i /** * Adds a pending delete for the given index shard. */ + @Override public void addPendingDelete(ShardId shardId, IndexSettings settings) { if (shardId == null) { throw new IllegalArgumentException("shardId must not be null"); diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 8a400418eb7c..1694533f99af 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -49,6 +49,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.fieldstats.FieldStatsProvider; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.search.stats.ShardSearchStats; @@ -241,7 +242,7 @@ public class SearchService extends AbstractLifecycleComponent imp FutureUtils.cancel(keepAliveReaper); } - public DfsSearchResult executeDfsPhase(ShardSearchRequest request) { + public DfsSearchResult executeDfsPhase(ShardSearchRequest request) throws IOException { final SearchContext context = createAndPutContext(request); try { contextProcessing(context); @@ -270,7 +271,7 @@ public class SearchService extends AbstractLifecycleComponent imp } } - public QuerySearchResultProvider executeQueryPhase(ShardSearchRequest request) 
{ + public QuerySearchResultProvider executeQueryPhase(ShardSearchRequest request) throws IOException { final SearchContext context = createAndPutContext(request); final ShardSearchStats shardSearchStats = context.indexShard().searchService(); try { @@ -362,7 +363,7 @@ public class SearchService extends AbstractLifecycleComponent imp } } - public QueryFetchSearchResult executeFetchPhase(ShardSearchRequest request) { + public QueryFetchSearchResult executeFetchPhase(ShardSearchRequest request) throws IOException { final SearchContext context = createAndPutContext(request); contextProcessing(context); try { @@ -519,7 +520,7 @@ public class SearchService extends AbstractLifecycleComponent imp return context; } - final SearchContext createAndPutContext(ShardSearchRequest request) { + final SearchContext createAndPutContext(ShardSearchRequest request) throws IOException { SearchContext context = createContext(request, null); boolean success = false; try { @@ -537,7 +538,7 @@ public class SearchService extends AbstractLifecycleComponent imp } } - final SearchContext createContext(ShardSearchRequest request, @Nullable Engine.Searcher searcher) { + final SearchContext createContext(ShardSearchRequest request, @Nullable Engine.Searcher searcher) throws IOException { IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex()); IndexShard indexShard = indexService.getShard(request.shardId().getId()); SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(), indexShard.shardId()); @@ -548,6 +549,8 @@ public class SearchService extends AbstractLifecycleComponent imp indexService, indexShard, scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter(), parseFieldMatcher, defaultSearchTimeout, fetchPhase); + context.getQueryShardContext().setFieldStatsProvider(new FieldStatsProvider(engineSearcher, indexService.mapperService())); + request.rewrite(context.getQueryShardContext()); 
SearchContext.setCurrent(context); try { if (request.scroll() != null) { diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index e256838b756c..a20ec535238b 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -40,6 +40,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregatorBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -729,6 +730,59 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ return ext; } + /** + * Rewrites this search source builder into its primitive form. e.g. by + * rewriting the QueryBuilder. If the builder did not change the identity + * reference must be returned otherwise the builder will be rewritten + * infinitely. 
+ */ + public SearchSourceBuilder rewrite(QueryShardContext context) throws IOException { + assert (this.equals(shallowCopy(queryBuilder, postQueryBuilder))); + QueryBuilder queryBuilder = null; + if (this.queryBuilder != null) { + queryBuilder = this.queryBuilder.rewrite(context); + } + QueryBuilder postQueryBuilder = null; + if (this.postQueryBuilder != null) { + postQueryBuilder = this.postQueryBuilder.rewrite(context); + } + boolean rewritten = queryBuilder != this.queryBuilder || postQueryBuilder != this.postQueryBuilder; + if (rewritten) { + return shallowCopy(queryBuilder, postQueryBuilder); + } + return this; + } + + private SearchSourceBuilder shallowCopy(QueryBuilder queryBuilder, QueryBuilder postQueryBuilder) { + SearchSourceBuilder rewrittenBuilder = new SearchSourceBuilder(); + rewrittenBuilder.aggregations = aggregations; + rewrittenBuilder.explain = explain; + rewrittenBuilder.ext = ext; + rewrittenBuilder.fetchSourceContext = fetchSourceContext; + rewrittenBuilder.fieldDataFields = fieldDataFields; + rewrittenBuilder.fieldNames = fieldNames; + rewrittenBuilder.from = from; + rewrittenBuilder.highlightBuilder = highlightBuilder; + rewrittenBuilder.indexBoost = indexBoost; + rewrittenBuilder.innerHitsBuilder = innerHitsBuilder; + rewrittenBuilder.minScore = minScore; + rewrittenBuilder.postQueryBuilder = postQueryBuilder; + rewrittenBuilder.profile = profile; + rewrittenBuilder.queryBuilder = queryBuilder; + rewrittenBuilder.rescoreBuilders = rescoreBuilders; + rewrittenBuilder.scriptFields = scriptFields; + rewrittenBuilder.searchAfterBuilder = searchAfterBuilder; + rewrittenBuilder.size = size; + rewrittenBuilder.sorts = sorts; + rewrittenBuilder.stats = stats; + rewrittenBuilder.suggestBuilder = suggestBuilder; + rewrittenBuilder.terminateAfter = terminateAfter; + rewrittenBuilder.timeoutInMillis = timeoutInMillis; + rewrittenBuilder.trackScores = trackScores; + rewrittenBuilder.version = version; + return rewrittenBuilder; + } + /** * Create a 
new SearchSourceBuilder with attributes set by an xContent. */ diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java index 56ad8ed9467c..311923503085 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.script.Template; import org.elasticsearch.search.Scroll; @@ -224,4 +225,15 @@ public class ShardSearchLocalRequest implements ShardSearchRequest { // we could potentially keep it without copying, but then pay the price of extra unused bytes up to a page return out.bytes().copyBytesArray(); } + + @Override + public void rewrite(QueryShardContext context) throws IOException { + SearchSourceBuilder source = this.source; + SearchSourceBuilder rewritten = null; + while (rewritten != source) { + rewritten = source.rewrite(context); + source = rewritten; + } + this.source = source; + } } diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java index 82ff69078aa8..aa148e215c81 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.internal; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.bytes.BytesReference; +import 
org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.script.Template; import org.elasticsearch.search.Scroll; @@ -72,4 +73,10 @@ public interface ShardSearchRequest { * Returns the cache key for this shard search request, based on its content */ BytesReference cacheKey() throws IOException; + + /** + * Rewrites this request into its primitive form. e.g. by rewriting the + * QueryBuilder. + */ + void rewrite(QueryShardContext context) throws IOException; } diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java index dc19f84c7a78..cd6460a686fa 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.script.Template; import org.elasticsearch.search.Scroll; @@ -156,4 +157,16 @@ public class ShardSearchTransportRequest extends TransportRequest implements Sha public boolean isProfile() { return shardSearchLocalRequest.isProfile(); } + + @Override + public void rewrite(QueryShardContext context) throws IOException { + shardSearchLocalRequest.rewrite(context); + } + + private ShardSearchTransportRequest shallowCopy(ShardSearchLocalRequest rewritten) { + ShardSearchTransportRequest newRequest = new ShardSearchTransportRequest(); + newRequest.originalIndices = originalIndices; + newRequest.shardSearchLocalRequest = rewritten; + return newRequest; + } } diff --git 
a/core/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java b/core/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java new file mode 100644 index 000000000000..cff2d13ce634 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java @@ -0,0 +1,99 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.fieldstats; + +import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.indices.IndicesRequestCache; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESSingleNodeTestCase; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.equalTo; + +public class FieldStatsProviderRefreshTests extends ESSingleNodeTestCase { + + public void testQueryRewriteOnRefresh() throws Exception { + assertAcked(client().admin().indices().prepareCreate("index").addMapping("type", "s", "type=text") + .setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true, + IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1, + IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .get()); + + // Index some documents + indexDocument("1", "d"); + indexDocument("2", "e"); + indexDocument("3", "f"); + refreshIndex(); + + // check request cache stats are clean + assertRequestCacheStats(0, 0); + + // Search for a range and check that it missed the cache (since its the + // first time it has run) + final SearchResponse r1 = client().prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0) + .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")).get(); + assertSearchResponse(r1); + assertThat(r1.getHits().getTotalHits(), equalTo(3L)); + assertRequestCacheStats(0, 1); + + // Search again and check it hits the cache + final SearchResponse r2 = 
client().prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0) + .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")).get(); + assertSearchResponse(r2); + assertThat(r2.getHits().getTotalHits(), equalTo(3L)); + assertRequestCacheStats(1, 1); + + // Index some more documents in the query range and refresh + indexDocument("4", "c"); + indexDocument("5", "g"); + refreshIndex(); + + // Search again and check the request cache for another miss since request cache should be invalidated by refresh + final SearchResponse r3 = client().prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0) + .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")).get(); + assertSearchResponse(r3); + assertThat(r3.getHits().getTotalHits(), equalTo(5L)); + assertRequestCacheStats(1, 2); + } + + private void assertRequestCacheStats(long expectedHits, long expectedMisses) { + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), + equalTo(expectedHits)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), + equalTo(expectedMisses)); + } + + private void refreshIndex() { + RefreshResponse refreshResponse = client().admin().indices().prepareRefresh("index").get(); + assertThat(refreshResponse.getSuccessfulShards(), equalTo(refreshResponse.getTotalShards())); + } + + private void indexDocument(String id, String sValue) { + IndexResponse response = client().prepareIndex("index", "type", id).setSource("s", sValue).get(); + assertThat(response.status(), anyOf(equalTo(RestStatus.OK), equalTo(RestStatus.CREATED))); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderTests.java b/core/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderTests.java new file mode 100644 index 000000000000..9cad8d3fc8dd --- /dev/null +++ 
b/core/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderTests.java @@ -0,0 +1,446 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.fieldstats; + +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.store.BaseDirectoryWrapper; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.index.analysis.AnalysisService; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.engine.Engine.Searcher; +import org.elasticsearch.index.fieldstats.FieldStatsProvider.Relation; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.MapperService; 
+import org.elasticsearch.index.mapper.MapperService.MergeReason; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.similarity.SimilarityService; +import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.IndexSettingsModule; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.joda.time.chrono.ISOChronology; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.Collections; + +import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.hamcrest.Matchers.equalTo; + +public class FieldStatsProviderTests extends ESTestCase { + + private DirectoryReader directoryReader; + private Searcher searcher; + private FieldStatsProvider fieldStatsProvider; + private BaseDirectoryWrapper dir; + private AnalysisRegistry analysisRegistry; + + @Before + public void setup() throws IOException { + Settings nodeSettings = settingsBuilder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); + IndexSettings settings = IndexSettingsModule.newIndexSettings("_na", nodeSettings); + SimilarityService similarityService = new SimilarityService(settings, Collections.emptyMap()); + analysisRegistry = new AnalysisRegistry(null, new Environment(nodeSettings)); + AnalysisService analysisService = analysisRegistry.build(settings); + IndicesModule indicesModule = new IndicesModule(); + MapperRegistry mapperRegistry = indicesModule.getMapperRegistry(); + MapperService service = new MapperService(settings, analysisService, similarityService, mapperRegistry, () -> null); + putMapping(service); + dir = newDirectory(); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig()); + indexDocument(service, w, "1", 50L, 50.2f, 50.2, "cherry", new DateTime(2014, 1, 1, 0, 0, 0, 
ISOChronology.getInstanceUTC()), + "10.10.0.10"); + indexDocument(service, w, "2", 60L, 60.1f, 60.1, "damson", new DateTime(2014, 2, 1, 0, 0, 0, ISOChronology.getInstanceUTC()), + "10.10.0.20"); + indexDocument(service, w, "3", 70L, 70.6f, 70.6, "grape", new DateTime(2014, 3, 1, 0, 0, 0, ISOChronology.getInstanceUTC()), + "10.10.0.30"); + indexDocument(service, w, "4", 80L, 80.2f, 80.2, "kiwi", new DateTime(2014, 4, 1, 0, 0, 0, ISOChronology.getInstanceUTC()), + "10.10.0.40"); + indexDocument(service, w, "5", 90L, 90.4f, 90.4, "lemon", new DateTime(2014, 5, 1, 0, 0, 0, ISOChronology.getInstanceUTC()), + "10.10.0.50"); + indexDocument(service, w, "6", 100L, 100.3f, 100.3, "orange", new DateTime(2014, 6, 1, 0, 0, 0, ISOChronology.getInstanceUTC()), + "10.10.0.60"); + directoryReader = DirectoryReader.open(w, true, true); + w.close(); + ShardId shard = new ShardId("index", "_na_", 0); + directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, shard); + IndexSearcher s = new IndexSearcher(directoryReader); + searcher = new Engine.Searcher("test", s); + fieldStatsProvider = new FieldStatsProvider(searcher, service); + } + + @After + public void teardown() throws IOException { + searcher.close(); + directoryReader.close(); + dir.close(); + analysisRegistry.close(); + } + + public void testiIsFieldWithinQueryLong() throws IOException { + assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", 10L, 200L, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", 10L, null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", null, 200L, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", null, null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", 
10L, 100L, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", 50L, 200L, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", 30L, 80L, true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", 80L, 200L, true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", 60L, 80L, true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", 10L, 100L, true, false, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", 50L, 200L, false, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", 100L, 200L, true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", 1L, 50L, true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", 150L, 200L, true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", 1L, 8L, true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", null, 8L, true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", 150L, null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", 100L, 200L, false, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + 
assertThat(fieldStatsProvider.isFieldWithinQuery("long_field", 1L, 50L, true, false, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + } + + public void testiIsFieldWithinQueryFloat() throws IOException { + assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", 10.8f, 200.5f, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", 10.8f, null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", null, 200.5f, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", null, null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", 10.8f, 100.3f, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", 50.2f, 200.5f, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", 30.5f, 80.1f, true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", 80.1f, 200.5f, true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", 10.8f, 100.3f, true, false, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", 50.2f, 200.5f, false, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", 100.3f, 200.5f, true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", 1.9f, 50.2f, true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + 
assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", 60.9f, 80.1f, true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", 150.4f, 200.5f, true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", 1.9f, 8.1f, true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", null, 8.1f, true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", 150.4f, null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", 100.3f, 200.5f, false, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("float_field", 1.9f, 50.2f, true, false, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + } + + public void testiIsFieldWithinQueryDouble() throws IOException { + assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", 10.8, 200.5, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", 10.8, null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", null, 200.5, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", null, null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", 10.8, 100.3, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", 50.2, 200.5, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + 
assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", 30.5, 80.1, true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", 80.1, 200.5, true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", 60.9, 80.1, true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", 10.8, 100.3, true, false, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", 50.2, 200.5, false, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", 100.3, 200.5, true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", 1.9, 50.2, true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", 150.4, 200.5, true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", 1.9, 8.1, true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", null, 8.1, true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", 150.4, null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", 100.3, 200.5, false, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("double_field", 1.9, 50.2, true, false, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + } + + public void testiIsFieldWithinQueryText() throws IOException { 
+ assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", new BytesRef("banana"), new BytesRef("zebra"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", new BytesRef("banana"), null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", null, new BytesRef("zebra"), true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", null, null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", new BytesRef("banana"), new BytesRef("orange"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", new BytesRef("cherry"), new BytesRef("zebra"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", new BytesRef("banana"), new BytesRef("grape"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", new BytesRef("grape"), new BytesRef("zebra"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", new BytesRef("lime"), new BytesRef("mango"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", new BytesRef("banana"), new BytesRef("orange"), true, false, + DateTimeZone.UTC, null), equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", new BytesRef("cherry"), new BytesRef("zebra"), false, true, + DateTimeZone.UTC, null), equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", new BytesRef("orange"), 
new BytesRef("zebra"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", new BytesRef("apple"), new BytesRef("cherry"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", new BytesRef("peach"), new BytesRef("zebra"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", new BytesRef("apple"), new BytesRef("banana"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", null, new BytesRef("banana"), true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", new BytesRef("peach"), null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", new BytesRef("orange"), new BytesRef("zebra"), false, true, + DateTimeZone.UTC, null), equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("text_field", new BytesRef("apple"), new BytesRef("cherry"), true, false, + DateTimeZone.UTC, null), equalTo(Relation.DISJOINT)); + } + + public void testiIsFieldWithinQueryKeyword() throws IOException { + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", new BytesRef("banana"), new BytesRef("zebra"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", new BytesRef("banana"), null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", null, new BytesRef("zebra"), true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", null, null, true, true, 
DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", new BytesRef("banana"), new BytesRef("orange"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", new BytesRef("cherry"), new BytesRef("zebra"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", new BytesRef("banana"), new BytesRef("grape"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", new BytesRef("grape"), new BytesRef("zebra"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", new BytesRef("lime"), new BytesRef("mango"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", new BytesRef("banana"), new BytesRef("orange"), true, false, + DateTimeZone.UTC, null), equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", new BytesRef("cherry"), new BytesRef("zebra"), false, true, + DateTimeZone.UTC, null), equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", new BytesRef("orange"), new BytesRef("zebra"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", new BytesRef("apple"), new BytesRef("cherry"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", new BytesRef("peach"), new BytesRef("zebra"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", new BytesRef("apple"), new 
BytesRef("banana"), true, true, + DateTimeZone.UTC, null), equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", null, new BytesRef("banana"), true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", new BytesRef("peach"), null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", new BytesRef("orange"), new BytesRef("zebra"), false, true, + DateTimeZone.UTC, null), equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("keyword_field", new BytesRef("apple"), new BytesRef("cherry"), true, false, + DateTimeZone.UTC, null), equalTo(Relation.DISJOINT)); + } + + public void testiIsFieldWithinQueryDate() throws IOException { + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", "2013-01-01", "now", true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", "2013-01-01", null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", null, "now", true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", null, null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", "2013-01-01", "2014-06-01", true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", "2014-01-01", "now", true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", "2013-01-01", "2014-03-01", true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", 
"2014-03-01", "now", true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", "2014-03-01", "2014-05-01", true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", "2013-01-01", "2014-06-01", true, false, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", "2014-01-01", "now", false, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", "2014-06-01", "now", true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", "2013-01-01", "2014-01-01", true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", "2015-01-01", "now", true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", "2013-01-01", "2013-09-01", true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", null, "2013-09-01", true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", "2015-01-01", null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", "2014-06-01", "now", false, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("date_field", "2013-01-01", "2014-01-01", true, false, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + } + + public void testiIsFieldWithinQueryIp() throws IOException { + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", "10.10.0.1", "10.20.0.1", true, 
true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", "10.10.0.1", null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", null, "10.20.0.1", true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", null, null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", "10.10.0.1", "10.10.0.60", true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", "10.10.0.10", "10.20.0.1", true, true, DateTimeZone.UTC, null), + equalTo(Relation.WITHIN)); + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", "10.10.0.1", "10.10.0.40", true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", "10.10.0.40", "10.20.0.1", true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", "10.10.0.30", "10.10.0.40", true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", "10.10.0.1", "10.10.0.60", true, false, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", "10.10.0.10", "10.20.0.1", false, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", "10.10.0.60", "10.20.0.1", true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", "10.0.0.1", "10.10.0.10", true, true, DateTimeZone.UTC, null), + equalTo(Relation.INTERSECTS)); + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", 
"10.20.0.10", "10.20.0.1", true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", "10.0.0.1", "10.0.0.100", true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", null, "10.0.0.100", true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", "10.20.0.10", null, true, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", "10.10.0.60", "10.20.0.1", false, true, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + assertThat(fieldStatsProvider.isFieldWithinQuery("ip_field", "10.0.0.1", "10.10.0.10", true, false, DateTimeZone.UTC, null), + equalTo(Relation.DISJOINT)); + } + + private void putMapping(MapperService service) throws IOException { + XContentBuilder mappingbuilder = JsonXContent.contentBuilder(); + mappingbuilder.startObject(); + mappingbuilder.startObject("type"); + mappingbuilder.startObject("properties"); + mappingbuilder.startObject("long_field"); + mappingbuilder.field("type", "long"); + mappingbuilder.endObject(); + mappingbuilder.startObject("float_field"); + mappingbuilder.field("type", "float"); + mappingbuilder.endObject(); + mappingbuilder.startObject("double_field"); + mappingbuilder.field("type", "double"); + mappingbuilder.endObject(); + mappingbuilder.startObject("text_field"); + mappingbuilder.field("type", "text"); + mappingbuilder.endObject(); + mappingbuilder.startObject("keyword_field"); + mappingbuilder.field("type", "keyword"); + mappingbuilder.endObject(); + mappingbuilder.startObject("date_field"); + mappingbuilder.field("type", "date"); + mappingbuilder.endObject(); + mappingbuilder.startObject("ip_field"); + mappingbuilder.field("type", "ip"); + mappingbuilder.endObject(); + mappingbuilder.endObject(); + mappingbuilder.endObject(); + 
mappingbuilder.endObject(); + service.merge("type", new CompressedXContent(mappingbuilder.bytes()), MergeReason.MAPPING_UPDATE, true); + } + + private void indexDocument(MapperService service, IndexWriter writer, String id, long longValue, float floatValue, double doubleValue, + String stringValue, DateTime dateValue, String ipValue) throws IOException { + XContentBuilder docBuilder = JsonXContent.contentBuilder(); + docBuilder.startObject(); + docBuilder.field("long_field", longValue); + docBuilder.field("float_field", floatValue); + docBuilder.field("double_field", doubleValue); + docBuilder.field("text_field", stringValue); + docBuilder.field("keyword_field", stringValue); + docBuilder.field("date_field", dateValue); + docBuilder.field("ip_field", ipValue); + docBuilder.endObject(); + DocumentMapper documentMapper = service.documentMapper("type"); + ParsedDocument doc = documentMapper.parse("index", "type", id, docBuilder.bytes()); + writer.addDocument(doc.rootDoc()); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index 4d2ffcdd11e7..8f63daaa8f46 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -23,7 +23,6 @@ import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.core.io.JsonStringEncoder; -import org.apache.lucene.index.memory.MemoryIndex; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -341,6 +340,7 @@ public abstract class AbstractQueryTestCase> @After public void afterTest() { + queryShardContext.setFieldStatsProvider(null); clientInvocationHandler.delegate = null; SearchContext.removeCurrent(); } diff --git 
a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java index 9f99b85a294d..30e32c92da20 100644 --- a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java @@ -22,11 +22,16 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermRangeQuery; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.index.fieldstats.FieldStatsProvider; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; +import org.joda.time.chrono.ISOChronology; import java.io.IOException; import java.util.HashMap; @@ -38,6 +43,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.sameInstance; public class RangeQueryBuilderTests extends AbstractQueryTestCase { @@ -392,4 +398,399 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase> FieldStats get(String field) throws IOException { + assertThat(field, equalTo(fieldName)); + return (FieldStats) new FieldStats.Long(randomLong(), randomLong(), randomLong(), randomLong(), shardMinValue, + shardMaxValue); + } + }; + queryShardContext.setFieldStatsProvider(fieldStatsProvider); + QueryBuilder rewritten = query.rewrite(queryShardContext); + assertThat(rewritten, instanceOf(RangeQueryBuilder.class)); + RangeQueryBuilder rewrittenRange = (RangeQueryBuilder) rewritten; 
+ assertThat(rewrittenRange.fieldName(), equalTo(fieldName)); + assertThat(rewrittenRange.from(), equalTo(shardMinValue)); + assertThat(rewrittenRange.to(), equalTo(shardMaxValue)); + assertThat(rewrittenRange.includeLower(), equalTo(true)); + assertThat(rewrittenRange.includeUpper(), equalTo(true)); + } + + public void testRewriteLongToMatchNone() throws IOException { + String fieldName = randomAsciiOfLengthBetween(1, 20); + RangeQueryBuilder query = new RangeQueryBuilder(fieldName); + int queryFromValue = randomIntBetween(-1000000, 1000000); + int queryToValue = randomIntBetween(queryFromValue, 2000000); + query.from((long) queryFromValue); + query.to((long) queryToValue); + QueryShardContext queryShardContext = queryShardContext(); + FieldStatsProvider fieldStatsProvider = new FieldStatsProvider(null, null) { + + @Override + public Relation isFieldWithinQuery(String fieldName, Object from, Object to, boolean includeLower, boolean includeUpper, + DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException { + return Relation.DISJOINT; + } + }; + queryShardContext.setFieldStatsProvider(fieldStatsProvider); + QueryBuilder rewritten = query.rewrite(queryShardContext); + assertThat(rewritten, instanceOf(MatchNoneQueryBuilder.class)); + } + + public void testRewriteLongToSame() throws IOException { + String fieldName = randomAsciiOfLengthBetween(1, 20); + RangeQueryBuilder query = new RangeQueryBuilder(fieldName); + int queryFromValue = randomIntBetween(-1000000, 1000000); + int queryToValue = randomIntBetween(queryFromValue, 2000000); + query.from((long) queryFromValue); + query.to((long) queryToValue); + QueryShardContext queryShardContext = queryShardContext(); + FieldStatsProvider fieldStatsProvider = new FieldStatsProvider(null, null) { + + @Override + public Relation isFieldWithinQuery(String fieldName, Object from, Object to, boolean includeLower, boolean includeUpper, + DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException { + 
return Relation.INTERSECTS; + } + }; + queryShardContext.setFieldStatsProvider(fieldStatsProvider); + QueryBuilder rewritten = query.rewrite(queryShardContext); + assertThat(rewritten, sameInstance(query)); + } + + public void testRewriteDoubleToMatchAll() throws IOException { + String fieldName = randomAsciiOfLengthBetween(1, 20); + RangeQueryBuilder query = new RangeQueryBuilder(fieldName); + double queryFromValue = randomDoubleBetween(-1000000.0, 1000000.0, true); + double queryToValue = randomDoubleBetween(queryFromValue, 2000000, true); + double shardMinValue = randomDoubleBetween(queryFromValue, queryToValue, true); + double shardMaxValue = randomDoubleBetween(shardMinValue, queryToValue, true); + query.from(queryFromValue); + query.to(queryToValue); + QueryShardContext queryShardContext = queryShardContext(); + FieldStatsProvider fieldStatsProvider = new FieldStatsProvider(null, null) { + + @Override + public Relation isFieldWithinQuery(String fieldName, Object from, Object to, boolean includeLower, boolean includeUpper, + DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException { + return Relation.WITHIN; + } + + @SuppressWarnings("unchecked") + @Override + public > FieldStats get(String field) throws IOException { + assertThat(field, equalTo(fieldName)); + return (FieldStats) new FieldStats.Double(randomLong(), randomLong(), randomLong(), randomLong(), shardMinValue, + shardMaxValue); + } + }; + queryShardContext.setFieldStatsProvider(fieldStatsProvider); + QueryBuilder rewritten = query.rewrite(queryShardContext); + assertThat(rewritten, instanceOf(RangeQueryBuilder.class)); + RangeQueryBuilder rewrittenRange = (RangeQueryBuilder) rewritten; + assertThat(rewrittenRange.fieldName(), equalTo(fieldName)); + assertThat(rewrittenRange.from(), equalTo(shardMinValue)); + assertThat(rewrittenRange.to(), equalTo(shardMaxValue)); + assertThat(rewrittenRange.includeLower(), equalTo(true)); + assertThat(rewrittenRange.includeUpper(), equalTo(true)); + 
} + + public void testRewriteDoubleToMatchNone() throws IOException { + String fieldName = randomAsciiOfLengthBetween(1, 20); + RangeQueryBuilder query = new RangeQueryBuilder(fieldName); + double queryFromValue = randomDoubleBetween(-1000000, 1000000, true); + double queryToValue = randomDoubleBetween(queryFromValue, 2000000, true); + query.from(queryFromValue); + query.to(queryToValue); + QueryShardContext queryShardContext = queryShardContext(); + FieldStatsProvider fieldStatsProvider = new FieldStatsProvider(null, null) { + + @Override + public Relation isFieldWithinQuery(String fieldName, Object from, Object to, boolean includeLower, boolean includeUpper, + DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException { + return Relation.DISJOINT; + } + }; + queryShardContext.setFieldStatsProvider(fieldStatsProvider); + QueryBuilder rewritten = query.rewrite(queryShardContext); + assertThat(rewritten, instanceOf(MatchNoneQueryBuilder.class)); + } + + public void testRewriteDoubleToSame() throws IOException { + String fieldName = randomAsciiOfLengthBetween(1, 20); + RangeQueryBuilder query = new RangeQueryBuilder(fieldName); + double queryFromValue = randomDoubleBetween(-1000000, 1000000, true); + double queryToValue = randomDoubleBetween(queryFromValue, 2000000, true); + query.from(queryFromValue); + query.to(queryToValue); + QueryShardContext queryShardContext = queryShardContext(); + FieldStatsProvider fieldStatsProvider = new FieldStatsProvider(null, null) { + + @Override + public Relation isFieldWithinQuery(String fieldName, Object from, Object to, boolean includeLower, boolean includeUpper, + DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException { + return Relation.INTERSECTS; + } + }; + queryShardContext.setFieldStatsProvider(fieldStatsProvider); + QueryBuilder rewritten = query.rewrite(queryShardContext); + assertThat(rewritten, sameInstance(query)); + } + + public void testRewriteFloatToMatchAll() throws IOException { + 
String fieldName = randomAsciiOfLengthBetween(1, 20); + RangeQueryBuilder query = new RangeQueryBuilder(fieldName); + float queryFromValue = (float) randomDoubleBetween(-1000000.0, 1000000.0, true); + float queryToValue = (float) randomDoubleBetween(queryFromValue, 2000000, true); + float shardMinValue = (float) randomDoubleBetween(queryFromValue, queryToValue, true); + float shardMaxValue = (float) randomDoubleBetween(shardMinValue, queryToValue, true); + query.from(queryFromValue); + query.to(queryToValue); + QueryShardContext queryShardContext = queryShardContext(); + FieldStatsProvider fieldStatsProvider = new FieldStatsProvider(null, null) { + + @Override + public Relation isFieldWithinQuery(String fieldName, Object from, Object to, boolean includeLower, boolean includeUpper, + DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException { + return Relation.WITHIN; + } + + @SuppressWarnings("unchecked") + @Override + public > FieldStats get(String field) throws IOException { + assertThat(field, equalTo(fieldName)); + return (FieldStats) new FieldStats.Float(randomLong(), randomLong(), randomLong(), randomLong(), shardMinValue, + shardMaxValue); + } + }; + queryShardContext.setFieldStatsProvider(fieldStatsProvider); + QueryBuilder rewritten = query.rewrite(queryShardContext); + assertThat(rewritten, instanceOf(RangeQueryBuilder.class)); + RangeQueryBuilder rewrittenRange = (RangeQueryBuilder) rewritten; + assertThat(rewrittenRange.fieldName(), equalTo(fieldName)); + assertThat(rewrittenRange.from(), equalTo(shardMinValue)); + assertThat(rewrittenRange.to(), equalTo(shardMaxValue)); + assertThat(rewrittenRange.includeLower(), equalTo(true)); + assertThat(rewrittenRange.includeUpper(), equalTo(true)); + } + + public void testRewriteFloatToMatchNone() throws IOException { + String fieldName = randomAsciiOfLengthBetween(1, 20); + RangeQueryBuilder query = new RangeQueryBuilder(fieldName); + float queryFromValue = (float) randomDoubleBetween(-1000000, 
1000000, true); + float queryToValue = (float) randomDoubleBetween(queryFromValue, 2000000, true); + query.from(queryFromValue); + query.to(queryToValue); + QueryShardContext queryShardContext = queryShardContext(); + FieldStatsProvider fieldStatsProvider = new FieldStatsProvider(null, null) { + + @Override + public Relation isFieldWithinQuery(String fieldName, Object from, Object to, boolean includeLower, boolean includeUpper, + DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException { + return Relation.DISJOINT; + } + }; + queryShardContext.setFieldStatsProvider(fieldStatsProvider); + QueryBuilder rewritten = query.rewrite(queryShardContext); + assertThat(rewritten, instanceOf(MatchNoneQueryBuilder.class)); + } + + public void testRewriteFloatToSame() throws IOException { + String fieldName = randomAsciiOfLengthBetween(1, 20); + RangeQueryBuilder query = new RangeQueryBuilder(fieldName); + float queryFromValue = (float) randomDoubleBetween(-1000000, 1000000, true); + float queryToValue = (float) randomDoubleBetween(queryFromValue, 2000000, true); + query.from(queryFromValue); + query.to(queryToValue); + QueryShardContext queryShardContext = queryShardContext(); + FieldStatsProvider fieldStatsProvider = new FieldStatsProvider(null, null) { + + @Override + public Relation isFieldWithinQuery(String fieldName, Object from, Object to, boolean includeLower, boolean includeUpper, + DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException { + return Relation.INTERSECTS; + } + }; + queryShardContext.setFieldStatsProvider(fieldStatsProvider); + QueryBuilder rewritten = query.rewrite(queryShardContext); + assertThat(rewritten, sameInstance(query)); + } + + public void testRewriteTextToMatchAll() throws IOException { + String fieldName = randomAsciiOfLengthBetween(1, 20); + RangeQueryBuilder query = new RangeQueryBuilder(fieldName); + String queryFromValue = "damson"; + String queryToValue = "plum"; + String shardMinValue = "grape"; + String 
shardMaxValue = "orange"; + query.from(queryFromValue); + query.to(queryToValue); + QueryShardContext queryShardContext = queryShardContext(); + FieldStatsProvider fieldStatsProvider = new FieldStatsProvider(null, null) { + + @Override + public Relation isFieldWithinQuery(String fieldName, Object from, Object to, boolean includeLower, boolean includeUpper, + DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException { + return Relation.WITHIN; + } + + @SuppressWarnings("unchecked") + @Override + public > FieldStats get(String field) throws IOException { + assertThat(field, equalTo(fieldName)); + return (FieldStats) new FieldStats.Text(randomLong(), randomLong(), randomLong(), randomLong(), + new BytesRef(shardMinValue), new BytesRef(shardMaxValue)); + } + }; + queryShardContext.setFieldStatsProvider(fieldStatsProvider); + QueryBuilder rewritten = query.rewrite(queryShardContext); + assertThat(rewritten, instanceOf(RangeQueryBuilder.class)); + RangeQueryBuilder rewrittenRange = (RangeQueryBuilder) rewritten; + assertThat(rewrittenRange.fieldName(), equalTo(fieldName)); + assertThat(rewrittenRange.from(), equalTo(shardMinValue)); + assertThat(rewrittenRange.to(), equalTo(shardMaxValue)); + assertThat(rewrittenRange.includeLower(), equalTo(true)); + assertThat(rewrittenRange.includeUpper(), equalTo(true)); + } + + public void testRewriteTextToMatchNone() throws IOException { + String fieldName = randomAsciiOfLengthBetween(1, 20); + RangeQueryBuilder query = new RangeQueryBuilder(fieldName); + String queryFromValue = "damson"; + String queryToValue = "plum"; + query.from(queryFromValue); + query.to(queryToValue); + QueryShardContext queryShardContext = queryShardContext(); + FieldStatsProvider fieldStatsProvider = new FieldStatsProvider(null, null) { + + @Override + public Relation isFieldWithinQuery(String fieldName, Object from, Object to, boolean includeLower, boolean includeUpper, + DateTimeZone timeZone, DateMathParser dateMathParser) throws 
IOException { + return Relation.DISJOINT; + } + }; + queryShardContext.setFieldStatsProvider(fieldStatsProvider); + QueryBuilder rewritten = query.rewrite(queryShardContext); + assertThat(rewritten, instanceOf(MatchNoneQueryBuilder.class)); + } + + public void testRewriteTextToSame() throws IOException { + String fieldName = randomAsciiOfLengthBetween(1, 20); + RangeQueryBuilder query = new RangeQueryBuilder(fieldName); + String queryFromValue = "damson"; + String queryToValue = "plum"; + query.from(queryFromValue); + query.to(queryToValue); + QueryShardContext queryShardContext = queryShardContext(); + FieldStatsProvider fieldStatsProvider = new FieldStatsProvider(null, null) { + + @Override + public Relation isFieldWithinQuery(String fieldName, Object from, Object to, boolean includeLower, boolean includeUpper, + DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException { + return Relation.INTERSECTS; + } + }; + queryShardContext.setFieldStatsProvider(fieldStatsProvider); + QueryBuilder rewritten = query.rewrite(queryShardContext); + assertThat(rewritten, sameInstance(query)); + } + + public void testRewriteDateToMatchAll() throws IOException { + String fieldName = randomAsciiOfLengthBetween(1, 20); + RangeQueryBuilder query = new RangeQueryBuilder(fieldName); + DateTime queryFromValue = new DateTime(2015, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); + DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); + DateTime shardMinValue = new DateTime(2015, 3, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); + DateTime shardMaxValue = new DateTime(2015, 9, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); + query.from(queryFromValue); + query.to(queryToValue); + QueryShardContext queryShardContext = queryShardContext(); + FieldStatsProvider fieldStatsProvider = new FieldStatsProvider(null, null) { + + @Override + public Relation isFieldWithinQuery(String fieldName, Object from, Object to, boolean includeLower, boolean 
includeUpper, + DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException { + return Relation.WITHIN; + } + + @SuppressWarnings("unchecked") + @Override + public > FieldStats get(String field) throws IOException { + assertThat(field, equalTo(fieldName)); + return (FieldStats) new FieldStats.Date(randomLong(), randomLong(), randomLong(), randomLong(), + shardMinValue.getMillis(), shardMaxValue.getMillis(), null); + } + }; + queryShardContext.setFieldStatsProvider(fieldStatsProvider); + QueryBuilder rewritten = query.rewrite(queryShardContext); + assertThat(rewritten, instanceOf(RangeQueryBuilder.class)); + RangeQueryBuilder rewrittenRange = (RangeQueryBuilder) rewritten; + assertThat(rewrittenRange.fieldName(), equalTo(fieldName)); + assertThat(rewrittenRange.from(), equalTo(shardMinValue.getMillis())); + assertThat(rewrittenRange.to(), equalTo(shardMaxValue.getMillis())); + assertThat(rewrittenRange.includeLower(), equalTo(true)); + assertThat(rewrittenRange.includeUpper(), equalTo(true)); + } + + public void testRewriteDateToMatchNone() throws IOException { + String fieldName = randomAsciiOfLengthBetween(1, 20); + RangeQueryBuilder query = new RangeQueryBuilder(fieldName); + DateTime queryFromValue = new DateTime(2015, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); + DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); + query.from(queryFromValue); + query.to(queryToValue); + QueryShardContext queryShardContext = queryShardContext(); + FieldStatsProvider fieldStatsProvider = new FieldStatsProvider(null, null) { + + @Override + public Relation isFieldWithinQuery(String fieldName, Object from, Object to, boolean includeLower, boolean includeUpper, + DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException { + return Relation.DISJOINT; + } + }; + queryShardContext.setFieldStatsProvider(fieldStatsProvider); + QueryBuilder rewritten = query.rewrite(queryShardContext); + assertThat(rewritten, 
instanceOf(MatchNoneQueryBuilder.class)); + } + + public void testRewriteDateToSame() throws IOException { + String fieldName = randomAsciiOfLengthBetween(1, 20); + RangeQueryBuilder query = new RangeQueryBuilder(fieldName); + DateTime queryFromValue = new DateTime(2015, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); + DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); + query.from(queryFromValue); + query.to(queryToValue); + QueryShardContext queryShardContext = queryShardContext(); + FieldStatsProvider fieldStatsProvider = new FieldStatsProvider(null, null) { + + @Override + public Relation isFieldWithinQuery(String fieldName, Object from, Object to, boolean includeLower, boolean includeUpper, + DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException { + return Relation.INTERSECTS; + } + }; + queryShardContext.setFieldStatsProvider(fieldStatsProvider); + QueryBuilder rewritten = query.rewrite(queryShardContext); + assertThat(rewritten, sameInstance(query)); + } } diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java index 1b2ad9c0a1eb..94c41e5c84e1 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java @@ -21,18 +21,19 @@ package org.elasticsearch.indices; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.indices.IndicesRequestCache; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import 
org.elasticsearch.test.ESIntegTestCase; import org.joda.time.DateTimeZone; - import java.util.List; import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; public class IndicesRequestCacheIT extends ESIntegTestCase { @@ -80,4 +81,156 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { } } + public void testQueryRewrite() throws Exception { + assertAcked(client().admin().indices().prepareCreate("index").addMapping("type", "s", "type=text") + .setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true, + IndexMetaData.SETTING_NUMBER_OF_SHARDS, 5, + IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .get()); + indexRandom(true, client().prepareIndex("index", "type", "1").setRouting("1").setSource("s", "a"), + client().prepareIndex("index", "type", "2").setRouting("1").setSource("s", "b"), + client().prepareIndex("index", "type", "3").setRouting("1").setSource("s", "c"), + client().prepareIndex("index", "type", "4").setRouting("2").setSource("s", "d"), + client().prepareIndex("index", "type", "5").setRouting("2").setSource("s", "e"), + client().prepareIndex("index", "type", "6").setRouting("2").setSource("s", "f"), + client().prepareIndex("index", "type", "7").setRouting("3").setSource("s", "g"), + client().prepareIndex("index", "type", "8").setRouting("3").setSource("s", "h"), + client().prepareIndex("index", "type", "9").setRouting("3").setSource("s", "i")); + ensureSearchable("index"); + + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), + equalTo(0L)); + 
assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), + equalTo(0L)); + + final SearchResponse r1 = client().prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0) + .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")).get(); + assertSearchResponse(r1); + assertThat(r1.getHits().getTotalHits(), equalTo(7L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), + equalTo(0L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), + equalTo(5L)); + + final SearchResponse r2 = client().prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0) + .setQuery(QueryBuilders.rangeQuery("s").gte("b").lte("h")).get(); + assertSearchResponse(r2); + assertThat(r2.getHits().getTotalHits(), equalTo(7L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), + equalTo(3L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), + equalTo(7L)); + + final SearchResponse r3 = client().prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0) + .setQuery(QueryBuilders.rangeQuery("s").gte("c").lte("i")).get(); + assertSearchResponse(r3); + assertThat(r3.getHits().getTotalHits(), equalTo(7L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), + equalTo(6L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), + equalTo(9L)); + } + + public void testQueryRewriteMissingValues() throws Exception { + 
assertAcked(client().admin().indices().prepareCreate("index").addMapping("type", "s", "type=text") + .setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true, IndexMetaData.SETTING_NUMBER_OF_SHARDS, + 1, IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .get()); + indexRandom(true, client().prepareIndex("index", "type", "1").setSource("s", "a"), + client().prepareIndex("index", "type", "2").setSource("s", "b"), + client().prepareIndex("index", "type", "3").setSource("s", "c"), + client().prepareIndex("index", "type", "4").setSource("s", "d"), + client().prepareIndex("index", "type", "5").setSource("s", "e"), + client().prepareIndex("index", "type", "6").setSource("s", "f"), + client().prepareIndex("index", "type", "7").setSource("other", "value"), + client().prepareIndex("index", "type", "8").setSource("s", "h"), + client().prepareIndex("index", "type", "9").setSource("s", "i")); + ensureSearchable("index"); + + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), + equalTo(0L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), + equalTo(0L)); + + final SearchResponse r1 = client().prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0) + .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("j")).get(); + assertSearchResponse(r1); + assertThat(r1.getHits().getTotalHits(), equalTo(8L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), + equalTo(0L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), + equalTo(1L)); + + final SearchResponse r2 = client().prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0) + 
.setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("j")).get(); + assertSearchResponse(r2); + assertThat(r2.getHits().getTotalHits(), equalTo(8L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), + equalTo(1L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), + equalTo(1L)); + + final SearchResponse r3 = client().prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0) + .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("j")).get(); + assertSearchResponse(r3); + assertThat(r3.getHits().getTotalHits(), equalTo(8L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), + equalTo(2L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), + equalTo(1L)); + } + + public void testQueryRewriteDates() throws Exception { + assertAcked(client().admin().indices().prepareCreate("index").addMapping("type", "d", "type=date") + .setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true, + IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1, + IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .get()); + indexRandom(true, client().prepareIndex("index", "type", "1").setSource("d", "2014-01-01T00:00:00"), + client().prepareIndex("index", "type", "2").setSource("d", "2014-02-01T00:00:00"), + client().prepareIndex("index", "type", "3").setSource("d", "2014-03-01T00:00:00"), + client().prepareIndex("index", "type", "4").setSource("d", "2014-04-01T00:00:00"), + client().prepareIndex("index", "type", "5").setSource("d", "2014-05-01T00:00:00"), + client().prepareIndex("index", "type", "6").setSource("d", "2014-06-01T00:00:00"), + client().prepareIndex("index", "type", "7").setSource("d", "2014-07-01T00:00:00"), + 
client().prepareIndex("index", "type", "8").setSource("d", "2014-08-01T00:00:00"), + client().prepareIndex("index", "type", "9").setSource("d", "2014-09-01T00:00:00")); + ensureSearchable("index"); + + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), + equalTo(0L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), + equalTo(0L)); + + final SearchResponse r1 = client().prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0) + .setQuery(QueryBuilders.rangeQuery("d").gte("2013-01-01T00:00:00").lte("now")) + .get(); + assertSearchResponse(r1); + assertThat(r1.getHits().getTotalHits(), equalTo(9L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), + equalTo(0L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), + equalTo(1L)); + + final SearchResponse r2 = client().prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0) + .setQuery(QueryBuilders.rangeQuery("d").gte("2013-01-01T00:00:00").lte("now")) + .get(); + assertSearchResponse(r2); + assertThat(r2.getHits().getTotalHits(), equalTo(9L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), + equalTo(1L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), + equalTo(1L)); + + final SearchResponse r3 = client().prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0) + .setQuery(QueryBuilders.rangeQuery("d").gte("2013-01-01T00:00:00").lte("now")) + .get(); + assertSearchResponse(r3); + assertThat(r3.getHits().getTotalHits(), equalTo(9L)); + 
assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), + equalTo(2L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), + equalTo(1L)); + } + } diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 68b496cd5665..891911b62024 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -1914,6 +1914,7 @@ public class SearchQueryIT extends ESIntegTestCase { assertHitCount(client().prepareSearch("test").setSize(0).setQuery(rangeQuery("field").lte(-999999999999L)).get(), 3); } + @AwaitsFix(bugUrl = "NOCOMMIT") public void testRangeQueryWithTimeZone() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "date", "type=date", "num", "type=integer")); diff --git a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java index 6c10a1c8aeff..136c1fba2e06 100644 --- a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.search.internal.DefaultSearchContext; import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; @@ -270,7 +269,7 @@ public class SimpleSearchIT extends ESIntegTestCase { searchResponse = client().prepareSearch("test") .setQuery(QueryBuilders.rangeQuery("field").gte(1).lte(max)) .setTerminateAfter(i).execute().actionGet(); - 
assertHitCount(searchResponse, (long)i); + assertHitCount(searchResponse, i); assertTrue(searchResponse.isTerminatedEarly()); } From 30cdc11d7566ff43477be745d03fe7c581103be5 Mon Sep 17 00:00:00 2001 From: Colin Goodheart-Smithe Date: Thu, 17 Mar 2016 09:06:32 +0000 Subject: [PATCH 278/320] Enable the indices request cache by default Now we have 16870 we can enable the request cache by default. The caching can still be disabled on a per request basis and can still be disabled in the settings, only the default value has changed. For now this is done regardless of whether the shard is active or inactive. Closes #17134 --- .../java/org/elasticsearch/indices/IndicesRequestCache.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java b/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java index 9129a3b13601..4b4aa4e8df22 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java @@ -70,7 +70,7 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo * since we are checking on the cluster state IndexMetaData always. 
*/ public static final Setting INDEX_CACHE_REQUEST_ENABLED_SETTING = - Setting.boolSetting("index.requests.cache.enable", false, Property.Dynamic, Property.IndexScope); + Setting.boolSetting("index.requests.cache.enable", true, Property.Dynamic, Property.IndexScope); public static final Setting INDICES_CACHE_QUERY_SIZE = Setting.byteSizeSetting("indices.requests.cache.size", "1%", Property.NodeScope); public static final Setting INDICES_CACHE_QUERY_EXPIRE = From d17fd335e494078ab62dc5b08dab9d3b71b50eec Mon Sep 17 00:00:00 2001 From: Colin Goodheart-Smithe Date: Thu, 17 Mar 2016 10:24:11 +0000 Subject: [PATCH 279/320] fix range query rewrite so it rewrites correctly when shard min value == shard max value --- .../index/query/RangeQueryBuilder.java | 15 +++++---------- .../search/matchedqueries/MatchedQueriesIT.java | 12 +++++++----- 2 files changed, 12 insertions(+), 15 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java index 2a627c545905..c3953a511703 100644 --- a/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java @@ -265,7 +265,7 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i // rewrite so just return without rewriting if (fieldStatsProvider != null) { DateMathParser dateMathParser = format == null ? 
null : new DateMathParser(format); - FieldStatsProvider.Relation relation = fieldStatsProvider.isFieldWithinQuery(fieldName, from, to, includeUpper, includeLower, + FieldStatsProvider.Relation relation = fieldStatsProvider.isFieldWithinQuery(fieldName, from, to, includeLower, includeUpper, timeZone, dateMathParser); switch (relation) { case DISJOINT: @@ -278,15 +278,10 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i // bound has not been changed by the rewrite RangeQueryBuilder newRangeQuery = new RangeQueryBuilder(fieldName); String dateFormatString = format == null ? null : format.format(); - if (fieldStats.getMinValue().equals(fieldStats.getMaxValue())) { - newRangeQuery.from(fieldStats.getMinValue(), true); - newRangeQuery.to(fieldStats.getMaxValue(), true); - } else { - newRangeQuery.from(fieldStats.getMinValue(), includeLower || fieldStats.match(new IndexConstraint(fieldName, - Property.MIN, Comparison.GT, fieldStats.stringValueOf(from, dateFormatString)))); - newRangeQuery.to(fieldStats.getMaxValue(), includeUpper || fieldStats.match(new IndexConstraint(fieldName, - Property.MAX, Comparison.LT, fieldStats.stringValueOf(to, dateFormatString)))); - } + newRangeQuery.from(fieldStats.getMinValue(), includeLower || fieldStats.match( + new IndexConstraint(fieldName, Property.MIN, Comparison.GT, fieldStats.stringValueOf(from, dateFormatString)))); + newRangeQuery.to(fieldStats.getMaxValue(), includeUpper || fieldStats.match( + new IndexConstraint(fieldName, Property.MAX, Comparison.LT, fieldStats.stringValueOf(to, dateFormatString)))); newRangeQuery.format = format; newRangeQuery.timeZone = timeZone; return newRangeQuery; diff --git a/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java b/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java index d93d51172746..00fa879f08f5 100644 --- a/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java +++ 
b/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java @@ -53,15 +53,17 @@ public class MatchedQueriesIT extends ESIntegTestCase { refresh(); SearchResponse searchResponse = client().prepareSearch() - .setQuery(boolQuery().must(matchAllQuery()).filter(boolQuery().should(rangeQuery("number").lte(2).queryName("test1")).should(rangeQuery("number").gt(2).queryName("test2")))).get(); +.setQuery(boolQuery().must(matchAllQuery()).filter(boolQuery() + .should(rangeQuery("number").lt(2).queryName("test1")).should(rangeQuery("number").gte(2).queryName("test2")))) + .get(); assertHitCount(searchResponse, 3L); for (SearchHit hit : searchResponse.getHits()) { - if (hit.id().equals("1") || hit.id().equals("2")) { - assertThat(hit.matchedQueries().length, equalTo(1)); - assertThat(hit.matchedQueries(), hasItemInArray("test1")); - } else if (hit.id().equals("3")) { + if (hit.id().equals("3") || hit.id().equals("2")) { assertThat(hit.matchedQueries().length, equalTo(1)); assertThat(hit.matchedQueries(), hasItemInArray("test2")); + } else if (hit.id().equals("1")) { + assertThat(hit.matchedQueries().length, equalTo(1)); + assertThat(hit.matchedQueries(), hasItemInArray("test1")); } else { fail("Unexpected document returned with id " + hit.id()); } From bd059b8cc3c8e28a5a9866621f96e3c36f9a8ee7 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Thu, 17 Mar 2016 14:13:19 +0100 Subject: [PATCH 280/320] Clarify how `-Djava.security.policy=someURL` must be passed Closes #17160 --- docs/reference/modules/scripting/security.asciidoc | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/docs/reference/modules/scripting/security.asciidoc b/docs/reference/modules/scripting/security.asciidoc index af193b351033..e84289bf1d9f 100644 --- a/docs/reference/modules/scripting/security.asciidoc +++ b/docs/reference/modules/scripting/security.asciidoc @@ -100,7 +100,13 @@ Security Policy either: * system wide: `$JAVA_HOME/lib/security/java.policy`, * for 
just the `elasticsearch` user: `/home/elasticsearch/.java.policy`, or -* from a file specified on the command line: `-Djava.security.policy=someURL` +* from a file specified in the `JAVA_OPTS` environment variable with `-Djava.security.policy=someURL`: ++ +[source,js] +--------------------------------- +export JAVA_OPTS="${JAVA_OPTS} -Djava.security.policy=file:///path/to/my.policy` +./bin/elasticsearch +--------------------------------- Permissions may be granted at the class, package, or global level. For instance: From 64418526187d4666f7001c79c5aac1f6cc35c8cc Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Wed, 9 Dec 2015 21:36:11 +0100 Subject: [PATCH 281/320] Fix OOM in AbstractXContentParser This commit fixes an OOM error that happens when the XContentParser.readList() method is asked to parse a single value instead of an array. It fixes the UpdateRequest parsing as well as remove some leniency in the readList() method so that it expect to be in an array before parsing values. closes #15338 --- .../action/update/UpdateRequest.java | 13 +++- .../support/AbstractXContentParser.java | 10 ++- .../action/update/UpdateRequestTests.java | 15 ++++ .../common/xcontent/XContentParserTests.java | 78 +++++++++++++++++++ 4 files changed, 112 insertions(+), 4 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java index 14c127c07039..0877ea1c66b3 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java @@ -44,6 +44,7 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService.ScriptType; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -671,9 
+672,15 @@ public class UpdateRequest extends InstanceShardOperationRequest } else if ("detect_noop".equals(currentFieldName)) { detectNoop(parser.booleanValue()); } else if ("fields".equals(currentFieldName)) { - List values = parser.list(); - String[] fields = values.toArray(new String[values.size()]); - fields(fields); + List fields = null; + if (token == XContentParser.Token.START_ARRAY) { + fields = (List) parser.list(); + } else if (token.isValue()) { + fields = Collections.singletonList(parser.text()); + } + if (fields != null) { + fields(fields.toArray(new String[fields.size()])); + } } else { //here we don't have settings available, unable to throw deprecation exceptions scriptParameterParser.token(currentFieldName, token, parser, ParseFieldMatcher.EMPTY); diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java b/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java index fd4aa7823026..e994b81832f8 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.xcontent.support; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.XContentParser; @@ -286,14 +287,21 @@ public abstract class AbstractXContentParser implements XContentParser { static List readList(XContentParser parser, MapFactory mapFactory) throws IOException { XContentParser.Token token = parser.currentToken(); + if (token == null) { + token = parser.nextToken(); + } if (token == XContentParser.Token.FIELD_NAME) { token = parser.nextToken(); } if (token == XContentParser.Token.START_ARRAY) { token = parser.nextToken(); + } else { + throw new 
ElasticsearchParseException("Failed to parse list: expecting " + + XContentParser.Token.START_ARRAY + " but got " + token); } + ArrayList list = new ArrayList<>(); - for (; token != XContentParser.Token.END_ARRAY; token = parser.nextToken()) { + for (; token != null && token != XContentParser.Token.END_ARRAY; token = parser.nextToken()) { list.add(readValue(parser, mapFactory, token)); } return list; diff --git a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index bcb266133882..d105a4bf63bb 100644 --- a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.action.update; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -36,6 +37,7 @@ import org.elasticsearch.test.ESTestCase; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -179,4 +181,17 @@ public class UpdateRequestTests extends ESTestCase { assertThat(e.getMessage(), equalTo("Failed to derive xcontent")); } } + + // Related to issue 15338 + public void testFieldsParsing() throws Exception { + UpdateRequest request = new UpdateRequest("test", "type1", "1") + .source(new BytesArray("{\"doc\": {\"field1\": \"value1\"}, \"fields\": \"_source\"}")); + assertThat(request.doc().sourceAsMap().get("field1").toString(), equalTo("value1")); + 
assertThat(request.fields(), arrayContaining("_source")); + + request = new UpdateRequest("test", "type2", "2") + .source(new BytesArray("{\"doc\": {\"field2\": \"value2\"}, \"fields\": [\"field1\", \"field2\"]}")); + assertThat(request.doc().sourceAsMap().get("field2").toString(), equalTo("value2")); + assertThat(request.fields(), arrayContaining("field1", "field2")); + } } diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java new file mode 100644 index 000000000000..cce349f417c8 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.xcontent; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.List; + +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.nullValue; + +public class XContentParserTests extends ESTestCase { + + public void testReadList() throws IOException { + assertThat(readList("{\"foo\": [\"bar\"]}"), contains("bar")); + assertThat(readList("{\"foo\": [\"bar\",\"baz\"]}"), contains("bar", "baz")); + assertThat(readList("{\"foo\": [1, 2, 3], \"bar\": 4}"), contains(1, 2, 3)); + assertThat(readList("{\"foo\": [{\"bar\":1},{\"baz\":2},{\"qux\":3}]}"), hasSize(3)); + assertThat(readList("{\"foo\": [null]}"), contains(nullValue())); + assertThat(readList("{\"foo\": []}"), hasSize(0)); + assertThat(readList("{\"foo\": [1]}"), contains(1)); + assertThat(readList("{\"foo\": [1,2]}"), contains(1, 2)); + assertThat(readList("{\"foo\": [{},{},{},{}]}"), hasSize(4)); + } + + public void testReadListThrowsException() throws IOException { + // Calling XContentParser.list() or listOrderedMap() to read a simple + // value or object should throw an exception + assertReadListThrowsException("{\"foo\": \"bar\"}"); + assertReadListThrowsException("{\"foo\": 1, \"bar\": 2}"); + assertReadListThrowsException("{\"foo\": {\"bar\":\"baz\"}}"); + } + + @SuppressWarnings("unchecked") + private static List readList(String source) throws IOException { + try (XContentParser parser = XContentType.JSON.xContent().createParser(source)) { + XContentParser.Token token = parser.nextToken(); + assertThat(token, equalTo(XContentParser.Token.START_OBJECT)); + token = parser.nextToken(); + assertThat(token, equalTo(XContentParser.Token.FIELD_NAME)); + 
assertThat(parser.currentName(), equalTo("foo")); + return (List) (randomBoolean() ? parser.listOrderedMap() : parser.list()); + } + } + + private void assertReadListThrowsException(String source) { + try { + readList(source); + fail("should have thrown a parse exception"); + } catch (Exception e) { + assertThat(e, instanceOf(ElasticsearchParseException.class)); + assertThat(e.getMessage(), containsString("Failed to parse list")); + } + } +} From e91a141233d0231fa3f43320968686b602d414e3 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 14 Mar 2016 14:00:49 +0100 Subject: [PATCH 282/320] Prevent index level setting from being configured on a node level Today we allow to set all kinds of index level settings on the node level which is error prone and difficult to get right in a consistent manner. For instance if some analyzers are setup in a yaml config file some nodes might not have these analyzers and then index creation fails. Nevertheless, this change allows some selected settings to be specified on a node level for instance: * `index.codec` which is used in a hot/cold node architecture and it's value is really per node or per index * `index.store.fs.fs_lock` which is also dependent on the filesystem a node uses All other index level setting must be specified on the index level. For existing clusters the index must be closed and all settings must be updated via the API on each of the indices. 
Closes #16799 --- .../resources/checkstyle_suppressions.xml | 2 - .../settings/AbstractScopedSettings.java | 11 ++-- .../common/settings/IndexScopedSettings.java | 8 +++ .../common/settings/Settings.java | 8 +++ .../common/settings/SettingsModule.java | 27 +++++----- .../org/elasticsearch/index/IndexModule.java | 19 +++++-- .../index/analysis/AnalysisRegistry.java | 4 ++ .../index/engine/EngineConfig.java | 34 +++++++----- .../index/store/FsDirectoryService.java | 8 +-- .../indices/analysis/AnalysisModule.java | 12 +++-- .../indices/stats/IndicesStatsTests.java | 1 - .../cluster/ClusterModuleTests.java | 4 +- .../common/settings/ScopedSettingsTests.java | 33 +++++++++--- .../common/settings/SettingsModuleTests.java | 18 +++++-- .../elasticsearch/index/IndexModuleTests.java | 18 ++++++- .../IndexLifecycleActionIT.java | 10 ++-- .../indices/stats/IndexStatsIT.java | 25 ++++++--- .../store/IndicesStoreIntegrationIT.java | 1 + .../search/child/ChildQuerySearchIT.java | 13 ++--- .../messy/tests/ScriptQuerySearchTests.java | 16 +++--- .../index/analysis/AnalysisTestUtils.java | 54 ------------------- .../analysis/SimpleIcuAnalysisTests.java | 10 ++-- .../SimpleIcuCollationTokenFilterTests.java | 31 ++++------- .../SimpleIcuNormalizerCharFilterTests.java | 10 ++-- .../index/analysis/KuromojiAnalysisTests.java | 10 ++-- .../analysis/SimplePhoneticAnalysisTests.java | 24 +-------- .../SimpleSmartChineseAnalysisTests.java | 25 +-------- .../index/analysis/PolishAnalysisTests.java | 28 +--------- .../SimplePolishTokenFilterTests.java | 32 ++--------- .../elasticsearch/test/ESIntegTestCase.java | 12 +++++ .../test/ESSingleNodeTestCase.java | 10 +++- .../org/elasticsearch/test/ESTestCase.java | 26 ++++++++- .../test/InternalSettingsPlugin.java | 6 +-- .../test/InternalTestCluster.java | 12 ----- .../test/store/MockFSDirectoryService.java | 10 ++-- .../test/store/MockFSIndexStore.java | 2 +- 36 files changed, 276 insertions(+), 298 deletions(-) delete mode 100644 
plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisTestUtils.java diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index a9c73bca1278..87c049ae0b14 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -460,7 +460,6 @@ - @@ -613,7 +612,6 @@ - diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index baed9c0849fd..adffb8e9e015 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -58,9 +58,8 @@ public abstract class AbstractScopedSettings extends AbstractComponent { if (setting.getProperties().contains(scope) == false) { throw new IllegalArgumentException("Setting must be a " + scope + " setting but has: " + setting.getProperties()); } - if (isValidKey(setting.getKey()) == false && (setting.isGroupSetting() && isValidGroupKey(setting.getKey())) == false) { - throw new IllegalArgumentException("illegal settings key: [" + setting.getKey() + "]"); - } + validateSettingKey(setting); + if (setting.hasComplexMatcher()) { Setting overlappingSetting = findOverlappingSetting(setting, complexMatchers); if (overlappingSetting != null) { @@ -76,6 +75,12 @@ public abstract class AbstractScopedSettings extends AbstractComponent { this.keySettings = Collections.unmodifiableMap(keySettings); } + protected void validateSettingKey(Setting setting) { + if (isValidKey(setting.getKey()) == false && (setting.isGroupSetting() && isValidGroupKey(setting.getKey())) == false) { + throw new IllegalArgumentException("illegal settings key: [" + setting.getKey() + "]"); + } + } + protected AbstractScopedSettings(Settings nodeSettings, Settings scopeSettings, 
AbstractScopedSettings other) { super(nodeSettings); this.lastSettingsApplied = scopeSettings; diff --git a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index da6c34bdf4ae..e08e4fc49c5e 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -163,6 +163,14 @@ public final class IndexScopedSettings extends AbstractScopedSettings { return new IndexScopedSettings(settings, this, metaData); } + @Override + protected void validateSettingKey(Setting setting) { + if (setting.getKey().startsWith("index.") == false) { + throw new IllegalArgumentException("illegal settings key: [" + setting.getKey() + "] must start with [index.]"); + } + super.validateSettingKey(setting); + } + public boolean isPrivateSetting(String key) { switch (key) { case IndexMetaData.SETTING_CREATION_DATE: diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index e06e4ad893b2..a6784e561d2e 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -761,6 +761,14 @@ public final class Settings implements ToXContent { return builder; } + /** + * Returns true if this settings object contains no settings + * @return true if this settings object contains no settings + */ + public boolean isEmpty() { + return this.settings.isEmpty(); + } + /** * A builder allowing to put different settings and then {@link #build()} an immutable * settings implementation. 
Use {@link Settings#settingsBuilder()} in order to diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java index 9fc2ee257a00..33233ff627e1 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java @@ -54,8 +54,7 @@ public class SettingsModule extends AbstractModule { final IndexScopedSettings indexScopedSettings = new IndexScopedSettings(settings, new HashSet<>(this.indexSettings.values())); final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(this.nodeSettings.values())); // by now we are fully configured, lets check node level settings for unregistered index settings - indexScopedSettings.validate(settings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE)); - final Predicate acceptOnlyClusterSettings = TRIBE_CLIENT_NODE_SETTINGS_PREDICATE.or(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE).negate(); + final Predicate acceptOnlyClusterSettings = TRIBE_CLIENT_NODE_SETTINGS_PREDICATE.negate(); clusterSettings.validate(settings.filter(acceptOnlyClusterSettings)); validateTribeSettings(settings, clusterSettings); bind(Settings.class).toInstance(settings); @@ -76,21 +75,19 @@ public class SettingsModule extends AbstractModule { registerSettingsFilter(setting.getKey()); } } - - // We validate scope settings. We should have one and only one scope. 
- if (setting.hasNodeScope() && setting.hasIndexScope()) { - throw new IllegalArgumentException("More than one scope has been added to the setting [" + setting.getKey() + "]"); - } - if (setting.hasNodeScope()) { - if (nodeSettings.containsKey(setting.getKey())) { - throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); + if (setting.hasNodeScope() || setting.hasIndexScope()) { + if (setting.hasNodeScope()) { + if (nodeSettings.containsKey(setting.getKey())) { + throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); + } + nodeSettings.put(setting.getKey(), setting); } - nodeSettings.put(setting.getKey(), setting); - } else if (setting.hasIndexScope()) { - if (indexSettings.containsKey(setting.getKey())) { - throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); + if (setting.hasIndexScope()) { + if (indexSettings.containsKey(setting.getKey())) { + throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); + } + indexSettings.put(setting.getKey(), setting); } - indexSettings.put(setting.getKey(), setting); } else { throw new IllegalArgumentException("No scope found for setting [" + setting.getKey() + "]"); } diff --git a/core/src/main/java/org/elasticsearch/index/IndexModule.java b/core/src/main/java/org/elasticsearch/index/IndexModule.java index b6120bd9d780..48230e6ec1e1 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/core/src/main/java/org/elasticsearch/index/IndexModule.java @@ -67,12 +67,13 @@ import java.util.function.Function; public final class IndexModule { public static final Setting INDEX_STORE_TYPE_SETTING = - new Setting<>("index.store.type", "", Function.identity(), Property.IndexScope); + new Setting<>("index.store.type", "", Function.identity(), Property.IndexScope, Property.NodeScope); public static final String SIMILARITY_SETTINGS_PREFIX = "index.similarity"; 
public static final String INDEX_QUERY_CACHE = "index"; public static final String NONE_QUERY_CACHE = "none"; public static final Setting INDEX_QUERY_CACHE_TYPE_SETTING = new Setting<>("index.queries.cache.type", INDEX_QUERY_CACHE, Function.identity(), Property.IndexScope); + // for test purposes only public static final Setting INDEX_QUERY_CACHE_EVERYTHING_SETTING = Setting.boolSetting("index.queries.cache.everything", false, Property.IndexScope); @@ -87,7 +88,7 @@ public final class IndexModule { private final Map> similarities = new HashMap<>(); private final Map> storeTypes = new HashMap<>(); private final Map> queryCaches = new HashMap<>(); - + private final SetOnce forceQueryCacheType = new SetOnce<>(); public IndexModule(IndexSettings indexSettings, IndexStoreConfig indexStoreConfig, AnalysisRegistry analysisRegistry) { this.indexStoreConfig = indexStoreConfig; @@ -265,11 +266,23 @@ public final class IndexModule { } indexSettings.getScopedSettings().addSettingsUpdateConsumer(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING, store::setType); indexSettings.getScopedSettings().addSettingsUpdateConsumer(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, store::setMaxRate); - final String queryCacheType = indexSettings.getValue(INDEX_QUERY_CACHE_TYPE_SETTING); + final String queryCacheType = forceQueryCacheType.get() != null ? forceQueryCacheType.get() : indexSettings.getValue(INDEX_QUERY_CACHE_TYPE_SETTING); final BiFunction queryCacheProvider = queryCaches.get(queryCacheType); final QueryCache queryCache = queryCacheProvider.apply(indexSettings, indicesQueryCache); return new IndexService(indexSettings, environment, new SimilarityService(indexSettings, similarities), shardStoreDeleter, analysisRegistry, engineFactory.get(), servicesProvider, queryCache, store, eventListener, searcherWrapperFactory, mapperRegistry, indicesFieldDataCache, listeners); } + /** + * Forces a certain query cache type. 
If this is set + * the given cache type is overriding the default as well as the type + * set on the index level. + * NOTE: this can only be set once + * + * @see #INDEX_QUERY_CACHE_TYPE_SETTING + */ + public void forceQueryCacheType(String type) { + this.forceQueryCacheType.set(type); + } + } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index a8a7b4fe0042..3c2d6bfb260e 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -85,6 +85,10 @@ public final class AnalysisRegistry implements Closeable { this.analyzers = Collections.unmodifiableMap(analyzerBuilder); } + public HunspellService getHunspellService() { + return hunspellService; + } + /** * Returns a registered {@link TokenizerFactory} provider by name or null if the tokenizer was not registered */ diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java index 14a8f043234a..a290e98f3f7b 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java +++ b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java @@ -40,6 +40,8 @@ import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.IndexingMemoryController; import org.elasticsearch.threadpool.ThreadPool; +import java.util.function.Function; + /* * Holds all the configuration that is used to create an {@link Engine}. * Once {@link Engine} has been created with this object, changes to this @@ -69,20 +71,23 @@ public final class EngineConfig { /** * Index setting to change the low level lucene codec used for writing new segments. * This setting is not realtime updateable. 
+ * This setting is also settable on the node and the index level, it's commonly used in hot/cold node archs where index is likely + * allocated on both `kind` of nodes. */ - public static final Setting INDEX_CODEC_SETTING = new Setting<>("index.codec", "default", (s) -> { - switch(s) { + public static final Setting INDEX_CODEC_SETTING = new Setting<>("index.codec", "default", s -> { + switch (s) { case "default": case "best_compression": case "lucene_default": return s; default: if (Codec.availableCodecs().contains(s) == false) { // we don't error message the not officially supported ones - throw new IllegalArgumentException("unknown value for [index.codec] must be one of [default, best_compression] but was: " + s); + throw new IllegalArgumentException( + "unknown value for [index.codec] must be one of [default, best_compression] but was: " + s); } return s; } - }, Property.IndexScope); + }, Property.IndexScope, Property.NodeScope); /** if set to true the engine will start even if the translog id in the commit point can not be found */ public static final String INDEX_FORCE_NEW_TRANSLOG = "index.engine.force_new_translog"; @@ -97,7 +102,8 @@ public final class EngineConfig { IndexSettings indexSettings, Engine.Warmer warmer, Store store, SnapshotDeletionPolicy deletionPolicy, MergePolicy mergePolicy,Analyzer analyzer, Similarity similarity, CodecService codecService, Engine.EventListener eventListener, - TranslogRecoveryPerformer translogRecoveryPerformer, QueryCache queryCache, QueryCachingPolicy queryCachingPolicy, TranslogConfig translogConfig, TimeValue flushMergesAfter) { + TranslogRecoveryPerformer translogRecoveryPerformer, QueryCache queryCache, QueryCachingPolicy queryCachingPolicy, + TranslogConfig translogConfig, TimeValue flushMergesAfter) { this.shardId = shardId; final Settings settings = indexSettings.getSettings(); this.indexSettings = indexSettings; @@ -138,7 +144,8 @@ public final class EngineConfig { } /** - * Returns the initial index buffer 
size. This setting is only read on startup and otherwise controlled by {@link IndexingMemoryController} + * Returns the initial index buffer size. This setting is only read on startup and otherwise controlled + * by {@link IndexingMemoryController} */ public ByteSizeValue getIndexingBufferSize() { return indexingBufferSize; @@ -146,11 +153,12 @@ public final class EngineConfig { /** * Returns true iff delete garbage collection in the engine should be enabled. This setting is updateable - * in realtime and forces a volatile read. Consumers can safely read this value directly go fetch it's latest value. The default is true + * in realtime and forces a volatile read. Consumers can safely read this value directly go fetch it's latest value. + * The default is true *

      * Engine GC deletion if enabled collects deleted documents from in-memory realtime data structures after a certain amount of - * time ({@link IndexSettings#getGcDeletesInMillis()} if enabled. Before deletes are GCed they will cause re-adding the document that was deleted - * to fail. + * time ({@link IndexSettings#getGcDeletesInMillis()} if enabled. Before deletes are GCed they will cause re-adding the document + * that was deleted to fail. *

      */ public boolean isEnableGcDeletes() { @@ -168,7 +176,8 @@ public final class EngineConfig { } /** - * Returns a thread-pool mainly used to get estimated time stamps from {@link org.elasticsearch.threadpool.ThreadPool#estimatedTimeInMillis()} and to schedule + * Returns a thread-pool mainly used to get estimated time stamps from + * {@link org.elasticsearch.threadpool.ThreadPool#estimatedTimeInMillis()} and to schedule * async force merge calls on the {@link org.elasticsearch.threadpool.ThreadPool.Names#FORCE_MERGE} thread-pool */ public ThreadPool getThreadPool() { @@ -183,8 +192,9 @@ public final class EngineConfig { } /** - * Returns the {@link org.elasticsearch.index.store.Store} instance that provides access to the {@link org.apache.lucene.store.Directory} - * used for the engines {@link org.apache.lucene.index.IndexWriter} to write it's index files to. + * Returns the {@link org.elasticsearch.index.store.Store} instance that provides access to the + * {@link org.apache.lucene.store.Directory} used for the engines {@link org.apache.lucene.index.IndexWriter} to write it's index files + * to. *

      * Note: In order to use this instance the consumer needs to increment the stores reference before it's used the first time and hold * it's reference until it's not needed anymore. diff --git a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java index 933fd7845887..584b98cff334 100644 --- a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java +++ b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java @@ -61,8 +61,9 @@ public class FsDirectoryService extends DirectoryService implements StoreRateLim return SimpleFSLockFactory.INSTANCE; default: throw new IllegalArgumentException("unrecognized [index.store.fs.fs_lock] \"" + s + "\": must be native or simple"); - } - }, Property.IndexScope); + } // can we set on both - node and index level, some nodes might be running on NFS so they might need simple rather than native + }, Property.IndexScope, Property.NodeScope); + private final CounterMetric rateLimitingTimeInNanos = new CounterMetric(); private final ShardPath path; @@ -108,7 +109,8 @@ public class FsDirectoryService extends DirectoryService implements StoreRateLim protected Directory newFSDirectory(Path location, LockFactory lockFactory) throws IOException { - final String storeType = indexSettings.getSettings().get(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.DEFAULT.getSettingsKey()); + final String storeType = indexSettings.getSettings().get(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), + IndexModule.Type.DEFAULT.getSettingsKey()); if (IndexModule.Type.FS.match(storeType) || IndexModule.Type.DEFAULT.match(storeType)) { final FSDirectory open = FSDirectory.open(location, lockFactory); // use lucene defaults if (open instanceof MMapDirectory && Constants.WINDOWS == false) { diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java 
b/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java index e73396fcd7fa..20a1d341cf96 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java @@ -160,15 +160,21 @@ public final class AnalysisModule extends AbstractModule { @Override protected void configure() { try { - HunspellService service = new HunspellService(environment.settings(), environment, knownDictionaries); - AnalysisRegistry registry = new AnalysisRegistry(service, environment, charFilters, tokenFilters, tokenizers, analyzers); - bind(HunspellService.class).toInstance(service); + AnalysisRegistry registry = buildRegistry(); + bind(HunspellService.class).toInstance(registry.getHunspellService()); bind(AnalysisRegistry.class).toInstance(registry); } catch (IOException e) { throw new ElasticsearchException("failed to load hunspell service", e); } } + /** + * Builds an {@link AnalysisRegistry} from the current configuration. + */ + public AnalysisRegistry buildRegistry() throws IOException { + return new AnalysisRegistry(new HunspellService(environment.settings(), environment, knownDictionaries), environment, charFilters, tokenFilters, tokenizers, analyzers); + } + /** * AnalysisProvider is the basic factory interface for registering analysis components like: *

        diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java index 13a2bb299812..77bd7c89927b 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java @@ -99,7 +99,6 @@ public class IndicesStatsTests extends ESSingleNodeTestCase { assertThat(commitStats.getId(), notNullValue()); assertThat(commitStats.getUserData(), hasKey(Translog.TRANSLOG_GENERATION_KEY)); assertThat(commitStats.getUserData(), hasKey(Translog.TRANSLOG_UUID_KEY)); - } } diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java index f1a4496b1360..4a930bc9c282 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java @@ -87,8 +87,8 @@ public class ClusterModuleTests extends ModuleTestCase { public void testRegisterIndexDynamicSetting() { SettingsModule module = new SettingsModule(Settings.EMPTY); - module.registerSetting(Setting.boolSetting("foo.bar", false, Property.Dynamic, Property.IndexScope)); - assertInstanceBinding(module, IndexScopedSettings.class, service -> service.hasDynamicSetting("foo.bar")); + module.registerSetting(Setting.boolSetting("index.foo.bar", false, Property.Dynamic, Property.IndexScope)); + assertInstanceBinding(module, IndexScopedSettings.class, service -> service.hasDynamicSetting("index.foo.bar")); } public void testRegisterAllocationDeciderDuplicate() { diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 60848d0d459e..4ca50245140a 100644 --- 
a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -251,22 +251,41 @@ public class ScopedSettingsTests extends ESTestCase { try { new IndexScopedSettings( - Settings.EMPTY, Collections.singleton(Setting.groupSetting("boo .", Property.IndexScope))); + Settings.EMPTY, Collections.singleton(Setting.groupSetting("foo.bar.", Property.IndexScope))); fail(); } catch (IllegalArgumentException e) { - assertEquals("illegal settings key: [boo .]", e.getMessage()); + assertEquals("illegal settings key: [foo.bar.] must start with [index.]", e.getMessage()); } - new IndexScopedSettings( - Settings.EMPTY, Collections.singleton(Setting.groupSetting("boo.", Property.IndexScope))); + try { new IndexScopedSettings( - Settings.EMPTY, Collections.singleton(Setting.boolSetting("boo.", true, Property.IndexScope))); + Settings.EMPTY, Collections.singleton(Setting.simpleString("foo.bar", Property.IndexScope))); fail(); } catch (IllegalArgumentException e) { - assertEquals("illegal settings key: [boo.]", e.getMessage()); + assertEquals("illegal settings key: [foo.bar] must start with [index.]", e.getMessage()); + } + + try { + new IndexScopedSettings( + Settings.EMPTY, Collections.singleton(Setting.groupSetting("index. foo.", Property.IndexScope))); + fail(); + } catch (IllegalArgumentException e) { + assertEquals("illegal settings key: [index. 
foo.]", e.getMessage()); } new IndexScopedSettings( - Settings.EMPTY, Collections.singleton(Setting.boolSetting("boo", true, Property.IndexScope))); + Settings.EMPTY, Collections.singleton(Setting.groupSetting("index.", Property.IndexScope))); + try { + new IndexScopedSettings( + Settings.EMPTY, Collections.singleton(Setting.boolSetting("index.", true, Property.IndexScope))); + fail(); + } catch (IllegalArgumentException e) { + assertEquals("illegal settings key: [index.]", e.getMessage()); + } + new IndexScopedSettings( + Settings.EMPTY, Collections.singleton(Setting.boolSetting("index.boo", true, Property.IndexScope))); + + new ClusterSettings( + Settings.EMPTY, Collections.singleton(Setting.boolSetting("index.boo", true, Property.NodeScope))); } public void testLoggingUpdates() { diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java index bc6afda9a011..e96949815399 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java @@ -164,12 +164,22 @@ public class SettingsModuleTests extends ModuleTestCase { } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("No scope found for setting")); } - // Those should fail + // Some settings have both scopes - that's fine too if they have per-node defaults + SettingsModule module = new SettingsModule(Settings.EMPTY); + module.registerSetting(Setting.simpleString("foo.bar", Property.IndexScope, Property.NodeScope)); + try { - new SettingsModule(Settings.EMPTY).registerSetting(Setting.simpleString("foo.bar", Property.IndexScope, Property.NodeScope)); - fail("Multiple scopes should fail"); + module.registerSetting(Setting.simpleString("foo.bar", Property.NodeScope)); + fail("already registered"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), 
containsString("More than one scope has been added to the setting")); + assertThat(e.getMessage(), containsString("Cannot register setting [foo.bar] twice")); + } + + try { + module.registerSetting(Setting.simpleString("foo.bar", Property.IndexScope)); + fail("already registered"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("Cannot register setting [foo.bar] twice")); } } } diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index a02c3df00ac4..b7c2c29eb075 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -195,9 +195,9 @@ public class IndexModuleTests extends ESTestCase { public void testListener() throws IOException { - Setting booleanSetting = Setting.boolSetting("foo.bar", false, Property.Dynamic, Property.IndexScope); + Setting booleanSetting = Setting.boolSetting("index.foo.bar", false, Property.Dynamic, Property.IndexScope); IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings, booleanSetting), null, new AnalysisRegistry(null, environment)); - Setting booleanSetting2 = Setting.boolSetting("foo.bar.baz", false, Property.Dynamic, Property.IndexScope); + Setting booleanSetting2 = Setting.boolSetting("index.foo.bar.baz", false, Property.Dynamic, Property.IndexScope); AtomicBoolean atomicBoolean = new AtomicBoolean(false); module.addSettingsUpdateConsumer(booleanSetting, atomicBoolean::set); @@ -333,6 +333,20 @@ public class IndexModuleTests extends ESTestCase { indexService.close("simon says", false); } + public void testForceCacheType() throws IOException { + Settings indexSettings = Settings.settingsBuilder() + .put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), "none") + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + 
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); + IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment)); + module.forceQueryCacheType("custom"); + module.registerQueryCache("custom", (a, b) -> new CustomQueryCache()); + IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry, + new IndicesFieldDataCache(settings, listener)); + assertTrue(indexService.cache().query() instanceof CustomQueryCache); + indexService.close("simon says", false); + } + class CustomQueryCache implements QueryCache { @Override diff --git a/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java b/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java index 0c36a8566826..905e45cf9f73 100644 --- a/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java +++ b/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java @@ -61,18 +61,18 @@ import static org.hamcrest.Matchers.nullValue; public class IndexLifecycleActionIT extends ESIntegTestCase { public void testIndexLifecycleActionsWith11Shards1Backup() throws Exception { Settings settings = settingsBuilder() + .put(indexSettings()) .put(SETTING_NUMBER_OF_SHARDS, 11) .put(SETTING_NUMBER_OF_REPLICAS, 1) - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "0s") .build(); // start one server logger.info("Starting sever1"); - final String server_1 = internalCluster().startNode(settings); + final String server_1 = internalCluster().startNode(); final String node1 = getLocalNodeId(server_1); logger.info("Creating index [test]"); - CreateIndexResponse createIndexResponse = client().admin().indices().create(createIndexRequest("test")).actionGet(); + CreateIndexResponse createIndexResponse = 
client().admin().indices().create(createIndexRequest("test").settings(settings)).actionGet(); assertThat(createIndexResponse.isAcknowledged(), equalTo(true)); logger.info("Running Cluster Health"); @@ -87,7 +87,7 @@ public class IndexLifecycleActionIT extends ESIntegTestCase { logger.info("Starting server2"); // start another server - String server_2 = internalCluster().startNode(settings); + String server_2 = internalCluster().startNode(); // first wait for 2 nodes in the cluster logger.info("Running Cluster Health"); @@ -122,7 +122,7 @@ public class IndexLifecycleActionIT extends ESIntegTestCase { logger.info("Starting server3"); // start another server - String server_3 = internalCluster().startNode(settings); + String server_3 = internalCluster().startNode(); // first wait for 3 nodes in the cluster clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForGreenStatus().waitForNodes("3")).actionGet(); diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index 467aa4d3309a..fd8ee45a0625 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -58,6 +58,7 @@ import java.io.IOException; import java.util.EnumSet; import java.util.Random; +import static org.elasticsearch.cluster.metadata.IndexMetaData.PROTO; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -79,13 +80,23 @@ public class IndexStatsIT extends ESIntegTestCase { //Filter/Query cache is cleaned periodically, default is 60s, so make sure it runs often. 
Thread.sleep for 60s is bad return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)) .put(IndicesService.INDICES_CACHE_CLEAN_INTERVAL_SETTING.getKey(), "1ms") - .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true) - .put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), IndexModule.INDEX_QUERY_CACHE) .build(); } + @Override + public Settings indexSettings() { + return Settings.settingsBuilder().put(super.indexSettings()) + .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true) + .put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), IndexModule.INDEX_QUERY_CACHE) + .build(); + } + + private Settings.Builder settingsBuilder() { + return Settings.builder().put(indexSettings()); + } + public void testFieldDataStats() { - client().admin().indices().prepareCreate("test").setSettings(Settings.settingsBuilder().put("index.number_of_shards", 2)).execute().actionGet(); + client().admin().indices().prepareCreate("test").setSettings(settingsBuilder().put("index.number_of_shards", 2)).execute().actionGet(); ensureGreen(); client().prepareIndex("test", "type", "1").setSource("field", "value1", "field2", "value1").execute().actionGet(); client().prepareIndex("test", "type", "2").setSource("field", "value2", "field2", "value2").execute().actionGet(); @@ -130,7 +141,7 @@ public class IndexStatsIT extends ESIntegTestCase { public void testClearAllCaches() throws Exception { client().admin().indices().prepareCreate("test") - .setSettings(Settings.settingsBuilder().put("index.number_of_replicas", 0).put("index.number_of_shards", 2)) + .setSettings(settingsBuilder().put("index.number_of_replicas", 0).put("index.number_of_shards", 2)) .execute().actionGet(); ensureGreen(); client().admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet(); @@ -276,7 +287,7 @@ public class IndexStatsIT extends ESIntegTestCase { public void testNonThrottleStats() throws Exception { assertAcked(prepareCreate("test") - 
.setSettings(Settings.builder() + .setSettings(settingsBuilder() .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "merge") .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1") .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") @@ -308,7 +319,7 @@ public class IndexStatsIT extends ESIntegTestCase { public void testThrottleStats() throws Exception { assertAcked(prepareCreate("test") - .setSettings(Settings.builder() + .setSettings(settingsBuilder() .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "merge") .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1") .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") @@ -988,7 +999,7 @@ public class IndexStatsIT extends ESIntegTestCase { } public void testFilterCacheStats() throws Exception { - assertAcked(prepareCreate("index").setSettings("number_of_replicas", 0).get()); + assertAcked(prepareCreate("index").setSettings(Settings.builder().put(indexSettings()).put("number_of_replicas", 0).build()).get()); indexRandom(true, client().prepareIndex("index", "type", "1").setSource("foo", "bar"), client().prepareIndex("index", "type", "2").setSource("foo", "baz")); diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index bdc53d0de306..9342ab043872 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -309,6 +309,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { final String node4 = nodesFutures.get().get(3); assertAcked(prepareCreate("test").setSettings(Settings.builder() + .put(indexSettings()) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 3) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "_name", node4) diff --git 
a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java index 2d178488dd96..2fb9366dc145 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java @@ -93,13 +93,14 @@ import static org.hamcrest.Matchers.notNullValue; */ @ClusterScope(scope = Scope.SUITE) public class ChildQuerySearchIT extends ESIntegTestCase { + @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)) - // aggressive filter caching so that we can assert on the filter cache size - .put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), IndexModule.INDEX_QUERY_CACHE) - .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true) - .build(); + public Settings indexSettings() { + return Settings.settingsBuilder().put(super.indexSettings()) + // aggressive filter caching so that we can assert on the filter cache size + .put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), IndexModule.INDEX_QUERY_CACHE) + .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true) + .build(); } public void testSelfReferentialIsForbidden() { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java index 752165902ed6..aa47fe98bb39 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java @@ -50,8 +50,8 @@ public class ScriptQuerySearchTests extends ESIntegTestCase { } @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)) + public Settings indexSettings() { 
+ return Settings.settingsBuilder().put(super.indexSettings()) // aggressive filter caching so that we can assert on the number of iterations of the script filters .put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), IndexModule.INDEX_QUERY_CACHE) .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true) @@ -80,9 +80,9 @@ public class ScriptQuerySearchTests extends ESIntegTestCase { assertThat(response.getHits().totalHits(), equalTo(2L)); assertThat(response.getHits().getAt(0).id(), equalTo("2")); - assertThat((Double) response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(2.0)); + assertThat(response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(2.0)); assertThat(response.getHits().getAt(1).id(), equalTo("3")); - assertThat((Double) response.getHits().getAt(1).fields().get("sNum1").values().get(0), equalTo(3.0)); + assertThat(response.getHits().getAt(1).fields().get("sNum1").values().get(0), equalTo(3.0)); Map params = new HashMap<>(); params.put("param1", 2); @@ -95,7 +95,7 @@ public class ScriptQuerySearchTests extends ESIntegTestCase { assertThat(response.getHits().totalHits(), equalTo(1L)); assertThat(response.getHits().getAt(0).id(), equalTo("3")); - assertThat((Double) response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(3.0)); + assertThat(response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(3.0)); params = new HashMap<>(); params.put("param1", -1); @@ -108,11 +108,11 @@ public class ScriptQuerySearchTests extends ESIntegTestCase { assertThat(response.getHits().totalHits(), equalTo(3L)); assertThat(response.getHits().getAt(0).id(), equalTo("1")); - assertThat((Double) response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(1.0)); + assertThat(response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(1.0)); assertThat(response.getHits().getAt(1).id(), equalTo("2")); - assertThat((Double) 
response.getHits().getAt(1).fields().get("sNum1").values().get(0), equalTo(2.0)); + assertThat(response.getHits().getAt(1).fields().get("sNum1").values().get(0), equalTo(2.0)); assertThat(response.getHits().getAt(2).id(), equalTo("3")); - assertThat((Double) response.getHits().getAt(2).fields().get("sNum1").values().get(0), equalTo(3.0)); + assertThat(response.getHits().getAt(2).fields().get("sNum1").values().get(0), equalTo(3.0)); } private static AtomicInteger scriptCounter = new AtomicInteger(0); diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisTestUtils.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisTestUtils.java deleted file mode 100644 index a952c8982cc5..000000000000 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisTestUtils.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.analysis; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.inject.Injector; -import org.elasticsearch.common.inject.ModulesBuilder; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.EnvironmentModule; -import org.elasticsearch.index.Index; -import org.elasticsearch.indices.analysis.AnalysisModule; -import org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin; -import org.elasticsearch.test.IndexSettingsModule; - -import java.io.IOException; - -import static org.elasticsearch.common.settings.Settings.settingsBuilder; - -public class AnalysisTestUtils { - - public static AnalysisService createAnalysisService(Settings settings) throws IOException { - Index index = new Index("test", "_na_"); - Settings indexSettings = settingsBuilder().put(settings) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .build(); - AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); - new AnalysisICUPlugin().onModule(analysisModule); - Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), - new EnvironmentModule(new Environment(settings)), analysisModule) - .createInjector(); - final AnalysisService analysisService = parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, indexSettings)); - return analysisService; - } -} diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java index efd60427e234..b399dfd34f42 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java +++ 
b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java @@ -20,21 +20,19 @@ package org.elasticsearch.index.analysis; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; +import org.elasticsearch.index.Index; +import org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin; import org.elasticsearch.test.ESTestCase; import java.io.IOException; -import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.index.analysis.AnalysisTestUtils.createAnalysisService; import static org.hamcrest.Matchers.instanceOf; /** */ public class SimpleIcuAnalysisTests extends ESTestCase { public void testDefaultsIcuAnalysis() throws IOException { - Settings settings = settingsBuilder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); - AnalysisService analysisService = createAnalysisService(settings); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), + Settings.EMPTY, new AnalysisICUPlugin()::onModule); TokenizerFactory tokenizerFactory = analysisService.tokenizer("icu_tokenizer"); assertThat(tokenizerFactory, instanceOf(IcuTokenizerFactory.class)); diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java index 632f3f539d64..adf1faaf92f5 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java @@ -27,13 +27,13 @@ import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.KeywordTokenizer; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.elasticsearch.common.settings.Settings; -import 
org.elasticsearch.env.Environment; +import org.elasticsearch.index.Index; +import org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin; import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.io.StringReader; -import static org.elasticsearch.index.analysis.AnalysisTestUtils.createAnalysisService; import static org.hamcrest.Matchers.equalTo; // Tests borrowed from Solr's Icu collation key filter factory test. @@ -46,12 +46,11 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testBasicUsage() throws Exception { Settings settings = Settings.settingsBuilder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "tr") .put("index.analysis.filter.myCollator.strength", "primary") .build(); - AnalysisService analysisService = createAnalysisService(settings); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "I WİLL USE TURKİSH CASING", "ı will use turkish casıng"); @@ -62,13 +61,12 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testNormalization() throws IOException { Settings settings = Settings.settingsBuilder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "tr") .put("index.analysis.filter.myCollator.strength", "primary") .put("index.analysis.filter.myCollator.decomposition", "canonical") .build(); - AnalysisService analysisService = createAnalysisService(settings); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); TokenFilterFactory 
filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "I W\u0049\u0307LL USE TURKİSH CASING", "ı will use turkish casıng"); @@ -79,13 +77,12 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testSecondaryStrength() throws IOException { Settings settings = Settings.settingsBuilder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.strength", "secondary") .put("index.analysis.filter.myCollator.decomposition", "no") .build(); - AnalysisService analysisService = createAnalysisService(settings); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "TESTING", "testing"); @@ -97,13 +94,12 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testIgnorePunctuation() throws IOException { Settings settings = Settings.settingsBuilder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.strength", "primary") .put("index.analysis.filter.myCollator.alternate", "shifted") .build(); - AnalysisService analysisService = createAnalysisService(settings); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "foo-bar", "foo bar"); @@ -115,14 +111,13 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testIgnoreWhitespace() throws 
IOException { Settings settings = Settings.settingsBuilder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.strength", "primary") .put("index.analysis.filter.myCollator.alternate", "shifted") .put("index.analysis.filter.myCollator.variableTop", " ") .build(); - AnalysisService analysisService = createAnalysisService(settings); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "foo bar", "foobar"); @@ -136,12 +131,11 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testNumerics() throws IOException { Settings settings = Settings.settingsBuilder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.numeric", "true") .build(); - AnalysisService analysisService = createAnalysisService(settings); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollation(filterFactory, "foobar-9", "foobar-10", -1); @@ -153,13 +147,12 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testIgnoreAccentsButNotCase() throws IOException { Settings settings = Settings.settingsBuilder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.strength", "primary") 
.put("index.analysis.filter.myCollator.caseLevel", "true") .build(); - AnalysisService analysisService = createAnalysisService(settings); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "résumé", "resume"); @@ -174,13 +167,12 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testUpperCaseFirst() throws IOException { Settings settings = Settings.settingsBuilder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.strength", "tertiary") .put("index.analysis.filter.myCollator.caseFirst", "upper") .build(); - AnalysisService analysisService = createAnalysisService(settings); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollation(filterFactory, "Resume", "resume", -1); @@ -204,12 +196,11 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { String tailoredRules = tailoredCollator.getRules(); Settings settings = Settings.settingsBuilder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.rules", tailoredRules) .put("index.analysis.filter.myCollator.strength", "primary") .build(); - AnalysisService analysisService = createAnalysisService(settings); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); 
assertCollatesToSame(filterFactory, "Töne", "Toene"); diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java index 7ebb783d1dbe..749b04b2260c 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java @@ -22,12 +22,12 @@ package org.elasticsearch.index.analysis; import com.ibm.icu.text.Normalizer2; import org.apache.lucene.analysis.CharFilter; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; +import org.elasticsearch.index.Index; +import org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin; import org.elasticsearch.test.ESTestCase; import java.io.StringReader; -import static org.elasticsearch.index.analysis.AnalysisTestUtils.createAnalysisService; /** * Test @@ -35,10 +35,9 @@ import static org.elasticsearch.index.analysis.AnalysisTestUtils.createAnalysisS public class SimpleIcuNormalizerCharFilterTests extends ESTestCase { public void testDefaultSetting() throws Exception { Settings settings = Settings.settingsBuilder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.char_filter.myNormalizerChar.type", "icu_normalizer") .build(); - AnalysisService analysisService = createAnalysisService(settings); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); CharFilterFactory charFilterFactory = analysisService.charFilter("myNormalizerChar"); String input = "ʰ㌰゙5℃№㈱㌘,バッファーの正規化のテスト.㋐㋑㋒㋓㋔カキクケコザジズゼゾg̈각/각நிเกषिchkʷक्षि"; @@ -58,12 +57,11 @@ public class SimpleIcuNormalizerCharFilterTests extends ESTestCase { public void testNameAndModeSetting() throws Exception { Settings 
settings = Settings.settingsBuilder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.char_filter.myNormalizerChar.type", "icu_normalizer") .put("index.analysis.char_filter.myNormalizerChar.name", "nfkc") .put("index.analysis.char_filter.myNormalizerChar.mode", "decompose") .build(); - AnalysisService analysisService = createAnalysisService(settings); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); CharFilterFactory charFilterFactory = analysisService.charFilter("myNormalizerChar"); String input = "ʰ㌰゙5℃№㈱㌘,バッファーの正規化のテスト.㋐㋑㋒㋓㋔カキクケコザジズゼゾg̈각/각நிเกषिchkʷक्षि"; diff --git a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java index 0b3f026b010f..b81de20d73d4 100644 --- a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java +++ b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java @@ -198,18 +198,20 @@ public class KuromojiAnalysisTests extends ESTestCase { String json = "/org/elasticsearch/index/analysis/kuromoji_analysis.json"; Settings settings = Settings.settingsBuilder() - .put(Environment.PATH_HOME_SETTING.getKey(), home) .loadFromStream(json, getClass().getResourceAsStream(json)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); - final SettingsModule settingsModule = new SettingsModule(settings); + Settings nodeSettings = Settings.settingsBuilder() + .put(Environment.PATH_HOME_SETTING.getKey(), home).build(); + final SettingsModule settingsModule = new SettingsModule(nodeSettings); settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); Index index = new Index("test", "_na_"); - AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); + 
Environment environment = new Environment(nodeSettings); + AnalysisModule analysisModule = new AnalysisModule(environment); new AnalysisKuromojiPlugin().onModule(analysisModule); Injector parentInjector = new ModulesBuilder().add(settingsModule, - new EnvironmentModule(new Environment(settings)), analysisModule) + new EnvironmentModule(environment), analysisModule) .createInjector(); return parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, settings)); diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java index f3d1d12f45af..688394b68440 100644 --- a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java +++ b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java @@ -21,18 +21,10 @@ package org.elasticsearch.index.analysis; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.inject.Injector; -import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.EnvironmentModule; import org.elasticsearch.index.Index; -import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugin.analysis.AnalysisPhoneticPlugin; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.IndexSettingsModule; -import org.elasticsearch.test.InternalSettingsPlugin; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -47,22 +39,10 @@ public class SimplePhoneticAnalysisTests extends ESTestCase { String yaml = "/org/elasticsearch/index/analysis/phonetic-1.yml"; Settings settings = 
settingsBuilder().loadFromStream(yaml, getClass().getResourceAsStream(yaml)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); - AnalysisService analysisService = testSimpleConfiguration(settings); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, + new AnalysisPhoneticPlugin()::onModule); TokenFilterFactory filterFactory = analysisService.tokenFilter("phonetic"); MatcherAssert.assertThat(filterFactory, instanceOf(PhoneticTokenFilterFactory.class)); } - - private AnalysisService testSimpleConfiguration(Settings settings) throws IOException { - Index index = new Index("test", "_na_"); - AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); - new AnalysisPhoneticPlugin().onModule(analysisModule); - SettingsModule settingsModule = new SettingsModule(settings); - settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); - Injector parentInjector = new ModulesBuilder().add(settingsModule, - new EnvironmentModule(new Environment(settings)), analysisModule) - .createInjector(); - return parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, settings)); - } } diff --git a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java index 76761a67c9f4..0fcc42643d4e 100644 --- a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java +++ b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java @@ -19,44 +19,21 @@ package org.elasticsearch.index.analysis; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.inject.Injector; -import 
org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.EnvironmentModule; import org.elasticsearch.index.Index; -import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugin.analysis.smartcn.AnalysisSmartChinesePlugin; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.IndexSettingsModule; -import org.elasticsearch.test.InternalSettingsPlugin; import org.hamcrest.MatcherAssert; import java.io.IOException; -import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.hamcrest.Matchers.instanceOf; /** */ public class SimpleSmartChineseAnalysisTests extends ESTestCase { public void testDefaultsIcuAnalysis() throws IOException { - Index index = new Index("test", "_na_"); - Settings settings = settingsBuilder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .build(); - AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); - new AnalysisSmartChinesePlugin().onModule(analysisModule); - SettingsModule settingsModule = new SettingsModule(settings); - settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); - Injector parentInjector = new ModulesBuilder().add(settingsModule, - new EnvironmentModule(new Environment(settings)), analysisModule) - .createInjector(); - final AnalysisService analysisService = parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, settings)); + final AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), Settings.EMPTY, new AnalysisSmartChinesePlugin()::onModule); TokenizerFactory tokenizerFactory = analysisService.tokenizer("smartcn_tokenizer"); MatcherAssert.assertThat(tokenizerFactory, 
instanceOf(SmartChineseTokenizerTokenizerFactory.class)); } diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java index 8f76c908e4be..9bfcc2c2f3ff 100644 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java @@ -21,46 +21,22 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.pl.PolishAnalyzer; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.inject.Injector; -import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.EnvironmentModule; +import org.elasticsearch.index.Index; import org.elasticsearch.index.analysis.pl.PolishStemTokenFilterFactory; -import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugin.analysis.stempel.AnalysisStempelPlugin; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.IndexSettingsModule; -import org.elasticsearch.test.InternalSettingsPlugin; import org.hamcrest.MatcherAssert; import java.io.IOException; -import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.hamcrest.Matchers.instanceOf; /** */ public class PolishAnalysisTests extends ESTestCase { public void testDefaultsPolishAnalysis() throws IOException { - Settings settings = settingsBuilder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .build(); - - - AnalysisModule analysisModule = new 
AnalysisModule(new Environment(settings)); - new AnalysisStempelPlugin().onModule(analysisModule); - SettingsModule settingsModule = new SettingsModule(settings); - settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); - Injector parentInjector = new ModulesBuilder().add(settingsModule, - new EnvironmentModule(new Environment(settings)), analysisModule) - .createInjector(); - - final AnalysisService analysisService = parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings("test", settings)); + final AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), Settings.EMPTY, new AnalysisStempelPlugin()::onModule); TokenFilterFactory tokenizerFactory = analysisService.tokenFilter("polish_stem"); MatcherAssert.assertThat(tokenizerFactory, instanceOf(PolishStemTokenFilterFactory.class)); diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java index 890f4eceec1c..193cfea68114 100644 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java @@ -24,20 +24,10 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.KeywordTokenizer; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.inject.Injector; -import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; -import org.elasticsearch.env.Environment; -import 
org.elasticsearch.env.EnvironmentModule; import org.elasticsearch.index.Index; -import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugin.analysis.stempel.AnalysisStempelPlugin; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.IndexSettingsModule; -import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; import java.io.StringReader; @@ -57,11 +47,9 @@ public class SimplePolishTokenFilterTests extends ESTestCase { private void testToken(String source, String expected) throws IOException { Index index = new Index("test", "_na_"); Settings settings = Settings.settingsBuilder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myStemmer.type", "polish_stem") .build(); - AnalysisService analysisService = createAnalysisService(index, settings); + AnalysisService analysisService = createAnalysisService(index, settings, new AnalysisStempelPlugin()::onModule); TokenFilterFactory filterFactory = analysisService.tokenFilter("myStemmer"); @@ -77,12 +65,8 @@ public class SimplePolishTokenFilterTests extends ESTestCase { } private void testAnalyzer(String source, String... 
expected_terms) throws IOException { - Index index = new Index("test", "_na_"); - Settings settings = Settings.settingsBuilder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .build(); - AnalysisService analysisService = createAnalysisService(index, settings); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), Settings.EMPTY, + new AnalysisStempelPlugin()::onModule); Analyzer analyzer = analysisService.analyzer("polish").analyzer(); @@ -97,14 +81,4 @@ public class SimplePolishTokenFilterTests extends ESTestCase { } } - private AnalysisService createAnalysisService(Index index, Settings settings) throws IOException { - AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); - new AnalysisStempelPlugin().onModule(analysisModule); - SettingsModule settingsModule = new SettingsModule(settings); - settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); - Injector parentInjector = new ModulesBuilder().add(settingsModule, - new EnvironmentModule(new Environment(settings)), analysisModule) - .createInjector(); - return parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, settings)); - } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 493efa9021e2..f0df49cf3922 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -98,6 +98,7 @@ import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.env.Environment; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; import 
org.elasticsearch.index.MockEngineFactoryPlugin; import org.elasticsearch.index.IndexSettings; @@ -388,6 +389,7 @@ public abstract class ESIntegTestCase extends ESTestCase { } else { randomSettingsBuilder.put("index.codec", CodecService.LUCENE_DEFAULT_CODEC); } + XContentBuilder mappings = null; if (frequently() && randomDynamicTemplates()) { mappings = XContentFactory.jsonBuilder().startObject().startObject("_default_"); @@ -454,7 +456,15 @@ public abstract class ESIntegTestCase extends ESTestCase { for (String setting : randomSettingsBuilder.internalMap().keySet()) { assertThat("non index. prefix setting set on index template, its a node setting...", setting, startsWith("index.")); } + // always default delayed allocation to 0 to make sure we have tests are not delayed + randomSettingsBuilder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), 0); + if (randomBoolean()) { + randomSettingsBuilder.put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), randomBoolean() ? 
IndexModule.INDEX_QUERY_CACHE : IndexModule.NONE_QUERY_CACHE); + } + if (randomBoolean()) { + randomSettingsBuilder.put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), randomBoolean()); + } PutIndexTemplateRequestBuilder putTemplate = client().admin().indices() .preparePutTemplate("random_index_template") .setTemplate("*") @@ -740,6 +750,8 @@ public abstract class ESIntegTestCase extends ESTestCase { logger.info("using custom data_path for index: [{}]", dataPath); builder.put(IndexMetaData.SETTING_DATA_PATH, dataPath); } + // always default delayed allocation to 0 to make sure we have tests are not delayed + builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), 0); return builder.build(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index d72afca41bfc..8eeb96a94bf8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -23,6 +23,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequestBuilder; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; @@ -84,6 +85,12 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { // SERVICE_UNAVAILABLE/1/state not recovered / initialized block ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForGreenStatus().get(); assertFalse(clusterHealthResponse.isTimedOut()); + client().admin().indices() + 
.preparePutTemplate("random_index_template") + .setTemplate("*") + .setOrder(0) + .setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get(); } private static void stopNode() throws IOException { @@ -172,8 +179,7 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { // This needs to tie into the ESIntegTestCase#indexSettings() method .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), createTempDir().getParent()) .put("node.name", nodeName()) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .put("script.inline", "true") .put("script.indexed", "true") .put(EsExecutors.PROCESSORS_SETTING.getKey(), 1) // limit the number of threads created diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index a8f642900c49..23e58b0ed17b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -29,7 +29,6 @@ import com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter; -import junit.framework.AssertionFailedError; import org.apache.lucene.uninverting.UninvertingReader; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; @@ -46,10 +45,14 @@ import org.elasticsearch.common.io.PathUtilsForTesting; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.util.MockBigArrays; import 
org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.analysis.AnalysisService; +import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.search.MockSearchService; import org.elasticsearch.test.junit.listeners.LoggingListener; import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter; @@ -79,6 +82,7 @@ import java.util.function.BooleanSupplier; import java.util.function.Consumer; import java.util.function.Supplier; +import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList; import static org.hamcrest.Matchers.equalTo; @@ -680,4 +684,24 @@ public abstract class ESTestCase extends LuceneTestCase { } return elapsed; } + + /** + * Creates an AnalysisService to test analysis factories and analyzers. + */ + @SafeVarargs + public static AnalysisService createAnalysisService(Index index, Settings settings, Consumer... 
moduleConsumers) throws IOException { + Settings indexSettings = settingsBuilder().put(settings) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build(); + Settings nodeSettings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); + Environment env = new Environment(nodeSettings); + AnalysisModule analysisModule = new AnalysisModule(env); + for (Consumer consumer : moduleConsumers) { + consumer.accept(analysisModule); + } + SettingsModule settingsModule = new SettingsModule(nodeSettings); + settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); + final AnalysisService analysisService = analysisModule.buildRegistry().build(IndexSettingsModule.newIndexSettings(index, indexSettings)); + return analysisService; + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java index b57afd5df2d7..f76ae7b4b56c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java @@ -36,11 +36,11 @@ public final class InternalSettingsPlugin extends Plugin { } public static final Setting VERSION_CREATED = - Setting.intSetting("index.version.created", 0, Property.IndexScope); + Setting.intSetting("index.version.created", 0, Property.IndexScope, Property.NodeScope); public static final Setting MERGE_ENABLED = - Setting.boolSetting("index.merge.enabled", true, Property.IndexScope); + Setting.boolSetting("index.merge.enabled", true, Property.IndexScope, Property.NodeScope); public static final Setting INDEX_CREATION_DATE_SETTING = - Setting.longSetting(IndexMetaData.SETTING_CREATION_DATE, -1, -1, Property.IndexScope); + Setting.longSetting(IndexMetaData.SETTING_CREATION_DATE, -1, -1, Property.IndexScope, Property.NodeScope); public void onModule(SettingsModule module) { 
module.registerSetting(VERSION_CREATED); diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index f8dc889a6b6d..a9d0f483e5aa 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -43,7 +43,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodeService; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.common.Nullable; @@ -419,14 +418,6 @@ public final class InternalTestCluster extends TestCluster { builder.put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING.getKey(), "noop"); } - if (random.nextBoolean()) { - builder.put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), random.nextBoolean() ? 
IndexModule.INDEX_QUERY_CACHE : IndexModule.NONE_QUERY_CACHE); - } - - if (random.nextBoolean()) { - builder.put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), random.nextBoolean()); - } - if (random.nextBoolean()) { if (random.nextInt(10) == 0) { // do something crazy slow here builder.put(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(), new ByteSizeValue(RandomInts.randomIntBetween(random, 1, 10), ByteSizeUnit.MB)); @@ -457,9 +448,6 @@ public final class InternalTestCluster extends TestCluster { builder.put(ScriptService.SCRIPT_CACHE_EXPIRE_SETTING.getKey(), TimeValue.timeValueMillis(RandomInts.randomIntBetween(random, 750, 10000000))); } - // always default delayed allocation to 0 to make sure we have tests are not delayed - builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), 0); - return builder.build(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java index c50c1ed5446b..7d17746d54fa 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java @@ -61,15 +61,15 @@ import java.util.Random; public class MockFSDirectoryService extends FsDirectoryService { public static final Setting RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING = - Setting.doubleSetting("index.store.mock.random.io_exception_rate_on_open", 0.0d, 0.0d, Property.IndexScope); + Setting.doubleSetting("index.store.mock.random.io_exception_rate_on_open", 0.0d, 0.0d, Property.IndexScope, Property.NodeScope); public static final Setting RANDOM_IO_EXCEPTION_RATE_SETTING = - Setting.doubleSetting("index.store.mock.random.io_exception_rate", 0.0d, 0.0d, Property.IndexScope); + Setting.doubleSetting("index.store.mock.random.io_exception_rate", 0.0d, 0.0d, Property.IndexScope, 
Property.NodeScope); public static final Setting RANDOM_PREVENT_DOUBLE_WRITE_SETTING = - Setting.boolSetting("index.store.mock.random.prevent_double_write", true, Property.IndexScope);// true is default in MDW + Setting.boolSetting("index.store.mock.random.prevent_double_write", true, Property.IndexScope, Property.NodeScope);// true is default in MDW public static final Setting RANDOM_NO_DELETE_OPEN_FILE_SETTING = - Setting.boolSetting("index.store.mock.random.no_delete_open_file", true, Property.IndexScope);// true is default in MDW + Setting.boolSetting("index.store.mock.random.no_delete_open_file", true, Property.IndexScope, Property.NodeScope);// true is default in MDW public static final Setting CRASH_INDEX_SETTING = - Setting.boolSetting("index.store.mock.random.crash_index", true, Property.IndexScope);// true is default in MDW + Setting.boolSetting("index.store.mock.random.crash_index", true, Property.IndexScope, Property.NodeScope);// true is default in MDW private final FsDirectoryService delegateService; private final Random random; diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java index 44e3ad598ebb..d44cf60e9e3b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java @@ -46,7 +46,7 @@ import java.util.Map; public class MockFSIndexStore extends IndexStore { public static final Setting INDEX_CHECK_INDEX_ON_CLOSE_SETTING = - Setting.boolSetting("index.store.mock.check_index_on_close", true, Property.IndexScope); + Setting.boolSetting("index.store.mock.check_index_on_close", true, Property.IndexScope, Property.NodeScope); public static class TestPlugin extends Plugin { @Override From 68282dd9e9833fa16ce413f7bb46ea808ddb5598 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 17 Mar 2016 14:58:57 +0100 
Subject: [PATCH 283/320] parent/child: `parent_id` query should take the child type into account too. If this query doesn't take the child type into account then it can match other child document types pointing to the same parent type and that have the same id too. --- .../index/query/ParentIdQueryBuilder.java | 12 +++++++++++- .../index/query/ParentIdQueryBuilderTests.java | 15 ++++++++++++--- 2 files changed, 23 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/ParentIdQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/ParentIdQueryBuilder.java index f9bd7623f35e..4a2efa95c9a3 100644 --- a/core/src/main/java/org/elasticsearch/index/query/ParentIdQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/ParentIdQueryBuilder.java @@ -19,13 +19,18 @@ package org.elasticsearch.index.query; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DocValuesTermsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.internal.ParentFieldMapper; +import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import java.io.IOException; import java.util.Objects; @@ -71,7 +76,12 @@ public final class ParentIdQueryBuilder extends AbstractQueryBuilder Date: Thu, 17 Mar 2016 17:07:20 +0100 Subject: [PATCH 284/320] Debug log on testMergesHappening --- .../elasticsearch/index/engine/InternalEngineMergeIT.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineMergeIT.java 
b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineMergeIT.java index a844f971eac7..cf56f41c83ad 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineMergeIT.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineMergeIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.hamcrest.Matchers; import java.io.IOException; @@ -38,8 +39,10 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFa @ClusterScope(numDataNodes = 1, scope = Scope.SUITE) public class InternalEngineMergeIT extends ESIntegTestCase { + + @TestLogging("_root:DEBUG") public void testMergesHappening() throws InterruptedException, IOException, ExecutionException { - final int numOfShards = randomIntBetween(1,5); + final int numOfShards = randomIntBetween(1, 5); // some settings to keep num segments low assertAcked(prepareCreate("test").setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numOfShards) From 800c844ebd63d4b6eb00b56ef1ea18b3773e4219 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Thu, 17 Mar 2016 10:15:13 -0700 Subject: [PATCH 285/320] Split up Analyzer and Writer into multiple pieces. 
Closes #17158 --- .../org/elasticsearch/painless/Analyzer.java | 2859 +---------------- .../painless/AnalyzerCaster.java | 563 ++++ .../painless/AnalyzerExpression.java | 868 +++++ .../painless/AnalyzerExternal.java | 816 +++++ .../painless/AnalyzerPromoter.java | 281 ++ .../painless/AnalyzerStatement.java | 581 ++++ .../painless/AnalyzerUtility.java | 144 + .../org/elasticsearch/painless/Compiler.java | 2 +- .../org/elasticsearch/painless/Metadata.java | 57 +- .../org/elasticsearch/painless/Writer.java | 2601 +++------------ .../elasticsearch/painless/WriterCaster.java | 86 + .../painless/WriterConstants.java | 138 + .../painless/WriterExpression.java | 684 ++++ .../painless/WriterExternal.java | 769 +++++ .../painless/WriterStatement.java | 391 +++ .../elasticsearch/painless/WriterUtility.java | 387 +++ 16 files changed, 6166 insertions(+), 5061 deletions(-) create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerExpression.java create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerExternal.java create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerPromoter.java create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerStatement.java create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerUtility.java create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterCaster.java create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterExpression.java create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterExternal.java create mode 100644 
modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterStatement.java create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterUtility.java diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java index 33ab695d5278..090667b45437 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java @@ -19,21 +19,10 @@ package org.elasticsearch.painless; -import org.antlr.v4.runtime.ParserRuleContext; -import org.elasticsearch.painless.Definition.Cast; -import org.elasticsearch.painless.Definition.Constructor; -import org.elasticsearch.painless.Definition.Field; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.Pair; -import org.elasticsearch.painless.Definition.Sort; -import org.elasticsearch.painless.Definition.Struct; -import org.elasticsearch.painless.Definition.Transform; -import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.PainlessParser.AfterthoughtContext; import org.elasticsearch.painless.PainlessParser.ArgumentsContext; import org.elasticsearch.painless.PainlessParser.AssignmentContext; import org.elasticsearch.painless.PainlessParser.BinaryContext; -import org.elasticsearch.painless.PainlessParser.BlockContext; import org.elasticsearch.painless.PainlessParser.BoolContext; import org.elasticsearch.painless.PainlessParser.BreakContext; import org.elasticsearch.painless.PainlessParser.CastContext; @@ -47,8 +36,8 @@ import org.elasticsearch.painless.PainlessParser.DecltypeContext; import org.elasticsearch.painless.PainlessParser.DeclvarContext; import org.elasticsearch.painless.PainlessParser.DoContext; import org.elasticsearch.painless.PainlessParser.EmptyContext; +import 
org.elasticsearch.painless.PainlessParser.EmptyscopeContext; import org.elasticsearch.painless.PainlessParser.ExprContext; -import org.elasticsearch.painless.PainlessParser.ExpressionContext; import org.elasticsearch.painless.PainlessParser.ExtbraceContext; import org.elasticsearch.painless.PainlessParser.ExtcallContext; import org.elasticsearch.painless.PainlessParser.ExtcastContext; @@ -75,7 +64,6 @@ import org.elasticsearch.painless.PainlessParser.PreincContext; import org.elasticsearch.painless.PainlessParser.ReturnContext; import org.elasticsearch.painless.PainlessParser.SingleContext; import org.elasticsearch.painless.PainlessParser.SourceContext; -import org.elasticsearch.painless.PainlessParser.StatementContext; import org.elasticsearch.painless.PainlessParser.ThrowContext; import org.elasticsearch.painless.PainlessParser.TrapContext; import org.elasticsearch.painless.PainlessParser.TrueContext; @@ -83,3077 +71,384 @@ import org.elasticsearch.painless.PainlessParser.TryContext; import org.elasticsearch.painless.PainlessParser.UnaryContext; import org.elasticsearch.painless.PainlessParser.WhileContext; -import java.util.ArrayDeque; -import java.util.Arrays; -import java.util.Deque; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.painless.PainlessParser.ADD; -import static org.elasticsearch.painless.PainlessParser.BWAND; -import static org.elasticsearch.painless.PainlessParser.BWOR; -import static org.elasticsearch.painless.PainlessParser.BWXOR; -import static org.elasticsearch.painless.PainlessParser.DIV; -import static org.elasticsearch.painless.PainlessParser.LSH; -import static org.elasticsearch.painless.PainlessParser.MUL; -import static org.elasticsearch.painless.PainlessParser.REM; -import static org.elasticsearch.painless.PainlessParser.RSH; -import static org.elasticsearch.painless.PainlessParser.SUB; -import static org.elasticsearch.painless.PainlessParser.USH; - class Analyzer extends 
PainlessParserBaseVisitor { - private static class Variable { - final String name; - final Type type; - final int slot; - - private Variable(final String name, final Type type, final int slot) { - this.name = name; - this.type = type; - this.slot = slot; - } - } - static void analyze(final Metadata metadata) { new Analyzer(metadata); } - private final Metadata metadata; - private final Definition definition; - private final CompilerSettings settings; - - private final Deque scopes = new ArrayDeque<>(); - private final Deque variables = new ArrayDeque<>(); + private final AnalyzerStatement statement; + private final AnalyzerExpression expression; + private final AnalyzerExternal external; private Analyzer(final Metadata metadata) { - this.metadata = metadata; - definition = metadata.definition; - settings = metadata.settings; + final Definition definition = metadata.definition; - incrementScope(); - addVariable(null, "#this", definition.execType); - metadata.inputValueSlot = addVariable(null, "input", definition.smapType).slot; - metadata.scoreValueSlot = addVariable(null, "_score", definition.floatType).slot; - metadata.loopCounterSlot = addVariable(null, "#loop", definition.intType).slot; + final AnalyzerUtility utility = new AnalyzerUtility(); + final AnalyzerCaster caster = new AnalyzerCaster(definition); + final AnalyzerPromoter promoter = new AnalyzerPromoter(definition); + + statement = new AnalyzerStatement(metadata, this, utility, caster); + expression = new AnalyzerExpression(metadata, this, caster, promoter); + external = new AnalyzerExternal(metadata, this, utility, caster, promoter); + + utility.incrementScope(); + utility.addVariable(null, "#this", definition.execType); + metadata.inputValueSlot = utility.addVariable(null, "input", definition.smapType).slot; + metadata.scoreValueSlot = utility.addVariable(null, "_score", definition.floatType).slot; + metadata.loopCounterSlot = utility.addVariable(null, "#loop", definition.intType).slot; 
metadata.createStatementMetadata(metadata.root); visit(metadata.root); - decrementScope(); - } - - void incrementScope() { - scopes.push(0); - } - - void decrementScope() { - int remove = scopes.pop(); - - while (remove > 0) { - variables.pop(); - --remove; - } - } - - Variable getVariable(final String name) { - final Iterator itr = variables.iterator(); - - while (itr.hasNext()) { - final Variable variable = itr.next(); - - if (variable.name.equals(name)) { - return variable; - } - } - - return null; - } - - Variable addVariable(final ParserRuleContext source, final String name, final Type type) { - if (getVariable(name) != null) { - if (source == null) { - throw new IllegalArgumentException("Argument name [" + name + "] already defined within the scope."); - } else { - throw new IllegalArgumentException( - Metadata.error(source) + "Variable name [" + name + "] already defined within the scope."); - } - } - - final Variable previous = variables.peekFirst(); - int slot = 0; - - if (previous != null) { - slot += previous.slot + previous.type.type.getSize(); - } - - final Variable variable = new Variable(name, type, slot); - variables.push(variable); - - final int update = scopes.pop() + 1; - scopes.push(update); - - return variable; + utility.decrementScope(); } @Override public Void visitSource(final SourceContext ctx) { - final Metadata.StatementMetadata sourcesmd = metadata.getStatementMetadata(ctx); - final List statectxs = ctx.statement(); - final StatementContext lastctx = statectxs.get(statectxs.size() - 1); - - incrementScope(); - - for (final StatementContext statectx : statectxs) { - if (sourcesmd.allLast) { - throw new IllegalArgumentException(Metadata.error(statectx) + - "Statement will never be executed because all prior paths escape."); - } - - final Metadata.StatementMetadata statesmd = metadata.createStatementMetadata(statectx); - statesmd.lastSource = statectx == lastctx; - visit(statectx); - - sourcesmd.methodEscape = statesmd.methodEscape; - 
sourcesmd.allLast = statesmd.allLast; - } - - decrementScope(); + statement.processSource(ctx); return null; } @Override public Void visitIf(final IfContext ctx) { - final Metadata.StatementMetadata ifsmd = metadata.getStatementMetadata(ctx); - - final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); - expremd.to = definition.booleanType; - visit(exprctx); - markCast(expremd); - - if (expremd.postConst != null) { - throw new IllegalArgumentException(Metadata.error(ctx) + "If statement is not necessary."); - } - - final BlockContext blockctx0 = ctx.block(0); - final Metadata.StatementMetadata blocksmd0 = metadata.createStatementMetadata(blockctx0); - blocksmd0.lastSource = ifsmd.lastSource; - blocksmd0.inLoop = ifsmd.inLoop; - blocksmd0.lastLoop = ifsmd.lastLoop; - incrementScope(); - visit(blockctx0); - decrementScope(); - - ifsmd.anyContinue = blocksmd0.anyContinue; - ifsmd.anyBreak = blocksmd0.anyBreak; - - ifsmd.count = blocksmd0.count; - - if (ctx.ELSE() != null) { - final BlockContext blockctx1 = ctx.block(1); - final Metadata.StatementMetadata blocksmd1 = metadata.createStatementMetadata(blockctx1); - blocksmd1.lastSource = ifsmd.lastSource; - incrementScope(); - visit(blockctx1); - decrementScope(); - - ifsmd.methodEscape = blocksmd0.methodEscape && blocksmd1.methodEscape; - ifsmd.loopEscape = blocksmd0.loopEscape && blocksmd1.loopEscape; - ifsmd.allLast = blocksmd0.allLast && blocksmd1.allLast; - ifsmd.anyContinue |= blocksmd1.anyContinue; - ifsmd.anyBreak |= blocksmd1.anyBreak; - - ifsmd.count = Math.max(ifsmd.count, blocksmd1.count); - } + statement.processIf(ctx); return null; } @Override public Void visitWhile(final WhileContext ctx) { - final Metadata.StatementMetadata whilesmd = metadata.getStatementMetadata(ctx); - - incrementScope(); - - final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - final 
Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); - expremd.to = definition.booleanType; - visit(exprctx); - markCast(expremd); - - boolean continuous = false; - - if (expremd.postConst != null) { - continuous = (boolean)expremd.postConst; - - if (!continuous) { - throw new IllegalArgumentException(Metadata.error(ctx) + "The loop will never be executed."); - } - - if (ctx.empty() != null) { - throw new IllegalArgumentException(Metadata.error(ctx) + "The loop will never exit."); - } - } - - final BlockContext blockctx = ctx.block(); - - if (blockctx != null) { - final Metadata.StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); - blocksmd.beginLoop = true; - blocksmd.inLoop = true; - visit(blockctx); - - if (blocksmd.loopEscape && !blocksmd.anyContinue) { - throw new IllegalArgumentException(Metadata.error(ctx) + "All paths escape so the loop is not necessary."); - } - - if (continuous && !blocksmd.anyBreak) { - whilesmd.methodEscape = true; - whilesmd.allLast = true; - } - } - - whilesmd.count = 1; - - decrementScope(); + statement.processWhile(ctx); return null; } @Override public Void visitDo(final DoContext ctx) { - final Metadata.StatementMetadata dosmd = metadata.getStatementMetadata(ctx); - - incrementScope(); - - final BlockContext blockctx = ctx.block(); - final Metadata.StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); - blocksmd.beginLoop = true; - blocksmd.inLoop = true; - visit(blockctx); - - if (blocksmd.loopEscape && !blocksmd.anyContinue) { - throw new IllegalArgumentException(Metadata.error(ctx) + "All paths escape so the loop is not necessary."); - } - - final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); - expremd.to = definition.booleanType; - visit(exprctx); - markCast(expremd); - - if (expremd.postConst != null) { - final boolean continuous = 
(boolean)expremd.postConst; - - if (!continuous) { - throw new IllegalArgumentException(Metadata.error(ctx) + "All paths escape so the loop is not necessary."); - } - - if (!blocksmd.anyBreak) { - dosmd.methodEscape = true; - dosmd.allLast = true; - } - } - - dosmd.count = 1; - - decrementScope(); + statement.processDo(ctx); return null; } @Override public Void visitFor(final ForContext ctx) { - final Metadata.StatementMetadata forsmd = metadata.getStatementMetadata(ctx); - boolean continuous = false; - - incrementScope(); - - final InitializerContext initctx = ctx.initializer(); - - if (initctx != null) { - metadata.createStatementMetadata(initctx); - visit(initctx); - } - - final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - - if (exprctx != null) { - final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); - expremd.to = definition.booleanType; - visit(exprctx); - markCast(expremd); - - if (expremd.postConst != null) { - continuous = (boolean)expremd.postConst; - - if (!continuous) { - throw new IllegalArgumentException(Metadata.error(ctx) + "The loop will never be executed."); - } - - if (ctx.empty() != null) { - throw new IllegalArgumentException(Metadata.error(ctx) + "The loop is continuous."); - } - } - } else { - continuous = true; - } - - final AfterthoughtContext atctx = ctx.afterthought(); - - if (atctx != null) { - metadata.createStatementMetadata(atctx); - visit(atctx); - } - - final BlockContext blockctx = ctx.block(); - - if (blockctx != null) { - final Metadata.StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); - blocksmd.beginLoop = true; - blocksmd.inLoop = true; - visit(blockctx); - - if (blocksmd.loopEscape && !blocksmd.anyContinue) { - throw new IllegalArgumentException(Metadata.error(ctx) + "All paths escape so the loop is not necessary."); - } - - if (continuous && !blocksmd.anyBreak) { - forsmd.methodEscape = true; - forsmd.allLast = true; - } - } - - 
forsmd.count = 1; - - decrementScope(); + statement.processFor(ctx); return null; } @Override public Void visitDecl(final DeclContext ctx) { - final Metadata.StatementMetadata declsmd = metadata.getStatementMetadata(ctx); - - final DeclarationContext declctx = ctx.declaration(); - metadata.createStatementMetadata(declctx); - visit(declctx); - - declsmd.count = 1; + statement.processDecl(ctx); return null; } @Override public Void visitContinue(final ContinueContext ctx) { - final Metadata.StatementMetadata continuesmd = metadata.getStatementMetadata(ctx); - - if (!continuesmd.inLoop) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Cannot have a continue statement outside of a loop."); - } - - if (continuesmd.lastLoop) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Unnessary continue statement at the end of a loop."); - } - - continuesmd.allLast = true; - continuesmd.anyContinue = true; - - continuesmd.count = 1; + statement.processContinue(ctx); return null; } @Override public Void visitBreak(final BreakContext ctx) { - final Metadata.StatementMetadata breaksmd = metadata.getStatementMetadata(ctx); - - if (!breaksmd.inLoop) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Cannot have a break statement outside of a loop."); - } - - breaksmd.loopEscape = true; - breaksmd.allLast = true; - breaksmd.anyBreak = true; - - breaksmd.count = 1; + statement.processBreak(ctx); return null; } @Override public Void visitReturn(final ReturnContext ctx) { - final Metadata.StatementMetadata returnsmd = metadata.getStatementMetadata(ctx); - - final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); - expremd.to = definition.objectType; - visit(exprctx); - markCast(expremd); - - returnsmd.methodEscape = true; - returnsmd.loopEscape = true; - returnsmd.allLast = true; - - returnsmd.count = 1; + statement.processReturn(ctx); 
return null; } @Override public Void visitTry(final TryContext ctx) { - final Metadata.StatementMetadata trysmd = metadata.getStatementMetadata(ctx); - - final BlockContext blockctx = ctx.block(); - final Metadata.StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); - blocksmd.lastSource = trysmd.lastSource; - blocksmd.inLoop = trysmd.inLoop; - blocksmd.lastLoop = trysmd.lastLoop; - incrementScope(); - visit(blockctx); - decrementScope(); - - trysmd.methodEscape = blocksmd.methodEscape; - trysmd.loopEscape = blocksmd.loopEscape; - trysmd.allLast = blocksmd.allLast; - trysmd.anyContinue = blocksmd.anyContinue; - trysmd.anyBreak = blocksmd.anyBreak; - - int trapcount = 0; - - for (final TrapContext trapctx : ctx.trap()) { - final Metadata.StatementMetadata trapsmd = metadata.createStatementMetadata(trapctx); - trapsmd.lastSource = trysmd.lastSource; - trapsmd.inLoop = trysmd.inLoop; - trapsmd.lastLoop = trysmd.lastLoop; - incrementScope(); - visit(trapctx); - decrementScope(); - - trysmd.methodEscape &= trapsmd.methodEscape; - trysmd.loopEscape &= trapsmd.loopEscape; - trysmd.allLast &= trapsmd.allLast; - trysmd.anyContinue |= trapsmd.anyContinue; - trysmd.anyBreak |= trapsmd.anyBreak; - - trapcount = Math.max(trapcount, trapsmd.count); - } - - trysmd.count = blocksmd.count + trapcount; + statement.processTry(ctx); return null; } @Override public Void visitThrow(final ThrowContext ctx) { - final Metadata.StatementMetadata throwsmd = metadata.getStatementMetadata(ctx); - - final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); - expremd.to = definition.exceptionType; - visit(exprctx); - markCast(expremd); - - throwsmd.methodEscape = true; - throwsmd.loopEscape = true; - throwsmd.allLast = true; - - throwsmd.count = 1; + statement.processThrow(ctx); return null; } @Override public Void visitExpr(final ExprContext ctx) { - final 
Metadata.StatementMetadata exprsmd = metadata.getStatementMetadata(ctx); - final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); - expremd.read = exprsmd.lastSource; - visit(exprctx); - - if (!expremd.statement && !exprsmd.lastSource) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Not a statement."); - } - - final boolean rtn = exprsmd.lastSource && expremd.from.sort != Sort.VOID; - exprsmd.methodEscape = rtn; - exprsmd.loopEscape = rtn; - exprsmd.allLast = rtn; - expremd.to = rtn ? definition.objectType : expremd.from; - markCast(expremd); - - exprsmd.count = 1; + statement.processExpr(ctx); return null; } @Override public Void visitMultiple(final MultipleContext ctx) { - final Metadata.StatementMetadata multiplesmd = metadata.getStatementMetadata(ctx); - final List statectxs = ctx.statement(); - final StatementContext lastctx = statectxs.get(statectxs.size() - 1); - - for (StatementContext statectx : statectxs) { - if (multiplesmd.allLast) { - throw new IllegalArgumentException(Metadata.error(statectx) + - "Statement will never be executed because all prior paths escape."); - } - - final Metadata.StatementMetadata statesmd = metadata.createStatementMetadata(statectx); - statesmd.lastSource = multiplesmd.lastSource && statectx == lastctx; - statesmd.inLoop = multiplesmd.inLoop; - statesmd.lastLoop = (multiplesmd.beginLoop || multiplesmd.lastLoop) && statectx == lastctx; - visit(statectx); - - multiplesmd.methodEscape = statesmd.methodEscape; - multiplesmd.loopEscape = statesmd.loopEscape; - multiplesmd.allLast = statesmd.allLast; - multiplesmd.anyContinue |= statesmd.anyContinue; - multiplesmd.anyBreak |= statesmd.anyBreak; - - multiplesmd.count += statesmd.count; - } + statement.processMultiple(ctx); return null; } @Override public Void visitSingle(final SingleContext ctx) { - final Metadata.StatementMetadata singlesmd = 
metadata.getStatementMetadata(ctx); - - final StatementContext statectx = ctx.statement(); - final Metadata.StatementMetadata statesmd = metadata.createStatementMetadata(statectx); - statesmd.lastSource = singlesmd.lastSource; - statesmd.inLoop = singlesmd.inLoop; - statesmd.lastLoop = singlesmd.beginLoop || singlesmd.lastLoop; - visit(statectx); - - singlesmd.methodEscape = statesmd.methodEscape; - singlesmd.loopEscape = statesmd.loopEscape; - singlesmd.allLast = statesmd.allLast; - singlesmd.anyContinue = statesmd.anyContinue; - singlesmd.anyBreak = statesmd.anyBreak; - - singlesmd.count = statesmd.count; + statement.processSingle(ctx); return null; } @Override public Void visitEmpty(final EmptyContext ctx) { - throw new UnsupportedOperationException(Metadata.error(ctx) + "Unexpected parser state."); + throw new UnsupportedOperationException(AnalyzerUtility.error(ctx) + "Unexpected state."); } @Override - public Void visitInitializer(InitializerContext ctx) { - final DeclarationContext declctx = ctx.declaration(); - final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + public Void visitEmptyscope(final EmptyscopeContext ctx) { + throw new UnsupportedOperationException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } - if (declctx != null) { - metadata.createStatementMetadata(declctx); - visit(declctx); - } else if (exprctx != null) { - final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); - expremd.read = false; - visit(exprctx); - - expremd.to = expremd.from; - markCast(expremd); - - if (!expremd.statement) { - throw new IllegalArgumentException(Metadata.error(exprctx) + - "The intializer of a for loop must be a statement."); - } - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } + @Override + public Void visitInitializer(final InitializerContext ctx) { + statement.processInitializer(ctx); return null; } @Override - public Void 
visitAfterthought(AfterthoughtContext ctx) { - final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - - if (exprctx != null) { - final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); - expremd.read = false; - visit(exprctx); - - expremd.to = expremd.from; - markCast(expremd); - - if (!expremd.statement) { - throw new IllegalArgumentException(Metadata.error(exprctx) + - "The afterthought of a for loop must be a statement."); - } - } + public Void visitAfterthought(final AfterthoughtContext ctx) { + statement.processAfterthought(ctx); return null; } @Override public Void visitDeclaration(final DeclarationContext ctx) { - final DecltypeContext decltypectx = ctx.decltype(); - final Metadata.ExpressionMetadata decltypeemd = metadata.createExpressionMetadata(decltypectx); - visit(decltypectx); - - for (final DeclvarContext declvarctx : ctx.declvar()) { - final Metadata.ExpressionMetadata declvaremd = metadata.createExpressionMetadata(declvarctx); - declvaremd.to = decltypeemd.from; - visit(declvarctx); - } + statement.processDeclaration(ctx); return null; } @Override public Void visitDecltype(final DecltypeContext ctx) { - final Metadata.ExpressionMetadata decltypeemd = metadata.getExpressionMetadata(ctx); - - final String name = ctx.getText(); - decltypeemd.from = definition.getType(name); + statement.processDecltype(ctx); return null; } @Override public Void visitDeclvar(final DeclvarContext ctx) { - final Metadata.ExpressionMetadata declvaremd = metadata.getExpressionMetadata(ctx); - - final String name = ctx.ID().getText(); - declvaremd.postConst = addVariable(ctx, name, declvaremd.to).slot; - - final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - - if (exprctx != null) { - final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); - expremd.to = declvaremd.to; - visit(exprctx); - markCast(expremd); - } + statement.processDeclvar(ctx); return 
null; } @Override public Void visitTrap(final TrapContext ctx) { - final Metadata.StatementMetadata trapsmd = metadata.getStatementMetadata(ctx); - - final String type = ctx.TYPE().getText(); - trapsmd.exception = definition.getType(type); - - try { - trapsmd.exception.clazz.asSubclass(Exception.class); - } catch (final ClassCastException exception) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Invalid exception type [" + trapsmd.exception.name + "]."); - } - - final String id = ctx.ID().getText(); - trapsmd.slot = addVariable(ctx, id, trapsmd.exception).slot; - - final BlockContext blockctx = ctx.block(); - - if (blockctx != null) { - final Metadata.StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); - blocksmd.lastSource = trapsmd.lastSource; - blocksmd.inLoop = trapsmd.inLoop; - blocksmd.lastLoop = trapsmd.lastLoop; - visit(blockctx); - - trapsmd.methodEscape = blocksmd.methodEscape; - trapsmd.loopEscape = blocksmd.loopEscape; - trapsmd.allLast = blocksmd.allLast; - trapsmd.anyContinue = blocksmd.anyContinue; - trapsmd.anyBreak = blocksmd.anyBreak; - } else if (ctx.emptyscope() == null) { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } + statement.processTrap(ctx); return null; } @Override public Void visitPrecedence(final PrecedenceContext ctx) { - throw new UnsupportedOperationException(Metadata.error(ctx) + "Unexpected parser state."); + throw new UnsupportedOperationException(AnalyzerUtility.error(ctx) + "Unexpected state."); } @Override public Void visitNumeric(final NumericContext ctx) { - final Metadata.ExpressionMetadata numericemd = metadata.getExpressionMetadata(ctx); - final boolean negate = ctx.parent instanceof UnaryContext && ((UnaryContext)ctx.parent).SUB() != null; - - if (ctx.DECIMAL() != null) { - final String svalue = (negate ? 
"-" : "") + ctx.DECIMAL().getText(); - - if (svalue.endsWith("f") || svalue.endsWith("F")) { - try { - numericemd.from = definition.floatType; - numericemd.preConst = Float.parseFloat(svalue.substring(0, svalue.length() - 1)); - } catch (NumberFormatException exception) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Invalid float constant [" + svalue + "]."); - } - } else { - try { - numericemd.from = definition.doubleType; - numericemd.preConst = Double.parseDouble(svalue); - } catch (NumberFormatException exception) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Invalid double constant [" + svalue + "]."); - } - } - } else { - String svalue = negate ? "-" : ""; - int radix; - - if (ctx.OCTAL() != null) { - svalue += ctx.OCTAL().getText(); - radix = 8; - } else if (ctx.INTEGER() != null) { - svalue += ctx.INTEGER().getText(); - radix = 10; - } else if (ctx.HEX() != null) { - svalue += ctx.HEX().getText(); - radix = 16; - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - - if (svalue.endsWith("d") || svalue.endsWith("D")) { - try { - numericemd.from = definition.doubleType; - numericemd.preConst = Double.parseDouble(svalue.substring(0, svalue.length() - 1)); - } catch (NumberFormatException exception) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Invalid float constant [" + svalue + "]."); - } - } else if (svalue.endsWith("f") || svalue.endsWith("F")) { - try { - numericemd.from = definition.floatType; - numericemd.preConst = Float.parseFloat(svalue.substring(0, svalue.length() - 1)); - } catch (NumberFormatException exception) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Invalid float constant [" + svalue + "]."); - } - } else if (svalue.endsWith("l") || svalue.endsWith("L")) { - try { - numericemd.from = definition.longType; - numericemd.preConst = Long.parseLong(svalue.substring(0, svalue.length() - 1), radix); - } catch (NumberFormatException 
exception) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Invalid long constant [" + svalue + "]."); - } - } else { - try { - final Type type = numericemd.to; - final Sort sort = type == null ? Sort.INT : type.sort; - final int value = Integer.parseInt(svalue, radix); - - if (sort == Sort.BYTE && value >= Byte.MIN_VALUE && value <= Byte.MAX_VALUE) { - numericemd.from = definition.byteType; - numericemd.preConst = (byte)value; - } else if (sort == Sort.CHAR && value >= Character.MIN_VALUE && value <= Character.MAX_VALUE) { - numericemd.from = definition.charType; - numericemd.preConst = (char)value; - } else if (sort == Sort.SHORT && value >= Short.MIN_VALUE && value <= Short.MAX_VALUE) { - numericemd.from = definition.shortType; - numericemd.preConst = (short)value; - } else { - numericemd.from = definition.intType; - numericemd.preConst = value; - } - } catch (NumberFormatException exception) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Invalid int constant [" + svalue + "]."); - } - } - } + expression.processNumeric(ctx); return null; } @Override public Void visitChar(final CharContext ctx) { - final Metadata.ExpressionMetadata charemd = metadata.getExpressionMetadata(ctx); - - if (ctx.CHAR() == null) { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - - charemd.preConst = ctx.CHAR().getText().charAt(0); - charemd.from = definition.charType; + expression.processChar(ctx); return null; } @Override public Void visitTrue(final TrueContext ctx) { - final Metadata.ExpressionMetadata trueemd = metadata.getExpressionMetadata(ctx); - - if (ctx.TRUE() == null) { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - - trueemd.preConst = true; - trueemd.from = definition.booleanType; + expression.processTrue(ctx); return null; } @Override public Void visitFalse(final FalseContext ctx) { - final Metadata.ExpressionMetadata falseemd = 
metadata.getExpressionMetadata(ctx); - - if (ctx.FALSE() == null) { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - - falseemd.preConst = false; - falseemd.from = definition.booleanType; + expression.processFalse(ctx); return null; } @Override public Void visitNull(final NullContext ctx) { - final Metadata.ExpressionMetadata nullemd = metadata.getExpressionMetadata(ctx); - - if (ctx.NULL() == null) { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - - nullemd.isNull = true; - - if (nullemd.to != null) { - if (nullemd.to.sort.primitive) { - throw new IllegalArgumentException("Cannot cast null to a primitive type [" + nullemd.to.name + "]."); - } - - nullemd.from = nullemd.to; - } else { - nullemd.from = definition.objectType; - } + expression.processNull(ctx); return null; } @Override public Void visitExternal(final ExternalContext ctx) { - final Metadata.ExpressionMetadata extemd = metadata.getExpressionMetadata(ctx); - - final ExtstartContext extstartctx = ctx.extstart(); - final Metadata.ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); - extstartemd.read = extemd.read; - visit(extstartctx); - - extemd.statement = extstartemd.statement; - extemd.preConst = extstartemd.constant; - extemd.from = extstartemd.current; - extemd.typesafe = extstartemd.current.sort != Sort.DEF; + expression.processExternal(ctx); return null; } @Override public Void visitPostinc(final PostincContext ctx) { - final Metadata.ExpressionMetadata postincemd = metadata.getExpressionMetadata(ctx); - - final ExtstartContext extstartctx = ctx.extstart(); - final Metadata.ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); - extstartemd.read = postincemd.read; - extstartemd.storeExpr = ctx.increment(); - extstartemd.token = ADD; - extstartemd.post = true; - visit(extstartctx); - - postincemd.statement = true; - postincemd.from = extstartemd.read ? 
extstartemd.current : definition.voidType; - postincemd.typesafe = extstartemd.current.sort != Sort.DEF; + expression.processPostinc(ctx); return null; } @Override public Void visitPreinc(final PreincContext ctx) { - final Metadata.ExpressionMetadata preincemd = metadata.getExpressionMetadata(ctx); - - final ExtstartContext extstartctx = ctx.extstart(); - final Metadata.ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); - extstartemd.read = preincemd.read; - extstartemd.storeExpr = ctx.increment(); - extstartemd.token = ADD; - extstartemd.pre = true; - visit(extstartctx); - - preincemd.statement = true; - preincemd.from = extstartemd.read ? extstartemd.current : definition.voidType; - preincemd.typesafe = extstartemd.current.sort != Sort.DEF; + expression.processPreinc(ctx); return null; } @Override public Void visitUnary(final UnaryContext ctx) { - final Metadata.ExpressionMetadata unaryemd = metadata.getExpressionMetadata(ctx); - - final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); - - if (ctx.BOOLNOT() != null) { - expremd.to = definition.booleanType; - visit(exprctx); - markCast(expremd); - - if (expremd.postConst != null) { - unaryemd.preConst = !(boolean)expremd.postConst; - } - - unaryemd.from = definition.booleanType; - } else if (ctx.BWNOT() != null || ctx.ADD() != null || ctx.SUB() != null) { - visit(exprctx); - - final Type promote = promoteNumeric(expremd.from, ctx.BWNOT() == null, true); - - if (promote == null) { - throw new ClassCastException("Cannot apply [" + ctx.getChild(0).getText() + "] " + - "operation to type [" + expremd.from.name + "]."); - } - - expremd.to = promote; - markCast(expremd); - - if (expremd.postConst != null) { - final Sort sort = promote.sort; - - if (ctx.BWNOT() != null) { - if (sort == Sort.INT) { - unaryemd.preConst = ~(int)expremd.postConst; - } else if (sort == Sort.LONG) { - 
unaryemd.preConst = ~(long)expremd.postConst; - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } else if (ctx.SUB() != null) { - if (exprctx instanceof NumericContext) { - unaryemd.preConst = expremd.postConst; - } else { - if (sort == Sort.INT) { - if (settings.getNumericOverflow()) { - unaryemd.preConst = -(int)expremd.postConst; - } else { - unaryemd.preConst = Math.negateExact((int)expremd.postConst); - } - } else if (sort == Sort.LONG) { - if (settings.getNumericOverflow()) { - unaryemd.preConst = -(long)expremd.postConst; - } else { - unaryemd.preConst = Math.negateExact((long)expremd.postConst); - } - } else if (sort == Sort.FLOAT) { - unaryemd.preConst = -(float)expremd.postConst; - } else if (sort == Sort.DOUBLE) { - unaryemd.preConst = -(double)expremd.postConst; - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } - } else if (ctx.ADD() != null) { - if (sort == Sort.INT) { - unaryemd.preConst = +(int)expremd.postConst; - } else if (sort == Sort.LONG) { - unaryemd.preConst = +(long)expremd.postConst; - } else if (sort == Sort.FLOAT) { - unaryemd.preConst = +(float)expremd.postConst; - } else if (sort == Sort.DOUBLE) { - unaryemd.preConst = +(double)expremd.postConst; - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } - - unaryemd.from = promote; - unaryemd.typesafe = expremd.typesafe; - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } + expression.processUnary(ctx); return null; } @Override public Void visitCast(final CastContext ctx) { - final Metadata.ExpressionMetadata castemd = metadata.getExpressionMetadata(ctx); - - final DecltypeContext decltypectx = ctx.decltype(); - final Metadata.ExpressionMetadata decltypemd = 
metadata.createExpressionMetadata(decltypectx); - visit(decltypectx); - - final Type type = decltypemd.from; - castemd.from = type; - - final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); - expremd.to = type; - expremd.explicit = true; - visit(exprctx); - markCast(expremd); - - if (expremd.postConst != null) { - castemd.preConst = expremd.postConst; - } - - castemd.typesafe = expremd.typesafe && castemd.from.sort != Sort.DEF; + expression.processCast(ctx); return null; } @Override public Void visitBinary(final BinaryContext ctx) { - final Metadata.ExpressionMetadata binaryemd = metadata.getExpressionMetadata(ctx); - - final ExpressionContext exprctx0 = metadata.updateExpressionTree(ctx.expression(0)); - final Metadata.ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); - visit(exprctx0); - - final ExpressionContext exprctx1 = metadata.updateExpressionTree(ctx.expression(1)); - final Metadata.ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); - visit(exprctx1); - - final boolean decimal = ctx.MUL() != null || ctx.DIV() != null || ctx.REM() != null || ctx.SUB() != null; - final boolean add = ctx.ADD() != null; - final boolean xor = ctx.BWXOR() != null; - final Type promote = add ? promoteAdd(expremd0.from, expremd1.from) : - xor ? promoteXor(expremd0.from, expremd1.from) : - promoteNumeric(expremd0.from, expremd1.from, decimal, true); - - if (promote == null) { - throw new ClassCastException("Cannot apply [" + ctx.getChild(1).getText() + "] " + - "operation to types [" + expremd0.from.name + "] and [" + expremd1.from.name + "]."); - } - - final Sort sort = promote.sort; - expremd0.to = add && sort == Sort.STRING ? expremd0.from : promote; - expremd1.to = add && sort == Sort.STRING ? 
expremd1.from : promote; - markCast(expremd0); - markCast(expremd1); - - if (expremd0.postConst != null && expremd1.postConst != null) { - if (ctx.MUL() != null) { - if (sort == Sort.INT) { - if (settings.getNumericOverflow()) { - binaryemd.preConst = (int)expremd0.postConst * (int)expremd1.postConst; - } else { - binaryemd.preConst = Math.multiplyExact((int)expremd0.postConst, (int)expremd1.postConst); - } - } else if (sort == Sort.LONG) { - if (settings.getNumericOverflow()) { - binaryemd.preConst = (long)expremd0.postConst * (long)expremd1.postConst; - } else { - binaryemd.preConst = Math.multiplyExact((long)expremd0.postConst, (long)expremd1.postConst); - } - } else if (sort == Sort.FLOAT) { - if (settings.getNumericOverflow()) { - binaryemd.preConst = (float)expremd0.postConst * (float)expremd1.postConst; - } else { - binaryemd.preConst = Utility.multiplyWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst); - } - } else if (sort == Sort.DOUBLE) { - if (settings.getNumericOverflow()) { - binaryemd.preConst = (double)expremd0.postConst * (double)expremd1.postConst; - } else { - binaryemd.preConst = Utility.multiplyWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); - } - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } else if (ctx.DIV() != null) { - if (sort == Sort.INT) { - if (settings.getNumericOverflow()) { - binaryemd.preConst = (int)expremd0.postConst / (int)expremd1.postConst; - } else { - binaryemd.preConst = Utility.divideWithoutOverflow((int)expremd0.postConst, (int)expremd1.postConst); - } - } else if (sort == Sort.LONG) { - if (settings.getNumericOverflow()) { - binaryemd.preConst = (long)expremd0.postConst / (long)expremd1.postConst; - } else { - binaryemd.preConst = Utility.divideWithoutOverflow((long)expremd0.postConst, (long)expremd1.postConst); - } - } else if (sort == Sort.FLOAT) { - if (settings.getNumericOverflow()) { - binaryemd.preConst = 
(float)expremd0.postConst / (float)expremd1.postConst; - } else { - binaryemd.preConst = Utility.divideWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst); - } - } else if (sort == Sort.DOUBLE) { - if (settings.getNumericOverflow()) { - binaryemd.preConst = (double)expremd0.postConst / (double)expremd1.postConst; - } else { - binaryemd.preConst = Utility.divideWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); - } - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } else if (ctx.REM() != null) { - if (sort == Sort.INT) { - binaryemd.preConst = (int)expremd0.postConst % (int)expremd1.postConst; - } else if (sort == Sort.LONG) { - binaryemd.preConst = (long)expremd0.postConst % (long)expremd1.postConst; - } else if (sort == Sort.FLOAT) { - if (settings.getNumericOverflow()) { - binaryemd.preConst = (float)expremd0.postConst % (float)expremd1.postConst; - } else { - binaryemd.preConst = Utility.remainderWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst); - } - } else if (sort == Sort.DOUBLE) { - if (settings.getNumericOverflow()) { - binaryemd.preConst = (double)expremd0.postConst % (double)expremd1.postConst; - } else { - binaryemd.preConst = Utility.remainderWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); - } - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } else if (ctx.ADD() != null) { - if (sort == Sort.INT) { - if (settings.getNumericOverflow()) { - binaryemd.preConst = (int)expremd0.postConst + (int)expremd1.postConst; - } else { - binaryemd.preConst = Math.addExact((int)expremd0.postConst, (int)expremd1.postConst); - } - } else if (sort == Sort.LONG) { - if (settings.getNumericOverflow()) { - binaryemd.preConst = (long)expremd0.postConst + (long)expremd1.postConst; - } else { - binaryemd.preConst = Math.addExact((long)expremd0.postConst, (long)expremd1.postConst); - } - } 
else if (sort == Sort.FLOAT) { - if (settings.getNumericOverflow()) { - binaryemd.preConst = (float)expremd0.postConst + (float)expremd1.postConst; - } else { - binaryemd.preConst = Utility.addWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst); - } - } else if (sort == Sort.DOUBLE) { - if (settings.getNumericOverflow()) { - binaryemd.preConst = (double)expremd0.postConst + (double)expremd1.postConst; - } else { - binaryemd.preConst = Utility.addWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); - } - } else if (sort == Sort.STRING) { - binaryemd.preConst = "" + expremd0.postConst + expremd1.postConst; - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } else if (ctx.SUB() != null) { - if (sort == Sort.INT) { - if (settings.getNumericOverflow()) { - binaryemd.preConst = (int)expremd0.postConst - (int)expremd1.postConst; - } else { - binaryemd.preConst = Math.subtractExact((int)expremd0.postConst, (int)expremd1.postConst); - } - } else if (sort == Sort.LONG) { - if (settings.getNumericOverflow()) { - binaryemd.preConst = (long)expremd0.postConst - (long)expremd1.postConst; - } else { - binaryemd.preConst = Math.subtractExact((long)expremd0.postConst, (long)expremd1.postConst); - } - } else if (sort == Sort.FLOAT) { - if (settings.getNumericOverflow()) { - binaryemd.preConst = (float)expremd0.postConst - (float)expremd1.postConst; - } else { - binaryemd.preConst = Utility.subtractWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst); - } - } else if (sort == Sort.DOUBLE) { - if (settings.getNumericOverflow()) { - binaryemd.preConst = (double)expremd0.postConst - (double)expremd1.postConst; - } else { - binaryemd.preConst = Utility.subtractWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); - } - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } else if (ctx.LSH() != null) { - if (sort == 
Sort.INT) { - binaryemd.preConst = (int)expremd0.postConst << (int)expremd1.postConst; - } else if (sort == Sort.LONG) { - binaryemd.preConst = (long)expremd0.postConst << (long)expremd1.postConst; - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } else if (ctx.RSH() != null) { - if (sort == Sort.INT) { - binaryemd.preConst = (int)expremd0.postConst >> (int)expremd1.postConst; - } else if (sort == Sort.LONG) { - binaryemd.preConst = (long)expremd0.postConst >> (long)expremd1.postConst; - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } else if (ctx.USH() != null) { - if (sort == Sort.INT) { - binaryemd.preConst = (int)expremd0.postConst >>> (int)expremd1.postConst; - } else if (sort == Sort.LONG) { - binaryemd.preConst = (long)expremd0.postConst >>> (long)expremd1.postConst; - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } else if (ctx.BWAND() != null) { - if (sort == Sort.INT) { - binaryemd.preConst = (int)expremd0.postConst & (int)expremd1.postConst; - } else if (sort == Sort.LONG) { - binaryemd.preConst = (long)expremd0.postConst & (long)expremd1.postConst; - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } else if (ctx.BWXOR() != null) { - if (sort == Sort.BOOL) { - binaryemd.preConst = (boolean)expremd0.postConst ^ (boolean)expremd1.postConst; - } else if (sort == Sort.INT) { - binaryemd.preConst = (int)expremd0.postConst ^ (int)expremd1.postConst; - } else if (sort == Sort.LONG) { - binaryemd.preConst = (long)expremd0.postConst ^ (long)expremd1.postConst; - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } else if (ctx.BWOR() != null) { - if (sort == Sort.INT) { - binaryemd.preConst = (int)expremd0.postConst | (int)expremd1.postConst; - } else if (sort == Sort.LONG) { - binaryemd.preConst = 
(long)expremd0.postConst | (long)expremd1.postConst; - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } - - binaryemd.from = promote; - binaryemd.typesafe = expremd0.typesafe && expremd1.typesafe; + expression.processBinary(ctx); return null; } @Override public Void visitComp(final CompContext ctx) { - final Metadata.ExpressionMetadata compemd = metadata.getExpressionMetadata(ctx); - final boolean equality = ctx.EQ() != null || ctx.NE() != null; - final boolean reference = ctx.EQR() != null || ctx.NER() != null; - - final ExpressionContext exprctx0 = metadata.updateExpressionTree(ctx.expression(0)); - final Metadata.ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); - visit(exprctx0); - - final ExpressionContext exprctx1 = metadata.updateExpressionTree(ctx.expression(1)); - final Metadata.ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); - visit(exprctx1); - - if (expremd0.isNull && expremd1.isNull) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Unnecessary comparison of null constants."); - } - - final Type promote = equality ? promoteEquality(expremd0.from, expremd1.from) : - reference ? 
promoteReference(expremd0.from, expremd1.from) : - promoteNumeric(expremd0.from, expremd1.from, true, true); - - if (promote == null) { - throw new ClassCastException("Cannot apply [" + ctx.getChild(1).getText() + "] " + - "operation to types [" + expremd0.from.name + "] and [" + expremd1.from.name + "]."); - } - - expremd0.to = promote; - expremd1.to = promote; - markCast(expremd0); - markCast(expremd1); - - if (expremd0.postConst != null && expremd1.postConst != null) { - final Sort sort = promote.sort; - - if (ctx.EQ() != null || ctx.EQR() != null) { - if (sort == Sort.BOOL) { - compemd.preConst = (boolean)expremd0.postConst == (boolean)expremd1.postConst; - } else if (sort == Sort.INT) { - compemd.preConst = (int)expremd0.postConst == (int)expremd1.postConst; - } else if (sort == Sort.LONG) { - compemd.preConst = (long)expremd0.postConst == (long)expremd1.postConst; - } else if (sort == Sort.FLOAT) { - compemd.preConst = (float)expremd0.postConst == (float)expremd1.postConst; - } else if (sort == Sort.DOUBLE) { - compemd.preConst = (double)expremd0.postConst == (double)expremd1.postConst; - } else { - if (ctx.EQ() != null && !expremd0.isNull && !expremd1.isNull) { - compemd.preConst = expremd0.postConst.equals(expremd1.postConst); - } else if (ctx.EQR() != null) { - compemd.preConst = expremd0.postConst == expremd1.postConst; - } - } - } else if (ctx.NE() != null || ctx.NER() != null) { - if (sort == Sort.BOOL) { - compemd.preConst = (boolean)expremd0.postConst != (boolean)expremd1.postConst; - } else if (sort == Sort.INT) { - compemd.preConst = (int)expremd0.postConst != (int)expremd1.postConst; - } else if (sort == Sort.LONG) { - compemd.preConst = (long)expremd0.postConst != (long)expremd1.postConst; - } else if (sort == Sort.FLOAT) { - compemd.preConst = (float)expremd0.postConst != (float)expremd1.postConst; - } else if (sort == Sort.DOUBLE) { - compemd.preConst = (double)expremd0.postConst != (double)expremd1.postConst; - } else { - if (ctx.NE() != null 
&& !expremd0.isNull && !expremd1.isNull) { - compemd.preConst = expremd0.postConst.equals(expremd1.postConst); - } else if (ctx.NER() != null) { - compemd.preConst = expremd0.postConst == expremd1.postConst; - } - } - } else if (ctx.GTE() != null) { - if (sort == Sort.INT) { - compemd.preConst = (int)expremd0.postConst >= (int)expremd1.postConst; - } else if (sort == Sort.LONG) { - compemd.preConst = (long)expremd0.postConst >= (long)expremd1.postConst; - } else if (sort == Sort.FLOAT) { - compemd.preConst = (float)expremd0.postConst >= (float)expremd1.postConst; - } else if (sort == Sort.DOUBLE) { - compemd.preConst = (double)expremd0.postConst >= (double)expremd1.postConst; - } - } else if (ctx.GT() != null) { - if (sort == Sort.INT) { - compemd.preConst = (int)expremd0.postConst > (int)expremd1.postConst; - } else if (sort == Sort.LONG) { - compemd.preConst = (long)expremd0.postConst > (long)expremd1.postConst; - } else if (sort == Sort.FLOAT) { - compemd.preConst = (float)expremd0.postConst > (float)expremd1.postConst; - } else if (sort == Sort.DOUBLE) { - compemd.preConst = (double)expremd0.postConst > (double)expremd1.postConst; - } - } else if (ctx.LTE() != null) { - if (sort == Sort.INT) { - compemd.preConst = (int)expremd0.postConst <= (int)expremd1.postConst; - } else if (sort == Sort.LONG) { - compemd.preConst = (long)expremd0.postConst <= (long)expremd1.postConst; - } else if (sort == Sort.FLOAT) { - compemd.preConst = (float)expremd0.postConst <= (float)expremd1.postConst; - } else if (sort == Sort.DOUBLE) { - compemd.preConst = (double)expremd0.postConst <= (double)expremd1.postConst; - } - } else if (ctx.LT() != null) { - if (sort == Sort.INT) { - compemd.preConst = (int)expremd0.postConst < (int)expremd1.postConst; - } else if (sort == Sort.LONG) { - compemd.preConst = (long)expremd0.postConst < (long)expremd1.postConst; - } else if (sort == Sort.FLOAT) { - compemd.preConst = (float)expremd0.postConst < (float)expremd1.postConst; - } else if (sort 
== Sort.DOUBLE) { - compemd.preConst = (double)expremd0.postConst < (double)expremd1.postConst; - } - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } - - compemd.from = definition.booleanType; - compemd.typesafe = expremd0.typesafe && expremd1.typesafe; + expression.processComp(ctx); return null; } @Override public Void visitBool(final BoolContext ctx) { - final Metadata.ExpressionMetadata boolemd = metadata.getExpressionMetadata(ctx); - - final ExpressionContext exprctx0 = metadata.updateExpressionTree(ctx.expression(0)); - final Metadata.ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); - expremd0.to = definition.booleanType; - visit(exprctx0); - markCast(expremd0); - - final ExpressionContext exprctx1 = metadata.updateExpressionTree(ctx.expression(1)); - final Metadata.ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); - expremd1.to = definition.booleanType; - visit(exprctx1); - markCast(expremd1); - - if (expremd0.postConst != null && expremd1.postConst != null) { - if (ctx.BOOLAND() != null) { - boolemd.preConst = (boolean)expremd0.postConst && (boolean)expremd1.postConst; - } else if (ctx.BOOLOR() != null) { - boolemd.preConst = (boolean)expremd0.postConst || (boolean)expremd1.postConst; - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } - - boolemd.from = definition.booleanType; - boolemd.typesafe = expremd0.typesafe && expremd1.typesafe; + expression.processBool(ctx); return null; } @Override public Void visitConditional(final ConditionalContext ctx) { - final Metadata.ExpressionMetadata condemd = metadata.getExpressionMetadata(ctx); - - final ExpressionContext exprctx0 = metadata.updateExpressionTree(ctx.expression(0)); - final Metadata.ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); - expremd0.to = definition.booleanType; - visit(exprctx0); - markCast(expremd0); - - if 
(expremd0.postConst != null) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Unnecessary conditional statement."); - } - - final ExpressionContext exprctx1 = metadata.updateExpressionTree(ctx.expression(1)); - final Metadata.ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); - expremd1.to = condemd.to; - expremd1.explicit = condemd.explicit; - visit(exprctx1); - - final ExpressionContext exprctx2 = metadata.updateExpressionTree(ctx.expression(2)); - final Metadata.ExpressionMetadata expremd2 = metadata.createExpressionMetadata(exprctx2); - expremd2.to = condemd.to; - expremd2.explicit = condemd.explicit; - visit(exprctx2); - - if (condemd.to == null) { - final Type promote = promoteConditional(expremd1.from, expremd2.from, expremd1.preConst, expremd2.preConst); - - expremd1.to = promote; - expremd2.to = promote; - condemd.from = promote; - } else { - condemd.from = condemd.to; - } - - markCast(expremd1); - markCast(expremd2); - - condemd.typesafe = expremd0.typesafe && expremd1.typesafe; + expression.processConditional(ctx); return null; } @Override public Void visitAssignment(final AssignmentContext ctx) { - final Metadata.ExpressionMetadata assignemd = metadata.getExpressionMetadata(ctx); - - final ExtstartContext extstartctx = ctx.extstart(); - final Metadata.ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); - - extstartemd.read = assignemd.read; - extstartemd.storeExpr = metadata.updateExpressionTree(ctx.expression()); - - if (ctx.AMUL() != null) { - extstartemd.token = MUL; - } else if (ctx.ADIV() != null) { - extstartemd.token = DIV; - } else if (ctx.AREM() != null) { - extstartemd.token = REM; - } else if (ctx.AADD() != null) { - extstartemd.token = ADD; - } else if (ctx.ASUB() != null) { - extstartemd.token = SUB; - } else if (ctx.ALSH() != null) { - extstartemd.token = LSH; - } else if (ctx.AUSH() != null) { - extstartemd.token = USH; - } else if (ctx.ARSH() != null) { - extstartemd.token = 
RSH; - } else if (ctx.AAND() != null) { - extstartemd.token = BWAND; - } else if (ctx.AXOR() != null) { - extstartemd.token = BWXOR; - } else if (ctx.AOR() != null) { - extstartemd.token = BWOR; - } - - visit(extstartctx); - - assignemd.statement = true; - assignemd.from = extstartemd.read ? extstartemd.current : definition.voidType; - assignemd.typesafe = extstartemd.current.sort != Sort.DEF; + expression.processAssignment(ctx); return null; } @Override public Void visitExtstart(final ExtstartContext ctx) { - final ExtprecContext precctx = ctx.extprec(); - final ExtcastContext castctx = ctx.extcast(); - final ExttypeContext typectx = ctx.exttype(); - final ExtvarContext varctx = ctx.extvar(); - final ExtnewContext newctx = ctx.extnew(); - final ExtstringContext stringctx = ctx.extstring(); - - if (precctx != null) { - metadata.createExtNodeMetadata(ctx, precctx); - visit(precctx); - } else if (castctx != null) { - metadata.createExtNodeMetadata(ctx, castctx); - visit(castctx); - } else if (typectx != null) { - metadata.createExtNodeMetadata(ctx, typectx); - visit(typectx); - } else if (varctx != null) { - metadata.createExtNodeMetadata(ctx, varctx); - visit(varctx); - } else if (newctx != null) { - metadata.createExtNodeMetadata(ctx, newctx); - visit(newctx); - } else if (stringctx != null) { - metadata.createExtNodeMetadata(ctx, stringctx); - visit(stringctx); - } else { - throw new IllegalStateException(); - } + external.processExtstart(ctx); return null; } @Override public Void visitExtprec(final ExtprecContext ctx) { - final Metadata.ExtNodeMetadata precenmd = metadata.getExtNodeMetadata(ctx); - final ParserRuleContext parent = precenmd.parent; - final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); - - final ExtprecContext precctx = ctx.extprec(); - final ExtcastContext castctx = ctx.extcast(); - final ExttypeContext typectx = ctx.exttype(); - final ExtvarContext varctx = ctx.extvar(); - final ExtnewContext newctx = ctx.extnew(); - 
final ExtstringContext stringctx = ctx.extstring(); - - final ExtdotContext dotctx = ctx.extdot(); - final ExtbraceContext bracectx = ctx.extbrace(); - - if (dotctx != null || bracectx != null) { - ++parentemd.scope; - } - - if (precctx != null) { - metadata.createExtNodeMetadata(parent, precctx); - visit(precctx); - } else if (castctx != null) { - metadata.createExtNodeMetadata(parent, castctx); - visit(castctx); - } else if (typectx != null) { - metadata.createExtNodeMetadata(parent, typectx); - visit(typectx); - } else if (varctx != null) { - metadata.createExtNodeMetadata(parent, varctx); - visit(varctx); - } else if (newctx != null) { - metadata.createExtNodeMetadata(parent, newctx); - visit(newctx); - } else if (stringctx != null) { - metadata.createExtNodeMetadata(ctx, stringctx); - visit(stringctx); - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - - parentemd.statement = false; - - if (dotctx != null) { - --parentemd.scope; - - metadata.createExtNodeMetadata(parent, dotctx); - visit(dotctx); - } else if (bracectx != null) { - --parentemd.scope; - - metadata.createExtNodeMetadata(parent, bracectx); - visit(bracectx); - } + external.processExtprec(ctx); return null; } @Override public Void visitExtcast(final ExtcastContext ctx) { - final Metadata.ExtNodeMetadata castenmd = metadata.getExtNodeMetadata(ctx); - final ParserRuleContext parent = castenmd.parent; - final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); - - final ExtprecContext precctx = ctx.extprec(); - final ExtcastContext castctx = ctx.extcast(); - final ExttypeContext typectx = ctx.exttype(); - final ExtvarContext varctx = ctx.extvar(); - final ExtnewContext newctx = ctx.extnew(); - final ExtstringContext stringctx = ctx.extstring(); - - if (precctx != null) { - metadata.createExtNodeMetadata(parent, precctx); - visit(precctx); - } else if (castctx != null) { - metadata.createExtNodeMetadata(parent, castctx); - 
visit(castctx); - } else if (typectx != null) { - metadata.createExtNodeMetadata(parent, typectx); - visit(typectx); - } else if (varctx != null) { - metadata.createExtNodeMetadata(parent, varctx); - visit(varctx); - } else if (newctx != null) { - metadata.createExtNodeMetadata(parent, newctx); - visit(newctx); - } else if (stringctx != null) { - metadata.createExtNodeMetadata(ctx, stringctx); - visit(stringctx); - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - - final DecltypeContext declctx = ctx.decltype(); - final Metadata.ExpressionMetadata declemd = metadata.createExpressionMetadata(declctx); - visit(declctx); - - castenmd.castTo = getLegalCast(ctx, parentemd.current, declemd.from, true); - castenmd.type = declemd.from; - parentemd.current = declemd.from; - parentemd.statement = false; + external.processExtcast(ctx); return null; } @Override public Void visitExtbrace(final ExtbraceContext ctx) { - final Metadata.ExtNodeMetadata braceenmd = metadata.getExtNodeMetadata(ctx); - final ParserRuleContext parent = braceenmd.parent; - final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); - - final boolean array = parentemd.current.sort == Sort.ARRAY; - final boolean def = parentemd.current.sort == Sort.DEF; - boolean map = false; - boolean list = false; - - try { - parentemd.current.clazz.asSubclass(Map.class); - map = true; - } catch (ClassCastException exception) { - // Do nothing. - } - - try { - parentemd.current.clazz.asSubclass(List.class); - list = true; - } catch (ClassCastException exception) { - // Do nothing. 
- } - - final ExtdotContext dotctx = ctx.extdot(); - final ExtbraceContext bracectx = ctx.extbrace(); - - braceenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; - - final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); - - if (array || def) { - expremd.to = array ? definition.intType : definition.objectType; - visit(exprctx); - markCast(expremd); - - braceenmd.target = "#brace"; - braceenmd.type = def ? definition.defType : - definition.getType(parentemd.current.struct, parentemd.current.type.getDimensions() - 1); - analyzeLoadStoreExternal(ctx); - parentemd.current = braceenmd.type; - - if (dotctx != null) { - metadata.createExtNodeMetadata(parent, dotctx); - visit(dotctx); - } else if (bracectx != null) { - metadata.createExtNodeMetadata(parent, bracectx); - visit(bracectx); - } - } else { - final boolean store = braceenmd.last && parentemd.storeExpr != null; - final boolean get = parentemd.read || parentemd.token > 0 || !braceenmd.last; - final boolean set = braceenmd.last && store; - - Method getter; - Method setter; - Type valuetype; - Type settype; - - if (map) { - getter = parentemd.current.struct.methods.get("get"); - setter = parentemd.current.struct.methods.get("put"); - - if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1)) { - throw new IllegalArgumentException(Metadata.error(ctx) + - "Illegal map get shortcut for type [" + parentemd.current.name + "]."); - } - - if (setter != null && setter.arguments.size() != 2) { - throw new IllegalArgumentException(Metadata.error(ctx) + - "Illegal map set shortcut for type [" + parentemd.current.name + "]."); - } - - if (getter != null && setter != null && (!getter.arguments.get(0).equals(setter.arguments.get(0)) - || !getter.rtn.equals(setter.arguments.get(1)))) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Shortcut argument 
types must match."); - } - - valuetype = setter != null ? setter.arguments.get(0) : getter != null ? getter.arguments.get(0) : null; - settype = setter == null ? null : setter.arguments.get(1); - } else if (list) { - getter = parentemd.current.struct.methods.get("get"); - setter = parentemd.current.struct.methods.get("set"); - - if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || - getter.arguments.get(0).sort != Sort.INT)) { - throw new IllegalArgumentException(Metadata.error(ctx) + - "Illegal list get shortcut for type [" + parentemd.current.name + "]."); - } - - if (setter != null && (setter.arguments.size() != 2 || setter.arguments.get(0).sort != Sort.INT)) { - throw new IllegalArgumentException(Metadata.error(ctx) + - "Illegal list set shortcut for type [" + parentemd.current.name + "]."); - } - - if (getter != null && setter != null && (!getter.arguments.get(0).equals(setter.arguments.get(0)) - || !getter.rtn.equals(setter.arguments.get(1)))) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Shortcut argument types must match."); - } - - valuetype = definition.intType; - settype = setter == null ? null : setter.arguments.get(1); - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - - if ((get || set) && (!get || getter != null) && (!set || setter != null)) { - expremd.to = valuetype; - visit(exprctx); - markCast(expremd); - - braceenmd.target = new Object[] {getter, setter, true, null}; - braceenmd.type = get ? getter.rtn : settype; - analyzeLoadStoreExternal(ctx); - parentemd.current = get ? 
getter.rtn : setter.rtn; - } - } - - if (braceenmd.target == null) { - throw new IllegalArgumentException(Metadata.error(ctx) + - "Attempting to address a non-array type [" + parentemd.current.name + "] as an array."); - } + external.processExtbrace(ctx); return null; } @Override public Void visitExtdot(final ExtdotContext ctx) { - final Metadata.ExtNodeMetadata dotemnd = metadata.getExtNodeMetadata(ctx); - final ParserRuleContext parent = dotemnd.parent; - - final ExtcallContext callctx = ctx.extcall(); - final ExtfieldContext fieldctx = ctx.extfield(); - - if (callctx != null) { - metadata.createExtNodeMetadata(parent, callctx); - visit(callctx); - } else if (fieldctx != null) { - metadata.createExtNodeMetadata(parent, fieldctx); - visit(fieldctx); - } + external.processExtdot(ctx); return null; } @Override public Void visitExttype(final ExttypeContext ctx) { - final Metadata.ExtNodeMetadata typeenmd = metadata.getExtNodeMetadata(ctx); - final ParserRuleContext parent = typeenmd.parent; - final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); - - if (parentemd.current != null) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Unexpected static type."); - } - - final String typestr = ctx.TYPE().getText(); - typeenmd.type = definition.getType(typestr); - parentemd.current = typeenmd.type; - parentemd.statik = true; - - final ExtdotContext dotctx = ctx.extdot(); - metadata.createExtNodeMetadata(parent, dotctx); - visit(dotctx); + external.processExttype(ctx); return null; } @Override public Void visitExtcall(final ExtcallContext ctx) { - final Metadata.ExtNodeMetadata callenmd = metadata.getExtNodeMetadata(ctx); - final ParserRuleContext parent = callenmd.parent; - final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); - - final ExtdotContext dotctx = ctx.extdot(); - final ExtbraceContext bracectx = ctx.extbrace(); - - callenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; - - 
final String name = ctx.EXTID().getText(); - - if (parentemd.current.sort == Sort.ARRAY) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Unexpected call [" + name + "] on an array."); - } else if (callenmd.last && parentemd.storeExpr != null) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Cannot assign a value to a call [" + name + "]."); - } - - final Struct struct = parentemd.current.struct; - final List arguments = ctx.arguments().expression(); - final int size = arguments.size(); - Type[] types; - - final Method method = parentemd.statik ? struct.functions.get(name) : struct.methods.get(name); - final boolean def = parentemd.current.sort == Sort.DEF; - - if (method == null && !def) { - throw new IllegalArgumentException( - Metadata.error(ctx) + "Unknown call [" + name + "] on type [" + struct.name + "]."); - } else if (method != null) { - types = new Type[method.arguments.size()]; - method.arguments.toArray(types); - - callenmd.target = method; - callenmd.type = method.rtn; - parentemd.statement = !parentemd.read && callenmd.last; - parentemd.current = method.rtn; - - if (size != types.length) { - throw new IllegalArgumentException(Metadata.error(ctx) + "When calling [" + name + "] on type " + - "[" + struct.name + "] expected [" + types.length + "] arguments," + - " but found [" + arguments.size() + "]."); - } - } else { - types = new Type[arguments.size()]; - Arrays.fill(types, definition.defType); - - callenmd.target = name; - callenmd.type = definition.defType; - parentemd.statement = !parentemd.read && callenmd.last; - parentemd.current = callenmd.type; - } - - for (int argument = 0; argument < size; ++argument) { - final ExpressionContext exprctx = metadata.updateExpressionTree(arguments.get(argument)); - final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); - expremd.to = types[argument]; - visit(exprctx); - markCast(expremd); - } - - parentemd.statik = false; - - if (dotctx != null) { - 
metadata.createExtNodeMetadata(parent, dotctx); - visit(dotctx); - } else if (bracectx != null) { - metadata.createExtNodeMetadata(parent, bracectx); - visit(bracectx); - } + external.processExtcall(ctx); return null; } @Override public Void visitExtvar(final ExtvarContext ctx) { - final Metadata.ExtNodeMetadata varenmd = metadata.getExtNodeMetadata(ctx); - final ParserRuleContext parent = varenmd.parent; - final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); - - final String name = ctx.ID().getText(); - - final ExtdotContext dotctx = ctx.extdot(); - final ExtbraceContext bracectx = ctx.extbrace(); - - if (parentemd.current != null) { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected variable [" + name + "] load."); - } - - varenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; - - final Variable variable = getVariable(name); - - if (variable == null) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Unknown variable [" + name + "]."); - } - - varenmd.target = variable.slot; - varenmd.type = variable.type; - analyzeLoadStoreExternal(ctx); - parentemd.current = varenmd.type; - - if (dotctx != null) { - metadata.createExtNodeMetadata(parent, dotctx); - visit(dotctx); - } else if (bracectx != null) { - metadata.createExtNodeMetadata(parent, bracectx); - visit(bracectx); - } + external.processExtvar(ctx); return null; } @Override public Void visitExtfield(final ExtfieldContext ctx) { - final Metadata.ExtNodeMetadata memberenmd = metadata.getExtNodeMetadata(ctx); - final ParserRuleContext parent = memberenmd.parent; - final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); - - if (ctx.EXTID() == null && ctx.EXTINTEGER() == null) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Unexpected parser state."); - } - - final String value = ctx.EXTID() == null ? 
ctx.EXTINTEGER().getText() : ctx.EXTID().getText(); - - final ExtdotContext dotctx = ctx.extdot(); - final ExtbraceContext bracectx = ctx.extbrace(); - - memberenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; - final boolean store = memberenmd.last && parentemd.storeExpr != null; - - if (parentemd.current == null) { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected field [" + value + "] load."); - } - - if (parentemd.current.sort == Sort.ARRAY) { - if ("length".equals(value)) { - if (!parentemd.read) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Must read array field [length]."); - } else if (store) { - throw new IllegalArgumentException( - Metadata.error(ctx) + "Cannot write to read-only array field [length]."); - } - - memberenmd.target = "#length"; - memberenmd.type = definition.intType; - parentemd.current = definition.intType; - } else { - throw new IllegalArgumentException(Metadata.error(ctx) + "Unexpected array field [" + value + "]."); - } - } else if (parentemd.current.sort == Sort.DEF) { - memberenmd.target = value; - memberenmd.type = definition.defType; - analyzeLoadStoreExternal(ctx); - parentemd.current = memberenmd.type; - } else { - final Struct struct = parentemd.current.struct; - final Field field = parentemd.statik ? 
struct.statics.get(value) : struct.members.get(value); - - if (field != null) { - if (store && java.lang.reflect.Modifier.isFinal(field.reflect.getModifiers())) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Cannot write to read-only" + - " field [" + value + "] for type [" + struct.name + "]."); - } - - memberenmd.target = field; - memberenmd.type = field.type; - analyzeLoadStoreExternal(ctx); - parentemd.current = memberenmd.type; - } else { - final boolean get = parentemd.read || parentemd.token > 0 || !memberenmd.last; - final boolean set = memberenmd.last && store; - - Method getter = struct.methods.get("get" + Character.toUpperCase(value.charAt(0)) + value.substring(1)); - Method setter = struct.methods.get("set" + Character.toUpperCase(value.charAt(0)) + value.substring(1)); - Object constant = null; - - if (getter != null && (getter.rtn.sort == Sort.VOID || !getter.arguments.isEmpty())) { - throw new IllegalArgumentException(Metadata.error(ctx) + - "Illegal get shortcut on field [" + value + "] for type [" + struct.name + "]."); - } - - if (setter != null && (setter.rtn.sort != Sort.VOID || setter.arguments.size() != 1)) { - throw new IllegalArgumentException(Metadata.error(ctx) + - "Illegal set shortcut on field [" + value + "] for type [" + struct.name + "]."); - } - - Type settype = setter == null ? 
null : setter.arguments.get(0); - - if (getter == null && setter == null) { - if (ctx.EXTID() != null) { - try { - parentemd.current.clazz.asSubclass(Map.class); - - getter = parentemd.current.struct.methods.get("get"); - setter = parentemd.current.struct.methods.get("put"); - - if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || - getter.arguments.get(0).sort != Sort.STRING)) { - throw new IllegalArgumentException(Metadata.error(ctx) + - "Illegal map get shortcut [" + value + "] for type [" + struct.name + "]."); - } - - if (setter != null && (setter.arguments.size() != 2 || - setter.arguments.get(0).sort != Sort.STRING)) { - throw new IllegalArgumentException(Metadata.error(ctx) + - "Illegal map set shortcut [" + value + "] for type [" + struct.name + "]."); - } - - if (getter != null && setter != null && !getter.rtn.equals(setter.arguments.get(1))) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Shortcut argument types must match."); - } - - settype = setter == null ? null : setter.arguments.get(1); - constant = value; - } catch (ClassCastException exception) { - //Do nothing. 
- } - } else if (ctx.EXTINTEGER() != null) { - try { - parentemd.current.clazz.asSubclass(List.class); - - getter = parentemd.current.struct.methods.get("get"); - setter = parentemd.current.struct.methods.get("set"); - - if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || - getter.arguments.get(0).sort != Sort.INT)) { - throw new IllegalArgumentException(Metadata.error(ctx) + - "Illegal list get shortcut [" + value + "] for type [" + struct.name + "]."); - } - - if (setter != null && (setter.rtn.sort != Sort.VOID || setter.arguments.size() != 2 || - setter.arguments.get(0).sort != Sort.INT)) { - throw new IllegalArgumentException(Metadata.error(ctx) + - "Illegal list set shortcut [" + value + "] for type [" + struct.name + "]."); - } - - if (getter != null && setter != null && !getter.rtn.equals(setter.arguments.get(1))) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Shortcut argument types must match."); - } - - settype = setter == null ? null : setter.arguments.get(1); - - try { - constant = Integer.parseInt(value); - } catch (NumberFormatException exception) { - throw new IllegalArgumentException(Metadata.error(ctx) + - "Illegal list shortcut value [" + value + "]."); - } - } catch (ClassCastException exception) { - //Do nothing. - } - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); - } - } - - if ((get || set) && (!get || getter != null) && (!set || setter != null)) { - memberenmd.target = new Object[] {getter, setter, constant != null, constant}; - memberenmd.type = get ? getter.rtn : settype; - analyzeLoadStoreExternal(ctx); - parentemd.current = get ? 
getter.rtn : setter.rtn; - } - } - - if (memberenmd.target == null) { - throw new IllegalArgumentException( - Metadata.error(ctx) + "Unknown field [" + value + "] for type [" + struct.name + "]."); - } - } - - parentemd.statik = false; - - if (dotctx != null) { - metadata.createExtNodeMetadata(parent, dotctx); - visit(dotctx); - } else if (bracectx != null) { - metadata.createExtNodeMetadata(parent, bracectx); - visit(bracectx); - } + external.processExtfield(ctx); return null; } @Override - public Void visitExtnew(ExtnewContext ctx) { - final Metadata.ExtNodeMetadata newenmd = metadata.getExtNodeMetadata(ctx); - final ParserRuleContext parent = newenmd.parent; - final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); - - final ExtdotContext dotctx = ctx.extdot(); - final ExtbraceContext bracectx = ctx.extbrace(); - - newenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; - - final String name = ctx.TYPE().getText(); - final Struct struct = definition.structs.get(name); - - if (parentemd.current != null) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Unexpected new call."); - } else if (struct == null) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Specified type [" + name + "] not found."); - } else if (newenmd.last && parentemd.storeExpr != null) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Cannot assign a value to a new call."); - } - - final boolean newclass = ctx.arguments() != null; - final boolean newarray = !ctx.expression().isEmpty(); - - final List arguments = newclass ? 
ctx.arguments().expression() : ctx.expression(); - final int size = arguments.size(); - - Type[] types; - - if (newarray) { - if (!parentemd.read) { - throw new IllegalArgumentException(Metadata.error(ctx) + "A newly created array must be assigned."); - } - - types = new Type[size]; - Arrays.fill(types, definition.intType); - - newenmd.target = "#makearray"; - - if (size > 1) { - newenmd.type = definition.getType(struct, size); - parentemd.current = newenmd.type; - } else if (size == 1) { - newenmd.type = definition.getType(struct, 0); - parentemd.current = definition.getType(struct, 1); - } else { - throw new IllegalArgumentException(Metadata.error(ctx) + "A newly created array cannot have zero dimensions."); - } - } else if (newclass) { - final Constructor constructor = struct.constructors.get("new"); - - if (constructor != null) { - types = new Type[constructor.arguments.size()]; - constructor.arguments.toArray(types); - - newenmd.target = constructor; - newenmd.type = definition.getType(struct, 0); - parentemd.statement = !parentemd.read && newenmd.last; - parentemd.current = newenmd.type; - } else { - throw new IllegalArgumentException( - Metadata.error(ctx) + "Unknown new call on type [" + struct.name + "]."); - } - } else { - throw new IllegalArgumentException(Metadata.error(ctx) + "Unknown parser state."); - } - - if (size != types.length) { - throw new IllegalArgumentException(Metadata.error(ctx) + "When calling [" + name + "] on type " + - "[" + struct.name + "] expected [" + types.length + "] arguments," + - " but found [" + arguments.size() + "]."); - } - - for (int argument = 0; argument < size; ++argument) { - final ExpressionContext exprctx = metadata.updateExpressionTree(arguments.get(argument)); - final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); - expremd.to = types[argument]; - visit(exprctx); - markCast(expremd); - } - - if (dotctx != null) { - metadata.createExtNodeMetadata(parent, dotctx); - visit(dotctx); 
- } else if (bracectx != null) { - metadata.createExtNodeMetadata(parent, bracectx); - visit(bracectx); - } + public Void visitExtnew(final ExtnewContext ctx) { + external.processExtnew(ctx); return null; } @Override public Void visitExtstring(final ExtstringContext ctx) { - final Metadata.ExtNodeMetadata memberenmd = metadata.getExtNodeMetadata(ctx); - final ParserRuleContext parent = memberenmd.parent; - final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); - - final String string = ctx.STRING().getText(); - - final ExtdotContext dotctx = ctx.extdot(); - final ExtbraceContext bracectx = ctx.extbrace(); - - memberenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; - final boolean store = memberenmd.last && parentemd.storeExpr != null; - - if (parentemd.current != null) { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected String constant [" + string + "]."); - } - - if (!parentemd.read) { - throw new IllegalArgumentException(Metadata.error(ctx) + "Must read String constant [" + string + "]."); - } else if (store) { - throw new IllegalArgumentException( - Metadata.error(ctx) + "Cannot write to read-only String constant [" + string + "]."); - } - - memberenmd.target = string; - memberenmd.type = definition.stringType; - parentemd.current = definition.stringType; - - if (memberenmd.last) { - parentemd.constant = string; - } - - if (dotctx != null) { - metadata.createExtNodeMetadata(parent, dotctx); - visit(dotctx); - } else if (bracectx != null) { - metadata.createExtNodeMetadata(parent, bracectx); - visit(bracectx); - } + external.processExtstring(ctx); return null; } @Override public Void visitArguments(final ArgumentsContext ctx) { - throw new UnsupportedOperationException(Metadata.error(ctx) + "Unexpected parser state."); + throw new UnsupportedOperationException(AnalyzerUtility.error(ctx) + "Unexpected state."); } @Override - public Void visitIncrement(IncrementContext ctx) { - final 
Metadata.ExpressionMetadata incremd = metadata.getExpressionMetadata(ctx); - final Sort sort = incremd.to == null ? null : incremd.to.sort; - final boolean positive = ctx.INCR() != null; - - if (incremd.to == null) { - incremd.preConst = positive ? 1 : -1; - incremd.from = definition.intType; - } else { - switch (sort) { - case LONG: - incremd.preConst = positive ? 1L : -1L; - incremd.from = definition.longType; - break; - case FLOAT: - incremd.preConst = positive ? 1.0F : -1.0F; - incremd.from = definition.floatType; - break; - case DOUBLE: - incremd.preConst = positive ? 1.0 : -1.0; - incremd.from = definition.doubleType; - break; - default: - incremd.preConst = positive ? 1 : -1; - incremd.from = definition.intType; - } - } + public Void visitIncrement(final IncrementContext ctx) { + expression.processIncrement(ctx); return null; } - - private void analyzeLoadStoreExternal(final ParserRuleContext source) { - final Metadata.ExtNodeMetadata extenmd = metadata.getExtNodeMetadata(source); - final ParserRuleContext parent = extenmd.parent; - final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); - - if (extenmd.last && parentemd.storeExpr != null) { - final ParserRuleContext store = parentemd.storeExpr; - final Metadata.ExpressionMetadata storeemd = metadata.createExpressionMetadata(parentemd.storeExpr); - final int token = parentemd.token; - - if (token > 0) { - visit(store); - - final boolean add = token == ADD; - final boolean xor = token == BWAND || token == BWXOR || token == BWOR; - final boolean decimal = token == MUL || token == DIV || token == REM || token == SUB; - - extenmd.promote = add ? promoteAdd(extenmd.type, storeemd.from) : - xor ? 
promoteXor(extenmd.type, storeemd.from) : - promoteNumeric(extenmd.type, storeemd.from, decimal, true); - - if (extenmd.promote == null) { - throw new IllegalArgumentException("Cannot apply compound assignment to " + - "types [" + extenmd.type.name + "] and [" + storeemd.from.name + "]."); - } - - extenmd.castFrom = getLegalCast(source, extenmd.type, extenmd.promote, false); - extenmd.castTo = getLegalCast(source, extenmd.promote, extenmd.type, true); - - storeemd.to = add && extenmd.promote.sort == Sort.STRING ? storeemd.from : extenmd.promote; - markCast(storeemd); - } else { - storeemd.to = extenmd.type; - visit(store); - markCast(storeemd); - } - } - } - - private void markCast(final Metadata.ExpressionMetadata emd) { - if (emd.from == null) { - throw new IllegalStateException(Metadata.error(emd.source) + "From cast type should never be null."); - } - - if (emd.to != null) { - emd.cast = getLegalCast(emd.source, emd.from, emd.to, emd.explicit || !emd.typesafe); - - if (emd.preConst != null && emd.to.sort.constant) { - emd.postConst = constCast(emd.source, emd.preConst, emd.cast); - } - } else { - throw new IllegalStateException(Metadata.error(emd.source) + "To cast type should never be null."); - } - } - - private Cast getLegalCast(final ParserRuleContext source, final Type from, final Type to, final boolean explicit) { - final Cast cast = new Cast(from, to); - - if (from.equals(to)) { - return cast; - } - - if (from.sort == Sort.DEF && to.sort != Sort.VOID || from.sort != Sort.VOID && to.sort == Sort.DEF) { - final Transform transform = definition.transforms.get(cast); - - if (transform != null) { - return transform; - } - - return cast; - } - - switch (from.sort) { - case BOOL: - switch (to.sort) { - case OBJECT: - case BOOL_OBJ: - return checkTransform(source, cast); - } - - break; - case BYTE: - switch (to.sort) { - case SHORT: - case INT: - case LONG: - case FLOAT: - case DOUBLE: - return cast; - case CHAR: - if (explicit) - return cast; - - break; - case 
OBJECT: - case NUMBER: - case BYTE_OBJ: - case SHORT_OBJ: - case INT_OBJ: - case LONG_OBJ: - case FLOAT_OBJ: - case DOUBLE_OBJ: - return checkTransform(source, cast); - case CHAR_OBJ: - if (explicit) - return checkTransform(source, cast); - - break; - } - - break; - case SHORT: - switch (to.sort) { - case INT: - case LONG: - case FLOAT: - case DOUBLE: - return cast; - case BYTE: - case CHAR: - if (explicit) - return cast; - - break; - case OBJECT: - case NUMBER: - case SHORT_OBJ: - case INT_OBJ: - case LONG_OBJ: - case FLOAT_OBJ: - case DOUBLE_OBJ: - return checkTransform(source, cast); - case BYTE_OBJ: - case CHAR_OBJ: - if (explicit) - return checkTransform(source, cast); - - break; - } - - break; - case CHAR: - switch (to.sort) { - case INT: - case LONG: - case FLOAT: - case DOUBLE: - return cast; - case BYTE: - case SHORT: - if (explicit) - return cast; - - break; - case OBJECT: - case NUMBER: - case CHAR_OBJ: - case INT_OBJ: - case LONG_OBJ: - case FLOAT_OBJ: - case DOUBLE_OBJ: - return checkTransform(source, cast); - case BYTE_OBJ: - case SHORT_OBJ: - if (explicit) - return checkTransform(source, cast); - - break; - } - - break; - case INT: - switch (to.sort) { - case LONG: - case FLOAT: - case DOUBLE: - return cast; - case BYTE: - case SHORT: - case CHAR: - if (explicit) - return cast; - - break; - case OBJECT: - case NUMBER: - case INT_OBJ: - case LONG_OBJ: - case FLOAT_OBJ: - case DOUBLE_OBJ: - return checkTransform(source, cast); - case BYTE_OBJ: - case SHORT_OBJ: - case CHAR_OBJ: - if (explicit) - return checkTransform(source, cast); - - break; - } - - break; - case LONG: - switch (to.sort) { - case FLOAT: - case DOUBLE: - return cast; - case BYTE: - case SHORT: - case CHAR: - case INT: - if (explicit) - return cast; - - break; - case OBJECT: - case NUMBER: - case LONG_OBJ: - case FLOAT_OBJ: - case DOUBLE_OBJ: - return checkTransform(source, cast); - case BYTE_OBJ: - case SHORT_OBJ: - case CHAR_OBJ: - case INT_OBJ: - if (explicit) - return 
checkTransform(source, cast); - - break; - } - - break; - case FLOAT: - switch (to.sort) { - case DOUBLE: - return cast; - case BYTE: - case SHORT: - case CHAR: - case INT: - case LONG: - if (explicit) - return cast; - - break; - case OBJECT: - case NUMBER: - case FLOAT_OBJ: - case DOUBLE_OBJ: - return checkTransform(source, cast); - case BYTE_OBJ: - case SHORT_OBJ: - case CHAR_OBJ: - case INT_OBJ: - case LONG_OBJ: - if (explicit) - return checkTransform(source, cast); - - break; - } - - break; - case DOUBLE: - switch (to.sort) { - case BYTE: - case SHORT: - case CHAR: - case INT: - case LONG: - case FLOAT: - if (explicit) - return cast; - - break; - case OBJECT: - case NUMBER: - case DOUBLE_OBJ: - return checkTransform(source, cast); - case BYTE_OBJ: - case SHORT_OBJ: - case CHAR_OBJ: - case INT_OBJ: - case LONG_OBJ: - case FLOAT_OBJ: - if (explicit) - return checkTransform(source, cast); - - break; - } - - break; - case OBJECT: - case NUMBER: - switch (to.sort) { - case BYTE: - case SHORT: - case CHAR: - case INT: - case LONG: - case FLOAT: - case DOUBLE: - if (explicit) - return checkTransform(source, cast); - - break; - } - - break; - case BOOL_OBJ: - switch (to.sort) { - case BOOL: - return checkTransform(source, cast); - } - - break; - case BYTE_OBJ: - switch (to.sort) { - case BYTE: - case SHORT: - case INT: - case LONG: - case FLOAT: - case DOUBLE: - case SHORT_OBJ: - case INT_OBJ: - case LONG_OBJ: - case FLOAT_OBJ: - case DOUBLE_OBJ: - return checkTransform(source, cast); - case CHAR: - case CHAR_OBJ: - if (explicit) - return checkTransform(source, cast); - - break; - } - - break; - case SHORT_OBJ: - switch (to.sort) { - case SHORT: - case INT: - case LONG: - case FLOAT: - case DOUBLE: - case INT_OBJ: - case LONG_OBJ: - case FLOAT_OBJ: - case DOUBLE_OBJ: - return checkTransform(source, cast); - case BYTE: - case CHAR: - case BYTE_OBJ: - case CHAR_OBJ: - if (explicit) - return checkTransform(source, cast); - - break; - } - - break; - case CHAR_OBJ: - switch 
(to.sort) { - case CHAR: - case INT: - case LONG: - case FLOAT: - case DOUBLE: - case INT_OBJ: - case LONG_OBJ: - case FLOAT_OBJ: - case DOUBLE_OBJ: - return checkTransform(source, cast); - case BYTE: - case SHORT: - case BYTE_OBJ: - case SHORT_OBJ: - if (explicit) - return checkTransform(source, cast); - - break; - } - - break; - case INT_OBJ: - switch (to.sort) { - case INT: - case LONG: - case FLOAT: - case DOUBLE: - case LONG_OBJ: - case FLOAT_OBJ: - case DOUBLE_OBJ: - return checkTransform(source, cast); - case BYTE: - case SHORT: - case CHAR: - case BYTE_OBJ: - case SHORT_OBJ: - case CHAR_OBJ: - if (explicit) - return checkTransform(source, cast); - - break; - } - - break; - case LONG_OBJ: - switch (to.sort) { - case LONG: - case FLOAT: - case DOUBLE: - case FLOAT_OBJ: - case DOUBLE_OBJ: - return checkTransform(source, cast); - case BYTE: - case SHORT: - case CHAR: - case INT: - case BYTE_OBJ: - case SHORT_OBJ: - case CHAR_OBJ: - case INT_OBJ: - if (explicit) - return checkTransform(source, cast); - - break; - } - - break; - case FLOAT_OBJ: - switch (to.sort) { - case FLOAT: - case DOUBLE: - case DOUBLE_OBJ: - return checkTransform(source, cast); - case BYTE: - case SHORT: - case CHAR: - case INT: - case LONG: - case BYTE_OBJ: - case SHORT_OBJ: - case CHAR_OBJ: - case INT_OBJ: - case LONG_OBJ: - if (explicit) - return checkTransform(source, cast); - - break; - } - - break; - case DOUBLE_OBJ: - switch (to.sort) { - case DOUBLE: - return checkTransform(source, cast); - case BYTE: - case SHORT: - case CHAR: - case INT: - case LONG: - case FLOAT: - case BYTE_OBJ: - case SHORT_OBJ: - case CHAR_OBJ: - case INT_OBJ: - case LONG_OBJ: - case FLOAT_OBJ: - if (explicit) - return checkTransform(source, cast); - - break; - } - - break; - } - - try { - from.clazz.asSubclass(to.clazz); - - return cast; - } catch (final ClassCastException cce0) { - try { - if (explicit) { - to.clazz.asSubclass(from.clazz); - - return cast; - } else { - throw new ClassCastException( - 
Metadata.error(source) + "Cannot cast from [" + from.name + "] to [" + to.name + "]."); - } - } catch (final ClassCastException cce1) { - throw new ClassCastException( - Metadata.error(source) + "Cannot cast from [" + from.name + "] to [" + to.name + "]."); - } - } - } - - private Transform checkTransform(final ParserRuleContext source, final Cast cast) { - final Transform transform = definition.transforms.get(cast); - - if (transform == null) { - throw new ClassCastException( - Metadata.error(source) + "Cannot cast from [" + cast.from.name + "] to [" + cast.to.name + "]."); - } - - return transform; - } - - private Object constCast(final ParserRuleContext source, final Object constant, final Cast cast) { - if (cast instanceof Transform) { - final Transform transform = (Transform)cast; - return invokeTransform(source, transform, constant); - } else { - final Sort fsort = cast.from.sort; - final Sort tsort = cast.to.sort; - - if (fsort == tsort) { - return constant; - } else if (fsort.numeric && tsort.numeric) { - Number number; - - if (fsort == Sort.CHAR) { - number = (int)(char)constant; - } else { - number = (Number)constant; - } - - switch (tsort) { - case BYTE: return number.byteValue(); - case SHORT: return number.shortValue(); - case CHAR: return (char)number.intValue(); - case INT: return number.intValue(); - case LONG: return number.longValue(); - case FLOAT: return number.floatValue(); - case DOUBLE: return number.doubleValue(); - default: - throw new IllegalStateException(Metadata.error(source) + "Expected numeric type for cast."); - } - } else { - throw new IllegalStateException(Metadata.error(source) + "No valid constant cast from " + - "[" + cast.from.clazz.getCanonicalName() + "] to " + - "[" + cast.to.clazz.getCanonicalName() + "]."); - } - } - } - - private Object invokeTransform(final ParserRuleContext source, final Transform transform, final Object object) { - final Method method = transform.method; - final java.lang.reflect.Method jmethod = 
method.reflect; - final int modifiers = jmethod.getModifiers(); - - try { - if (java.lang.reflect.Modifier.isStatic(modifiers)) { - return jmethod.invoke(null, object); - } else { - return jmethod.invoke(object); - } - } catch (IllegalAccessException | IllegalArgumentException | - java.lang.reflect.InvocationTargetException | NullPointerException | - ExceptionInInitializerError exception) { - throw new IllegalStateException(Metadata.error(source) + "Unable to invoke transform to cast constant from " + - "[" + transform.from.name + "] to [" + transform.to.name + "]."); - } - } - - private Type promoteNumeric(final Type from, boolean decimal, boolean primitive) { - final Sort sort = from.sort; - - if (sort == Sort.DEF) { - return definition.defType; - } else if ((sort == Sort.DOUBLE || sort == Sort.DOUBLE_OBJ || sort == Sort.NUMBER) && decimal) { - return primitive ? definition.doubleType : definition.doubleobjType; - } else if ((sort == Sort.FLOAT || sort == Sort.FLOAT_OBJ) && decimal) { - return primitive ? definition.floatType : definition.floatobjType; - } else if (sort == Sort.LONG || sort == Sort.LONG_OBJ || sort == Sort.NUMBER) { - return primitive ? definition.longType : definition.longobjType; - } else if (sort.numeric) { - return primitive ? definition.intType : definition.intobjType; - } - - return null; - } - - private Type promoteNumeric(final Type from0, final Type from1, boolean decimal, boolean primitive) { - final Sort sort0 = from0.sort; - final Sort sort1 = from1.sort; - - if (sort0 == Sort.DEF || sort1 == Sort.DEF) { - return definition.defType; - } - - if (decimal) { - if (sort0 == Sort.DOUBLE || sort0 == Sort.DOUBLE_OBJ || sort0 == Sort.NUMBER || - sort1 == Sort.DOUBLE || sort1 == Sort.DOUBLE_OBJ || sort1 == Sort.NUMBER) { - return primitive ? definition.doubleType : definition.doubleobjType; - } else if (sort0 == Sort.FLOAT || sort0 == Sort.FLOAT_OBJ || sort1 == Sort.FLOAT || sort1 == Sort.FLOAT_OBJ) { - return primitive ? 
definition.floatType : definition.floatobjType; - } - } - - if (sort0 == Sort.LONG || sort0 == Sort.LONG_OBJ || sort0 == Sort.NUMBER || - sort1 == Sort.LONG || sort1 == Sort.LONG_OBJ || sort1 == Sort.NUMBER) { - return primitive ? definition.longType : definition.longobjType; - } else if (sort0.numeric && sort1.numeric) { - return primitive ? definition.intType : definition.intobjType; - } - - return null; - } - - private Type promoteAdd(final Type from0, final Type from1) { - final Sort sort0 = from0.sort; - final Sort sort1 = from1.sort; - - if (sort0 == Sort.STRING || sort1 == Sort.STRING) { - return definition.stringType; - } - - return promoteNumeric(from0, from1, true, true); - } - - private Type promoteXor(final Type from0, final Type from1) { - final Sort sort0 = from0.sort; - final Sort sort1 = from1.sort; - - if (sort0.bool || sort1.bool) { - return definition.booleanType; - } - - return promoteNumeric(from0, from1, false, true); - } - - private Type promoteEquality(final Type from0, final Type from1) { - final Sort sort0 = from0.sort; - final Sort sort1 = from1.sort; - - if (sort0 == Sort.DEF || sort1 == Sort.DEF) { - return definition.defType; - } - - final boolean primitive = sort0.primitive && sort1.primitive; - - if (sort0.bool && sort1.bool) { - return primitive ? 
definition.booleanType : definition.booleanobjType; - } - - if (sort0.numeric && sort1.numeric) { - return promoteNumeric(from0, from1, true, primitive); - } - - return definition.objectType; - } - - private Type promoteReference(final Type from0, final Type from1) { - final Sort sort0 = from0.sort; - final Sort sort1 = from1.sort; - - if (sort0 == Sort.DEF || sort1 == Sort.DEF) { - return definition.defType; - } - - if (sort0.primitive && sort1.primitive) { - if (sort0.bool && sort1.bool) { - return definition.booleanType; - } - - if (sort0.numeric && sort1.numeric) { - return promoteNumeric(from0, from1, true, true); - } - } - - return definition.objectType; - } - - private Type promoteConditional(final Type from0, final Type from1, final Object const0, final Object const1) { - if (from0.equals(from1)) { - return from0; - } - - final Sort sort0 = from0.sort; - final Sort sort1 = from1.sort; - - if (sort0 == Sort.DEF || sort1 == Sort.DEF) { - return definition.defType; - } - - final boolean primitive = sort0.primitive && sort1.primitive; - - if (sort0.bool && sort1.bool) { - return primitive ? definition.booleanType : definition.booleanobjType; - } - - if (sort0.numeric && sort1.numeric) { - if (sort0 == Sort.DOUBLE || sort0 == Sort.DOUBLE_OBJ || sort1 == Sort.DOUBLE || sort1 == Sort.DOUBLE_OBJ) { - return primitive ? definition.doubleType : definition.doubleobjType; - } else if (sort0 == Sort.FLOAT || sort0 == Sort.FLOAT_OBJ || sort1 == Sort.FLOAT || sort1 == Sort.FLOAT_OBJ) { - return primitive ? definition.floatType : definition.floatobjType; - } else if (sort0 == Sort.LONG || sort0 == Sort.LONG_OBJ || sort1 == Sort.LONG || sort1 == Sort.LONG_OBJ) { - return sort0.primitive && sort1.primitive ? definition.longType : definition.longobjType; - } else { - if (sort0 == Sort.BYTE || sort0 == Sort.BYTE_OBJ) { - if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { - return primitive ? 
definition.byteType : definition.byteobjType; - } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { - if (const1 != null) { - final short constant = (short)const1; - - if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; - } - } - - return primitive ? definition.shortType : definition.shortobjType; - } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { - return primitive ? definition.intType : definition.intobjType; - } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { - if (const1 != null) { - final int constant = (int)const1; - - if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; - } - } - - return primitive ? definition.intType : definition.intobjType; - } - } else if (sort0 == Sort.SHORT || sort0 == Sort.SHORT_OBJ) { - if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { - if (const0 != null) { - final short constant = (short)const0; - - if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; - } - } - - return primitive ? definition.shortType : definition.shortobjType; - } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { - return primitive ? definition.shortType : definition.shortobjType; - } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { - return primitive ? definition.intType : definition.intobjType; - } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { - if (const1 != null) { - final int constant = (int)const1; - - if (constant <= Short.MAX_VALUE && constant >= Short.MIN_VALUE) { - return primitive ? definition.shortType : definition.shortobjType; - } - } - - return primitive ? definition.intType : definition.intobjType; - } - } else if (sort0 == Sort.CHAR || sort0 == Sort.CHAR_OBJ) { - if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { - return primitive ? 
definition.intType : definition.intobjType; - } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { - return primitive ? definition.intType : definition.intobjType; - } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { - return primitive ? definition.charType : definition.charobjType; - } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { - if (const1 != null) { - final int constant = (int)const1; - - if (constant <= Character.MAX_VALUE && constant >= Character.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; - } - } - - return primitive ? definition.intType : definition.intobjType; - } - } else if (sort0 == Sort.INT || sort0 == Sort.INT_OBJ) { - if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { - if (const0 != null) { - final int constant = (int)const0; - - if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; - } - } - - return primitive ? definition.intType : definition.intobjType; - } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { - if (const0 != null) { - final int constant = (int)const0; - - if (constant <= Short.MAX_VALUE && constant >= Short.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; - } - } - - return primitive ? definition.intType : definition.intobjType; - } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { - if (const0 != null) { - final int constant = (int)const0; - - if (constant <= Character.MAX_VALUE && constant >= Character.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; - } - } - - return primitive ? definition.intType : definition.intobjType; - } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { - return primitive ? definition.intType : definition.intobjType; - } - } - } - } - - final Pair pair = new Pair(from0, from1); - final Type bound = definition.bounds.get(pair); - - return bound == null ? 
definition.objectType : bound; - } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java new file mode 100644 index 000000000000..46a510bc6bbf --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java @@ -0,0 +1,563 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.painless; + +import org.antlr.v4.runtime.ParserRuleContext; +import org.elasticsearch.painless.Definition.Cast; +import org.elasticsearch.painless.Definition.Method; +import org.elasticsearch.painless.Definition.Sort; +import org.elasticsearch.painless.Definition.Transform; +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Metadata.ExpressionMetadata; + +class AnalyzerCaster { + private final Definition definition; + + AnalyzerCaster(final Definition definition) { + this.definition = definition; + } + + void markCast(final ExpressionMetadata emd) { + if (emd.from == null) { + throw new IllegalStateException(AnalyzerUtility.error(emd.source) + "From cast type should never be null."); + } + + if (emd.to != null) { + emd.cast = getLegalCast(emd.source, emd.from, emd.to, emd.explicit || !emd.typesafe); + + if (emd.preConst != null && emd.to.sort.constant) { + emd.postConst = constCast(emd.source, emd.preConst, emd.cast); + } + } else { + throw new IllegalStateException(AnalyzerUtility.error(emd.source) + "To cast type should never be null."); + } + } + + Cast getLegalCast(final ParserRuleContext source, final Type from, final Type to, final boolean explicit) { + final Cast cast = new Cast(from, to); + + if (from.equals(to)) { + return cast; + } + + if (from.sort == Sort.DEF && to.sort != Sort.VOID || from.sort != Sort.VOID && to.sort == Sort.DEF) { + final Transform transform = definition.transforms.get(cast); + + if (transform != null) { + return transform; + } + + return cast; + } + + switch (from.sort) { + case BOOL: + switch (to.sort) { + case OBJECT: + case BOOL_OBJ: + return checkTransform(source, cast); + } + + break; + case BYTE: + switch (to.sort) { + case SHORT: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + return cast; + case CHAR: + if (explicit) + return cast; + + break; + case OBJECT: + case NUMBER: + case BYTE_OBJ: + case SHORT_OBJ: + case INT_OBJ: + case LONG_OBJ: + case 
FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case CHAR_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case SHORT: + switch (to.sort) { + case INT: + case LONG: + case FLOAT: + case DOUBLE: + return cast; + case BYTE: + case CHAR: + if (explicit) + return cast; + + break; + case OBJECT: + case NUMBER: + case SHORT_OBJ: + case INT_OBJ: + case LONG_OBJ: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE_OBJ: + case CHAR_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case CHAR: + switch (to.sort) { + case INT: + case LONG: + case FLOAT: + case DOUBLE: + return cast; + case BYTE: + case SHORT: + if (explicit) + return cast; + + break; + case OBJECT: + case NUMBER: + case CHAR_OBJ: + case INT_OBJ: + case LONG_OBJ: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE_OBJ: + case SHORT_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case INT: + switch (to.sort) { + case LONG: + case FLOAT: + case DOUBLE: + return cast; + case BYTE: + case SHORT: + case CHAR: + if (explicit) + return cast; + + break; + case OBJECT: + case NUMBER: + case INT_OBJ: + case LONG_OBJ: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE_OBJ: + case SHORT_OBJ: + case CHAR_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case LONG: + switch (to.sort) { + case FLOAT: + case DOUBLE: + return cast; + case BYTE: + case SHORT: + case CHAR: + case INT: + if (explicit) + return cast; + + break; + case OBJECT: + case NUMBER: + case LONG_OBJ: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE_OBJ: + case SHORT_OBJ: + case CHAR_OBJ: + case INT_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case FLOAT: + switch (to.sort) { + case DOUBLE: + 
return cast; + case BYTE: + case SHORT: + case CHAR: + case INT: + case LONG: + if (explicit) + return cast; + + break; + case OBJECT: + case NUMBER: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE_OBJ: + case SHORT_OBJ: + case CHAR_OBJ: + case INT_OBJ: + case LONG_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case DOUBLE: + switch (to.sort) { + case BYTE: + case SHORT: + case CHAR: + case INT: + case LONG: + case FLOAT: + if (explicit) + return cast; + + break; + case OBJECT: + case NUMBER: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE_OBJ: + case SHORT_OBJ: + case CHAR_OBJ: + case INT_OBJ: + case LONG_OBJ: + case FLOAT_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case OBJECT: + case NUMBER: + switch (to.sort) { + case BYTE: + case SHORT: + case CHAR: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case BOOL_OBJ: + switch (to.sort) { + case BOOL: + return checkTransform(source, cast); + } + + break; + case BYTE_OBJ: + switch (to.sort) { + case BYTE: + case SHORT: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + case SHORT_OBJ: + case INT_OBJ: + case LONG_OBJ: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case CHAR: + case CHAR_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case SHORT_OBJ: + switch (to.sort) { + case SHORT: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + case INT_OBJ: + case LONG_OBJ: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE: + case CHAR: + case BYTE_OBJ: + case CHAR_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case CHAR_OBJ: + switch (to.sort) { + case CHAR: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + case INT_OBJ: + case 
LONG_OBJ: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE: + case SHORT: + case BYTE_OBJ: + case SHORT_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case INT_OBJ: + switch (to.sort) { + case INT: + case LONG: + case FLOAT: + case DOUBLE: + case LONG_OBJ: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE: + case SHORT: + case CHAR: + case BYTE_OBJ: + case SHORT_OBJ: + case CHAR_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case LONG_OBJ: + switch (to.sort) { + case LONG: + case FLOAT: + case DOUBLE: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE: + case SHORT: + case CHAR: + case INT: + case BYTE_OBJ: + case SHORT_OBJ: + case CHAR_OBJ: + case INT_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case FLOAT_OBJ: + switch (to.sort) { + case FLOAT: + case DOUBLE: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE: + case SHORT: + case CHAR: + case INT: + case LONG: + case BYTE_OBJ: + case SHORT_OBJ: + case CHAR_OBJ: + case INT_OBJ: + case LONG_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case DOUBLE_OBJ: + switch (to.sort) { + case DOUBLE: + return checkTransform(source, cast); + case BYTE: + case SHORT: + case CHAR: + case INT: + case LONG: + case FLOAT: + case BYTE_OBJ: + case SHORT_OBJ: + case CHAR_OBJ: + case INT_OBJ: + case LONG_OBJ: + case FLOAT_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + } + + try { + from.clazz.asSubclass(to.clazz); + + return cast; + } catch (final ClassCastException cce0) { + try { + if (explicit) { + to.clazz.asSubclass(from.clazz); + + return cast; + } else { + throw new ClassCastException( + AnalyzerUtility.error(source) + "Cannot cast from [" + from.name + "] to [" + to.name + "]."); + } + } catch 
(final ClassCastException cce1) { + throw new ClassCastException( + AnalyzerUtility.error(source) + "Cannot cast from [" + from.name + "] to [" + to.name + "]."); + } + } + } + + private Transform checkTransform(final ParserRuleContext source, final Cast cast) { + final Transform transform = definition.transforms.get(cast); + + if (transform == null) { + throw new ClassCastException( + AnalyzerUtility.error(source) + "Cannot cast from [" + cast.from.name + "] to [" + cast.to.name + "]."); + } + + return transform; + } + + private Object constCast(final ParserRuleContext source, final Object constant, final Cast cast) { + if (cast instanceof Transform) { + final Transform transform = (Transform)cast; + return invokeTransform(source, transform, constant); + } else { + final Sort fsort = cast.from.sort; + final Sort tsort = cast.to.sort; + + if (fsort == tsort) { + return constant; + } else if (fsort.numeric && tsort.numeric) { + Number number; + + if (fsort == Sort.CHAR) { + number = (int)(char)constant; + } else { + number = (Number)constant; + } + + switch (tsort) { + case BYTE: return number.byteValue(); + case SHORT: return number.shortValue(); + case CHAR: return (char)number.intValue(); + case INT: return number.intValue(); + case LONG: return number.longValue(); + case FLOAT: return number.floatValue(); + case DOUBLE: return number.doubleValue(); + default: + throw new IllegalStateException(AnalyzerUtility.error(source) + "Expected numeric type for cast."); + } + } else { + throw new IllegalStateException(AnalyzerUtility.error(source) + "No valid constant cast from " + + "[" + cast.from.clazz.getCanonicalName() + "] to " + + "[" + cast.to.clazz.getCanonicalName() + "]."); + } + } + } + + private Object invokeTransform(final ParserRuleContext source, final Transform transform, final Object object) { + final Method method = transform.method; + final java.lang.reflect.Method jmethod = method.reflect; + final int modifiers = jmethod.getModifiers(); + + try { + if 
(java.lang.reflect.Modifier.isStatic(modifiers)) { + return jmethod.invoke(null, object); + } else { + return jmethod.invoke(object); + } + } catch (IllegalAccessException | IllegalArgumentException | + java.lang.reflect.InvocationTargetException | NullPointerException | + ExceptionInInitializerError exception) { + throw new IllegalStateException(AnalyzerUtility.error(source) + "Unable to invoke transform to cast constant from " + + "[" + transform.from.name + "] to [" + transform.to.name + "]."); + } + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerExpression.java new file mode 100644 index 000000000000..3e74259fecfa --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerExpression.java @@ -0,0 +1,868 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.painless; + +import org.elasticsearch.painless.Definition.Sort; +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Metadata.ExpressionMetadata; +import org.elasticsearch.painless.Metadata.ExternalMetadata; +import org.elasticsearch.painless.PainlessParser.AssignmentContext; +import org.elasticsearch.painless.PainlessParser.BinaryContext; +import org.elasticsearch.painless.PainlessParser.BoolContext; +import org.elasticsearch.painless.PainlessParser.CastContext; +import org.elasticsearch.painless.PainlessParser.CharContext; +import org.elasticsearch.painless.PainlessParser.CompContext; +import org.elasticsearch.painless.PainlessParser.ConditionalContext; +import org.elasticsearch.painless.PainlessParser.DecltypeContext; +import org.elasticsearch.painless.PainlessParser.ExpressionContext; +import org.elasticsearch.painless.PainlessParser.ExternalContext; +import org.elasticsearch.painless.PainlessParser.ExtstartContext; +import org.elasticsearch.painless.PainlessParser.FalseContext; +import org.elasticsearch.painless.PainlessParser.IncrementContext; +import org.elasticsearch.painless.PainlessParser.NullContext; +import org.elasticsearch.painless.PainlessParser.NumericContext; +import org.elasticsearch.painless.PainlessParser.PostincContext; +import org.elasticsearch.painless.PainlessParser.PreincContext; +import org.elasticsearch.painless.PainlessParser.TrueContext; +import org.elasticsearch.painless.PainlessParser.UnaryContext; + +import static org.elasticsearch.painless.PainlessParser.ADD; +import static org.elasticsearch.painless.PainlessParser.BWAND; +import static org.elasticsearch.painless.PainlessParser.BWOR; +import static org.elasticsearch.painless.PainlessParser.BWXOR; +import static org.elasticsearch.painless.PainlessParser.DIV; +import static org.elasticsearch.painless.PainlessParser.LSH; +import static org.elasticsearch.painless.PainlessParser.MUL; +import static 
org.elasticsearch.painless.PainlessParser.REM; +import static org.elasticsearch.painless.PainlessParser.RSH; +import static org.elasticsearch.painless.PainlessParser.SUB; +import static org.elasticsearch.painless.PainlessParser.USH; + +class AnalyzerExpression { + private final Metadata metadata; + private final Definition definition; + private final CompilerSettings settings; + + private final Analyzer analyzer; + private final AnalyzerCaster caster; + private final AnalyzerPromoter promoter; + + AnalyzerExpression(final Metadata metadata, final Analyzer analyzer, + final AnalyzerCaster caster, final AnalyzerPromoter promoter) { + this.metadata = metadata; + this.definition = metadata.definition; + this.settings = metadata.settings; + + this.analyzer = analyzer; + this.caster = caster; + this.promoter = promoter; + } + + void processNumeric(final NumericContext ctx) { + final ExpressionMetadata numericemd = metadata.getExpressionMetadata(ctx); + final boolean negate = ctx.parent instanceof UnaryContext && ((UnaryContext)ctx.parent).SUB() != null; + + if (ctx.DECIMAL() != null) { + final String svalue = (negate ? "-" : "") + ctx.DECIMAL().getText(); + + if (svalue.endsWith("f") || svalue.endsWith("F")) { + try { + numericemd.from = definition.floatType; + numericemd.preConst = Float.parseFloat(svalue.substring(0, svalue.length() - 1)); + } catch (NumberFormatException exception) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Invalid float constant [" + svalue + "]."); + } + } else { + try { + numericemd.from = definition.doubleType; + numericemd.preConst = Double.parseDouble(svalue); + } catch (NumberFormatException exception) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Invalid double constant [" + svalue + "]."); + } + } + } else { + String svalue = negate ? 
"-" : ""; + int radix; + + if (ctx.OCTAL() != null) { + svalue += ctx.OCTAL().getText(); + radix = 8; + } else if (ctx.INTEGER() != null) { + svalue += ctx.INTEGER().getText(); + radix = 10; + } else if (ctx.HEX() != null) { + svalue += ctx.HEX().getText(); + radix = 16; + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + + if (svalue.endsWith("d") || svalue.endsWith("D")) { + try { + numericemd.from = definition.doubleType; + numericemd.preConst = Double.parseDouble(svalue.substring(0, svalue.length() - 1)); + } catch (NumberFormatException exception) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Invalid float constant [" + svalue + "]."); + } + } else if (svalue.endsWith("f") || svalue.endsWith("F")) { + try { + numericemd.from = definition.floatType; + numericemd.preConst = Float.parseFloat(svalue.substring(0, svalue.length() - 1)); + } catch (NumberFormatException exception) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Invalid float constant [" + svalue + "]."); + } + } else if (svalue.endsWith("l") || svalue.endsWith("L")) { + try { + numericemd.from = definition.longType; + numericemd.preConst = Long.parseLong(svalue.substring(0, svalue.length() - 1), radix); + } catch (NumberFormatException exception) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Invalid long constant [" + svalue + "]."); + } + } else { + try { + final Type type = numericemd.to; + final Sort sort = type == null ? 
Sort.INT : type.sort; + final int value = Integer.parseInt(svalue, radix); + + if (sort == Sort.BYTE && value >= Byte.MIN_VALUE && value <= Byte.MAX_VALUE) { + numericemd.from = definition.byteType; + numericemd.preConst = (byte)value; + } else if (sort == Sort.CHAR && value >= Character.MIN_VALUE && value <= Character.MAX_VALUE) { + numericemd.from = definition.charType; + numericemd.preConst = (char)value; + } else if (sort == Sort.SHORT && value >= Short.MIN_VALUE && value <= Short.MAX_VALUE) { + numericemd.from = definition.shortType; + numericemd.preConst = (short)value; + } else { + numericemd.from = definition.intType; + numericemd.preConst = value; + } + } catch (NumberFormatException exception) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Invalid int constant [" + svalue + "]."); + } + } + } + } + + void processChar(final CharContext ctx) { + final ExpressionMetadata charemd = metadata.getExpressionMetadata(ctx); + + if (ctx.CHAR() == null) { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + + charemd.preConst = ctx.CHAR().getText().charAt(0); + charemd.from = definition.charType; + } + + void processTrue(final TrueContext ctx) { + final ExpressionMetadata trueemd = metadata.getExpressionMetadata(ctx); + + if (ctx.TRUE() == null) { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + + trueemd.preConst = true; + trueemd.from = definition.booleanType; + } + + void processFalse(final FalseContext ctx) { + final ExpressionMetadata falseemd = metadata.getExpressionMetadata(ctx); + + if (ctx.FALSE() == null) { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + + falseemd.preConst = false; + falseemd.from = definition.booleanType; + } + + void processNull(final NullContext ctx) { + final ExpressionMetadata nullemd = metadata.getExpressionMetadata(ctx); + + if (ctx.NULL() == null) { + throw new 
IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + + nullemd.isNull = true; + + if (nullemd.to != null) { + if (nullemd.to.sort.primitive) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + + "Cannot cast null to a primitive type [" + nullemd.to.name + "]."); + } + + nullemd.from = nullemd.to; + } else { + nullemd.from = definition.objectType; + } + } + + void processExternal(final ExternalContext ctx) { + final ExpressionMetadata extemd = metadata.getExpressionMetadata(ctx); + + final ExtstartContext extstartctx = ctx.extstart(); + final ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); + extstartemd.read = extemd.read; + analyzer.visit(extstartctx); + + extemd.statement = extstartemd.statement; + extemd.preConst = extstartemd.constant; + extemd.from = extstartemd.current; + extemd.typesafe = extstartemd.current.sort != Sort.DEF; + } + + void processPostinc(final PostincContext ctx) { + final ExpressionMetadata postincemd = metadata.getExpressionMetadata(ctx); + + final ExtstartContext extstartctx = ctx.extstart(); + final ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); + extstartemd.read = postincemd.read; + extstartemd.storeExpr = ctx.increment(); + extstartemd.token = ADD; + extstartemd.post = true; + analyzer.visit(extstartctx); + + postincemd.statement = true; + postincemd.from = extstartemd.read ? 
extstartemd.current : definition.voidType; + postincemd.typesafe = extstartemd.current.sort != Sort.DEF; + } + + void processPreinc(final PreincContext ctx) { + final ExpressionMetadata preincemd = metadata.getExpressionMetadata(ctx); + + final ExtstartContext extstartctx = ctx.extstart(); + final ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); + extstartemd.read = preincemd.read; + extstartemd.storeExpr = ctx.increment(); + extstartemd.token = ADD; + extstartemd.pre = true; + analyzer.visit(extstartctx); + + preincemd.statement = true; + preincemd.from = extstartemd.read ? extstartemd.current : definition.voidType; + preincemd.typesafe = extstartemd.current.sort != Sort.DEF; + } + + void processUnary(final UnaryContext ctx) { + final ExpressionMetadata unaryemd = metadata.getExpressionMetadata(ctx); + + final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + + if (ctx.BOOLNOT() != null) { + expremd.to = definition.booleanType; + analyzer.visit(exprctx); + caster.markCast(expremd); + + if (expremd.postConst != null) { + unaryemd.preConst = !(boolean)expremd.postConst; + } + + unaryemd.from = definition.booleanType; + } else if (ctx.BWNOT() != null || ctx.ADD() != null || ctx.SUB() != null) { + analyzer.visit(exprctx); + + final Type promote = promoter.promoteNumeric(expremd.from, ctx.BWNOT() == null, true); + + if (promote == null) { + throw new ClassCastException(AnalyzerUtility.error(ctx) + "Cannot apply [" + ctx.getChild(0).getText() + "] " + + "operation to type [" + expremd.from.name + "]."); + } + + expremd.to = promote; + caster.markCast(expremd); + + if (expremd.postConst != null) { + final Sort sort = promote.sort; + + if (ctx.BWNOT() != null) { + if (sort == Sort.INT) { + unaryemd.preConst = ~(int)expremd.postConst; + } else if (sort == Sort.LONG) { + unaryemd.preConst = ~(long)expremd.postConst; + } else { + 
throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } else if (ctx.SUB() != null) { + if (exprctx instanceof NumericContext) { + unaryemd.preConst = expremd.postConst; + } else { + if (sort == Sort.INT) { + if (settings.getNumericOverflow()) { + unaryemd.preConst = -(int)expremd.postConst; + } else { + unaryemd.preConst = Math.negateExact((int)expremd.postConst); + } + } else if (sort == Sort.LONG) { + if (settings.getNumericOverflow()) { + unaryemd.preConst = -(long)expremd.postConst; + } else { + unaryemd.preConst = Math.negateExact((long)expremd.postConst); + } + } else if (sort == Sort.FLOAT) { + unaryemd.preConst = -(float)expremd.postConst; + } else if (sort == Sort.DOUBLE) { + unaryemd.preConst = -(double)expremd.postConst; + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } + } else if (ctx.ADD() != null) { + if (sort == Sort.INT) { + unaryemd.preConst = +(int)expremd.postConst; + } else if (sort == Sort.LONG) { + unaryemd.preConst = +(long)expremd.postConst; + } else if (sort == Sort.FLOAT) { + unaryemd.preConst = +(float)expremd.postConst; + } else if (sort == Sort.DOUBLE) { + unaryemd.preConst = +(double)expremd.postConst; + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } + + unaryemd.from = promote; + unaryemd.typesafe = expremd.typesafe; + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } + + void processCast(final CastContext ctx) { + final ExpressionMetadata castemd = metadata.getExpressionMetadata(ctx); + + final DecltypeContext decltypectx = ctx.decltype(); + final ExpressionMetadata decltypemd = metadata.createExpressionMetadata(decltypectx); + analyzer.visit(decltypectx); + + final Type type = decltypemd.from; + castemd.from = type; + + final 
ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + expremd.to = type; + expremd.explicit = true; + analyzer.visit(exprctx); + caster.markCast(expremd); + + if (expremd.postConst != null) { + castemd.preConst = expremd.postConst; + } + + castemd.typesafe = expremd.typesafe && castemd.from.sort != Sort.DEF; + } + + void processBinary(final BinaryContext ctx) { + final ExpressionMetadata binaryemd = metadata.getExpressionMetadata(ctx); + + final ExpressionContext exprctx0 = AnalyzerUtility.updateExpressionTree(ctx.expression(0)); + final ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); + analyzer.visit(exprctx0); + + final ExpressionContext exprctx1 = AnalyzerUtility.updateExpressionTree(ctx.expression(1)); + final ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); + analyzer.visit(exprctx1); + + final boolean decimal = ctx.MUL() != null || ctx.DIV() != null || ctx.REM() != null || ctx.SUB() != null; + final boolean add = ctx.ADD() != null; + final boolean xor = ctx.BWXOR() != null; + final Type promote = add ? promoter.promoteAdd(expremd0.from, expremd1.from) : + xor ? promoter.promoteXor(expremd0.from, expremd1.from) : + promoter.promoteNumeric(expremd0.from, expremd1.from, decimal, true); + + if (promote == null) { + throw new ClassCastException(AnalyzerUtility.error(ctx) + "Cannot apply [" + ctx.getChild(1).getText() + "] " + + "operation to types [" + expremd0.from.name + "] and [" + expremd1.from.name + "]."); + } + + final Sort sort = promote.sort; + expremd0.to = add && sort == Sort.STRING ? expremd0.from : promote; + expremd1.to = add && sort == Sort.STRING ? 
expremd1.from : promote; + caster.markCast(expremd0); + caster.markCast(expremd1); + + if (expremd0.postConst != null && expremd1.postConst != null) { + if (ctx.MUL() != null) { + if (sort == Sort.INT) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (int)expremd0.postConst * (int)expremd1.postConst; + } else { + binaryemd.preConst = Math.multiplyExact((int)expremd0.postConst, (int)expremd1.postConst); + } + } else if (sort == Sort.LONG) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (long)expremd0.postConst * (long)expremd1.postConst; + } else { + binaryemd.preConst = Math.multiplyExact((long)expremd0.postConst, (long)expremd1.postConst); + } + } else if (sort == Sort.FLOAT) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (float)expremd0.postConst * (float)expremd1.postConst; + } else { + binaryemd.preConst = Utility.multiplyWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst); + } + } else if (sort == Sort.DOUBLE) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (double)expremd0.postConst * (double)expremd1.postConst; + } else { + binaryemd.preConst = Utility.multiplyWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); + } + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } else if (ctx.DIV() != null) { + if (sort == Sort.INT) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (int)expremd0.postConst / (int)expremd1.postConst; + } else { + binaryemd.preConst = Utility.divideWithoutOverflow((int)expremd0.postConst, (int)expremd1.postConst); + } + } else if (sort == Sort.LONG) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (long)expremd0.postConst / (long)expremd1.postConst; + } else { + binaryemd.preConst = Utility.divideWithoutOverflow((long)expremd0.postConst, (long)expremd1.postConst); + } + } else if (sort == Sort.FLOAT) { + if (settings.getNumericOverflow()) { + binaryemd.preConst 
= (float)expremd0.postConst / (float)expremd1.postConst; + } else { + binaryemd.preConst = Utility.divideWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst); + } + } else if (sort == Sort.DOUBLE) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (double)expremd0.postConst / (double)expremd1.postConst; + } else { + binaryemd.preConst = Utility.divideWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); + } + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } else if (ctx.REM() != null) { + if (sort == Sort.INT) { + binaryemd.preConst = (int)expremd0.postConst % (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + binaryemd.preConst = (long)expremd0.postConst % (long)expremd1.postConst; + } else if (sort == Sort.FLOAT) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (float)expremd0.postConst % (float)expremd1.postConst; + } else { + binaryemd.preConst = Utility.remainderWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst); + } + } else if (sort == Sort.DOUBLE) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (double)expremd0.postConst % (double)expremd1.postConst; + } else { + binaryemd.preConst = Utility.remainderWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); + } + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } else if (ctx.ADD() != null) { + if (sort == Sort.INT) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (int)expremd0.postConst + (int)expremd1.postConst; + } else { + binaryemd.preConst = Math.addExact((int)expremd0.postConst, (int)expremd1.postConst); + } + } else if (sort == Sort.LONG) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (long)expremd0.postConst + (long)expremd1.postConst; + } else { + binaryemd.preConst = Math.addExact((long)expremd0.postConst, (long)expremd1.postConst); + } + } 
else if (sort == Sort.FLOAT) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (float)expremd0.postConst + (float)expremd1.postConst; + } else { + binaryemd.preConst = Utility.addWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst); + } + } else if (sort == Sort.DOUBLE) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (double)expremd0.postConst + (double)expremd1.postConst; + } else { + binaryemd.preConst = Utility.addWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); + } + } else if (sort == Sort.STRING) { + binaryemd.preConst = "" + expremd0.postConst + expremd1.postConst; + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } else if (ctx.SUB() != null) { + if (sort == Sort.INT) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (int)expremd0.postConst - (int)expremd1.postConst; + } else { + binaryemd.preConst = Math.subtractExact((int)expremd0.postConst, (int)expremd1.postConst); + } + } else if (sort == Sort.LONG) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (long)expremd0.postConst - (long)expremd1.postConst; + } else { + binaryemd.preConst = Math.subtractExact((long)expremd0.postConst, (long)expremd1.postConst); + } + } else if (sort == Sort.FLOAT) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (float)expremd0.postConst - (float)expremd1.postConst; + } else { + binaryemd.preConst = Utility.subtractWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst); + } + } else if (sort == Sort.DOUBLE) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (double)expremd0.postConst - (double)expremd1.postConst; + } else { + binaryemd.preConst = Utility.subtractWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); + } + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } else if (ctx.LSH() != null) { + if (sort == 
Sort.INT) { + binaryemd.preConst = (int)expremd0.postConst << (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + binaryemd.preConst = (long)expremd0.postConst << (long)expremd1.postConst; + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } else if (ctx.RSH() != null) { + if (sort == Sort.INT) { + binaryemd.preConst = (int)expremd0.postConst >> (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + binaryemd.preConst = (long)expremd0.postConst >> (long)expremd1.postConst; + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } else if (ctx.USH() != null) { + if (sort == Sort.INT) { + binaryemd.preConst = (int)expremd0.postConst >>> (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + binaryemd.preConst = (long)expremd0.postConst >>> (long)expremd1.postConst; + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } else if (ctx.BWAND() != null) { + if (sort == Sort.INT) { + binaryemd.preConst = (int)expremd0.postConst & (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + binaryemd.preConst = (long)expremd0.postConst & (long)expremd1.postConst; + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } else if (ctx.BWXOR() != null) { + if (sort == Sort.BOOL) { + binaryemd.preConst = (boolean)expremd0.postConst ^ (boolean)expremd1.postConst; + } else if (sort == Sort.INT) { + binaryemd.preConst = (int)expremd0.postConst ^ (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + binaryemd.preConst = (long)expremd0.postConst ^ (long)expremd1.postConst; + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } else if (ctx.BWOR() != null) { + if (sort == Sort.INT) { + binaryemd.preConst = (int)expremd0.postConst | (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + binaryemd.preConst = 
(long)expremd0.postConst | (long)expremd1.postConst; + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } + + binaryemd.from = promote; + binaryemd.typesafe = expremd0.typesafe && expremd1.typesafe; + } + + void processComp(final CompContext ctx) { + final ExpressionMetadata compemd = metadata.getExpressionMetadata(ctx); + final boolean equality = ctx.EQ() != null || ctx.NE() != null; + final boolean reference = ctx.EQR() != null || ctx.NER() != null; + + final ExpressionContext exprctx0 = AnalyzerUtility.updateExpressionTree(ctx.expression(0)); + final ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); + analyzer.visit(exprctx0); + + final ExpressionContext exprctx1 = AnalyzerUtility.updateExpressionTree(ctx.expression(1)); + final ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); + analyzer.visit(exprctx1); + + if (expremd0.isNull && expremd1.isNull) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unnecessary comparison of null constants."); + } + + final Type promote = equality ? promoter.promoteEquality(expremd0.from, expremd1.from) : + reference ? 
promoter.promoteReference(expremd0.from, expremd1.from) : + promoter.promoteNumeric(expremd0.from, expremd1.from, true, true); + + if (promote == null) { + throw new ClassCastException(AnalyzerUtility.error(ctx) + "Cannot apply [" + ctx.getChild(1).getText() + "] " + + "operation to types [" + expremd0.from.name + "] and [" + expremd1.from.name + "]."); + } + + expremd0.to = promote; + expremd1.to = promote; + caster.markCast(expremd0); + caster.markCast(expremd1); + + if (expremd0.postConst != null && expremd1.postConst != null) { + final Sort sort = promote.sort; + + if (ctx.EQ() != null || ctx.EQR() != null) { + if (sort == Sort.BOOL) { + compemd.preConst = (boolean)expremd0.postConst == (boolean)expremd1.postConst; + } else if (sort == Sort.INT) { + compemd.preConst = (int)expremd0.postConst == (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + compemd.preConst = (long)expremd0.postConst == (long)expremd1.postConst; + } else if (sort == Sort.FLOAT) { + compemd.preConst = (float)expremd0.postConst == (float)expremd1.postConst; + } else if (sort == Sort.DOUBLE) { + compemd.preConst = (double)expremd0.postConst == (double)expremd1.postConst; + } else { + if (ctx.EQ() != null && !expremd0.isNull && !expremd1.isNull) { + compemd.preConst = expremd0.postConst.equals(expremd1.postConst); + } else if (ctx.EQR() != null) { + compemd.preConst = expremd0.postConst == expremd1.postConst; + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } + } else if (ctx.NE() != null || ctx.NER() != null) { + if (sort == Sort.BOOL) { + compemd.preConst = (boolean)expremd0.postConst != (boolean)expremd1.postConst; + } else if (sort == Sort.INT) { + compemd.preConst = (int)expremd0.postConst != (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + compemd.preConst = (long)expremd0.postConst != (long)expremd1.postConst; + } else if (sort == Sort.FLOAT) { + compemd.preConst = (float)expremd0.postConst != 
(float)expremd1.postConst; + } else if (sort == Sort.DOUBLE) { + compemd.preConst = (double)expremd0.postConst != (double)expremd1.postConst; + } else { + if (ctx.NE() != null && !expremd0.isNull && !expremd1.isNull) { + compemd.preConst = expremd0.postConst.equals(expremd1.postConst); + } else if (ctx.NER() != null) { + compemd.preConst = expremd0.postConst == expremd1.postConst; + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } + } else if (ctx.GTE() != null) { + if (sort == Sort.INT) { + compemd.preConst = (int)expremd0.postConst >= (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + compemd.preConst = (long)expremd0.postConst >= (long)expremd1.postConst; + } else if (sort == Sort.FLOAT) { + compemd.preConst = (float)expremd0.postConst >= (float)expremd1.postConst; + } else if (sort == Sort.DOUBLE) { + compemd.preConst = (double)expremd0.postConst >= (double)expremd1.postConst; + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } else if (ctx.GT() != null) { + if (sort == Sort.INT) { + compemd.preConst = (int)expremd0.postConst > (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + compemd.preConst = (long)expremd0.postConst > (long)expremd1.postConst; + } else if (sort == Sort.FLOAT) { + compemd.preConst = (float)expremd0.postConst > (float)expremd1.postConst; + } else if (sort == Sort.DOUBLE) { + compemd.preConst = (double)expremd0.postConst > (double)expremd1.postConst; + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } else if (ctx.LTE() != null) { + if (sort == Sort.INT) { + compemd.preConst = (int)expremd0.postConst <= (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + compemd.preConst = (long)expremd0.postConst <= (long)expremd1.postConst; + } else if (sort == Sort.FLOAT) { + compemd.preConst = (float)expremd0.postConst <= (float)expremd1.postConst; + } else if (sort == 
Sort.DOUBLE) { + compemd.preConst = (double)expremd0.postConst <= (double)expremd1.postConst; + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } else if (ctx.LT() != null) { + if (sort == Sort.INT) { + compemd.preConst = (int)expremd0.postConst < (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + compemd.preConst = (long)expremd0.postConst < (long)expremd1.postConst; + } else if (sort == Sort.FLOAT) { + compemd.preConst = (float)expremd0.postConst < (float)expremd1.postConst; + } else if (sort == Sort.DOUBLE) { + compemd.preConst = (double)expremd0.postConst < (double)expremd1.postConst; + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } + + compemd.from = definition.booleanType; + compemd.typesafe = expremd0.typesafe && expremd1.typesafe; + } + + void processBool(final BoolContext ctx) { + final ExpressionMetadata boolemd = metadata.getExpressionMetadata(ctx); + + final ExpressionContext exprctx0 = AnalyzerUtility.updateExpressionTree(ctx.expression(0)); + final ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); + expremd0.to = definition.booleanType; + analyzer.visit(exprctx0); + caster.markCast(expremd0); + + final ExpressionContext exprctx1 = AnalyzerUtility.updateExpressionTree(ctx.expression(1)); + final ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); + expremd1.to = definition.booleanType; + analyzer.visit(exprctx1); + caster.markCast(expremd1); + + if (expremd0.postConst != null && expremd1.postConst != null) { + if (ctx.BOOLAND() != null) { + boolemd.preConst = (boolean)expremd0.postConst && (boolean)expremd1.postConst; + } else if (ctx.BOOLOR() != null) { + boolemd.preConst = (boolean)expremd0.postConst || (boolean)expremd1.postConst; + } else { + throw new 
IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } + + boolemd.from = definition.booleanType; + boolemd.typesafe = expremd0.typesafe && expremd1.typesafe; + } + + void processConditional(final ConditionalContext ctx) { + final ExpressionMetadata condemd = metadata.getExpressionMetadata(ctx); + + final ExpressionContext exprctx0 = AnalyzerUtility.updateExpressionTree(ctx.expression(0)); + final ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); + expremd0.to = definition.booleanType; + analyzer.visit(exprctx0); + caster.markCast(expremd0); + + if (expremd0.postConst != null) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unnecessary conditional statement."); + } + + final ExpressionContext exprctx1 = AnalyzerUtility.updateExpressionTree(ctx.expression(1)); + final ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); + expremd1.to = condemd.to; + expremd1.explicit = condemd.explicit; + analyzer.visit(exprctx1); + + final ExpressionContext exprctx2 = AnalyzerUtility.updateExpressionTree(ctx.expression(2)); + final ExpressionMetadata expremd2 = metadata.createExpressionMetadata(exprctx2); + expremd2.to = condemd.to; + expremd2.explicit = condemd.explicit; + analyzer.visit(exprctx2); + + if (condemd.to == null) { + final Type promote = promoter.promoteConditional(expremd1.from, expremd2.from, expremd1.preConst, expremd2.preConst); + + expremd1.to = promote; + expremd2.to = promote; + condemd.from = promote; + } else { + condemd.from = condemd.to; + } + + caster.markCast(expremd1); + caster.markCast(expremd2); + + condemd.typesafe = expremd0.typesafe && expremd1.typesafe; + } + + void processAssignment(final AssignmentContext ctx) { + final ExpressionMetadata assignemd = metadata.getExpressionMetadata(ctx); + + final ExtstartContext extstartctx = ctx.extstart(); + final ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); + + extstartemd.read = 
assignemd.read; + extstartemd.storeExpr = AnalyzerUtility.updateExpressionTree(ctx.expression()); + + if (ctx.AMUL() != null) { + extstartemd.token = MUL; + } else if (ctx.ADIV() != null) { + extstartemd.token = DIV; + } else if (ctx.AREM() != null) { + extstartemd.token = REM; + } else if (ctx.AADD() != null) { + extstartemd.token = ADD; + } else if (ctx.ASUB() != null) { + extstartemd.token = SUB; + } else if (ctx.ALSH() != null) { + extstartemd.token = LSH; + } else if (ctx.AUSH() != null) { + extstartemd.token = USH; + } else if (ctx.ARSH() != null) { + extstartemd.token = RSH; + } else if (ctx.AAND() != null) { + extstartemd.token = BWAND; + } else if (ctx.AXOR() != null) { + extstartemd.token = BWXOR; + } else if (ctx.AOR() != null) { + extstartemd.token = BWOR; + } + + analyzer.visit(extstartctx); + + assignemd.statement = true; + assignemd.from = extstartemd.read ? extstartemd.current : definition.voidType; + assignemd.typesafe = extstartemd.current.sort != Sort.DEF; + } + + void processIncrement(final IncrementContext ctx) { + final ExpressionMetadata incremd = metadata.getExpressionMetadata(ctx); + final Sort sort = incremd.to == null ? null : incremd.to.sort; + final boolean positive = ctx.INCR() != null; + + if (incremd.to == null) { + incremd.preConst = positive ? 1 : -1; + incremd.from = definition.intType; + } else { + switch (sort) { + case LONG: + incremd.preConst = positive ? 1L : -1L; + incremd.from = definition.longType; + break; + case FLOAT: + incremd.preConst = positive ? 1.0F : -1.0F; + incremd.from = definition.floatType; + break; + case DOUBLE: + incremd.preConst = positive ? 1.0 : -1.0; + incremd.from = definition.doubleType; + break; + default: + incremd.preConst = positive ? 
1 : -1; + incremd.from = definition.intType; + } + } + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerExternal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerExternal.java new file mode 100644 index 000000000000..db3ab06e7853 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerExternal.java @@ -0,0 +1,816 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.painless; + +import org.antlr.v4.runtime.ParserRuleContext; +import org.elasticsearch.painless.AnalyzerUtility.Variable; +import org.elasticsearch.painless.Definition.Constructor; +import org.elasticsearch.painless.Definition.Field; +import org.elasticsearch.painless.Definition.Method; +import org.elasticsearch.painless.Definition.Sort; +import org.elasticsearch.painless.Definition.Struct; +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Metadata.ExpressionMetadata; +import org.elasticsearch.painless.Metadata.ExtNodeMetadata; +import org.elasticsearch.painless.Metadata.ExternalMetadata; +import org.elasticsearch.painless.PainlessParser.DecltypeContext; +import org.elasticsearch.painless.PainlessParser.ExpressionContext; +import org.elasticsearch.painless.PainlessParser.ExtbraceContext; +import org.elasticsearch.painless.PainlessParser.ExtcallContext; +import org.elasticsearch.painless.PainlessParser.ExtcastContext; +import org.elasticsearch.painless.PainlessParser.ExtdotContext; +import org.elasticsearch.painless.PainlessParser.ExtfieldContext; +import org.elasticsearch.painless.PainlessParser.ExtnewContext; +import org.elasticsearch.painless.PainlessParser.ExtprecContext; +import org.elasticsearch.painless.PainlessParser.ExtstartContext; +import org.elasticsearch.painless.PainlessParser.ExtstringContext; +import org.elasticsearch.painless.PainlessParser.ExttypeContext; +import org.elasticsearch.painless.PainlessParser.ExtvarContext; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.painless.PainlessParser.ADD; +import static org.elasticsearch.painless.PainlessParser.BWAND; +import static org.elasticsearch.painless.PainlessParser.BWOR; +import static org.elasticsearch.painless.PainlessParser.BWXOR; +import static org.elasticsearch.painless.PainlessParser.DIV; +import static org.elasticsearch.painless.PainlessParser.MUL; +import static 
org.elasticsearch.painless.PainlessParser.REM; +import static org.elasticsearch.painless.PainlessParser.SUB; + +class AnalyzerExternal { + private final Metadata metadata; + private final Definition definition; + + private final Analyzer analyzer; + private final AnalyzerUtility utility; + private final AnalyzerCaster caster; + private final AnalyzerPromoter promoter; + + AnalyzerExternal(final Metadata metadata, final Analyzer analyzer, final AnalyzerUtility utility, + final AnalyzerCaster caster, final AnalyzerPromoter promoter) { + this.metadata = metadata; + this.definition = metadata.definition; + + this.analyzer = analyzer; + this.utility = utility; + this.caster = caster; + this.promoter = promoter; + } + + void processExtstart(final ExtstartContext ctx) { + final ExtprecContext precctx = ctx.extprec(); + final ExtcastContext castctx = ctx.extcast(); + final ExttypeContext typectx = ctx.exttype(); + final ExtvarContext varctx = ctx.extvar(); + final ExtnewContext newctx = ctx.extnew(); + final ExtstringContext stringctx = ctx.extstring(); + + if (precctx != null) { + metadata.createExtNodeMetadata(ctx, precctx); + analyzer.visit(precctx); + } else if (castctx != null) { + metadata.createExtNodeMetadata(ctx, castctx); + analyzer.visit(castctx); + } else if (typectx != null) { + metadata.createExtNodeMetadata(ctx, typectx); + analyzer.visit(typectx); + } else if (varctx != null) { + metadata.createExtNodeMetadata(ctx, varctx); + analyzer.visit(varctx); + } else if (newctx != null) { + metadata.createExtNodeMetadata(ctx, newctx); + analyzer.visit(newctx); + } else if (stringctx != null) { + metadata.createExtNodeMetadata(ctx, stringctx); + analyzer.visit(stringctx); + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } + + void processExtprec(final ExtprecContext ctx) { + final ExtNodeMetadata precenmd = metadata.getExtNodeMetadata(ctx); + final ParserRuleContext parent = precenmd.parent; + final 
ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + + final ExtprecContext precctx = ctx.extprec(); + final ExtcastContext castctx = ctx.extcast(); + final ExttypeContext typectx = ctx.exttype(); + final ExtvarContext varctx = ctx.extvar(); + final ExtnewContext newctx = ctx.extnew(); + final ExtstringContext stringctx = ctx.extstring(); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + if (dotctx != null || bracectx != null) { + ++parentemd.scope; + } + + if (precctx != null) { + metadata.createExtNodeMetadata(parent, precctx); + analyzer.visit(precctx); + } else if (castctx != null) { + metadata.createExtNodeMetadata(parent, castctx); + analyzer.visit(castctx); + } else if (typectx != null) { + metadata.createExtNodeMetadata(parent, typectx); + analyzer.visit(typectx); + } else if (varctx != null) { + metadata.createExtNodeMetadata(parent, varctx); + analyzer.visit(varctx); + } else if (newctx != null) { + metadata.createExtNodeMetadata(parent, newctx); + analyzer.visit(newctx); + } else if (stringctx != null) { + metadata.createExtNodeMetadata(ctx, stringctx); + analyzer.visit(stringctx); + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + + parentemd.statement = false; + + if (dotctx != null) { + --parentemd.scope; + + metadata.createExtNodeMetadata(parent, dotctx); + analyzer.visit(dotctx); + } else if (bracectx != null) { + --parentemd.scope; + + metadata.createExtNodeMetadata(parent, bracectx); + analyzer.visit(bracectx); + } + } + + void processExtcast(final ExtcastContext ctx) { + final ExtNodeMetadata castenmd = metadata.getExtNodeMetadata(ctx); + final ParserRuleContext parent = castenmd.parent; + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + + final ExtprecContext precctx = ctx.extprec(); + final ExtcastContext castctx = ctx.extcast(); + final ExttypeContext typectx = ctx.exttype(); + final ExtvarContext 
varctx = ctx.extvar(); + final ExtnewContext newctx = ctx.extnew(); + final ExtstringContext stringctx = ctx.extstring(); + + if (precctx != null) { + metadata.createExtNodeMetadata(parent, precctx); + analyzer.visit(precctx); + } else if (castctx != null) { + metadata.createExtNodeMetadata(parent, castctx); + analyzer.visit(castctx); + } else if (typectx != null) { + metadata.createExtNodeMetadata(parent, typectx); + analyzer.visit(typectx); + } else if (varctx != null) { + metadata.createExtNodeMetadata(parent, varctx); + analyzer.visit(varctx); + } else if (newctx != null) { + metadata.createExtNodeMetadata(parent, newctx); + analyzer.visit(newctx); + } else if (stringctx != null) { + metadata.createExtNodeMetadata(ctx, stringctx); + analyzer.visit(stringctx); + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + + final DecltypeContext declctx = ctx.decltype(); + final ExpressionMetadata declemd = metadata.createExpressionMetadata(declctx); + analyzer.visit(declctx); + + castenmd.castTo = caster.getLegalCast(ctx, parentemd.current, declemd.from, true); + castenmd.type = declemd.from; + parentemd.current = declemd.from; + parentemd.statement = false; + } + + void processExtbrace(final ExtbraceContext ctx) { + final ExtNodeMetadata braceenmd = metadata.getExtNodeMetadata(ctx); + final ParserRuleContext parent = braceenmd.parent; + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + + final boolean array = parentemd.current.sort == Sort.ARRAY; + final boolean def = parentemd.current.sort == Sort.DEF; + boolean map = false; + boolean list = false; + + try { + parentemd.current.clazz.asSubclass(Map.class); + map = true; + } catch (final ClassCastException exception) { + // Do nothing. + } + + try { + parentemd.current.clazz.asSubclass(List.class); + list = true; + } catch (final ClassCastException exception) { + // Do nothing. 
+ } + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + braceenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; + + final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + + if (array || def) { + expremd.to = array ? definition.intType : definition.objectType; + analyzer.visit(exprctx); + caster.markCast(expremd); + + braceenmd.target = "#brace"; + braceenmd.type = def ? definition.defType : + definition.getType(parentemd.current.struct, parentemd.current.type.getDimensions() - 1); + analyzeLoadStoreExternal(ctx); + parentemd.current = braceenmd.type; + + if (dotctx != null) { + metadata.createExtNodeMetadata(parent, dotctx); + analyzer.visit(dotctx); + } else if (bracectx != null) { + metadata.createExtNodeMetadata(parent, bracectx); + analyzer.visit(bracectx); + } + } else { + final boolean store = braceenmd.last && parentemd.storeExpr != null; + final boolean get = parentemd.read || parentemd.token > 0 || !braceenmd.last; + final boolean set = braceenmd.last && store; + + Method getter; + Method setter; + Type valuetype; + Type settype; + + if (map) { + getter = parentemd.current.struct.methods.get("get"); + setter = parentemd.current.struct.methods.get("put"); + + if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1)) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + + "Illegal map get shortcut for type [" + parentemd.current.name + "]."); + } + + if (setter != null && setter.arguments.size() != 2) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + + "Illegal map set shortcut for type [" + parentemd.current.name + "]."); + } + + if (getter != null && setter != null && (!getter.arguments.get(0).equals(setter.arguments.get(0)) + || !getter.rtn.equals(setter.arguments.get(1)))) { + throw new 
IllegalArgumentException(AnalyzerUtility.error(ctx) + "Shortcut argument types must match."); + } + + valuetype = setter != null ? setter.arguments.get(0) : getter != null ? getter.arguments.get(0) : null; + settype = setter == null ? null : setter.arguments.get(1); + } else if (list) { + getter = parentemd.current.struct.methods.get("get"); + setter = parentemd.current.struct.methods.get("set"); + + if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || + getter.arguments.get(0).sort != Sort.INT)) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + + "Illegal list get shortcut for type [" + parentemd.current.name + "]."); + } + + if (setter != null && (setter.arguments.size() != 2 || setter.arguments.get(0).sort != Sort.INT)) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + + "Illegal list set shortcut for type [" + parentemd.current.name + "]."); + } + + if (getter != null && setter != null && (!getter.arguments.get(0).equals(setter.arguments.get(0)) + || !getter.rtn.equals(setter.arguments.get(1)))) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Shortcut argument types must match."); + } + + valuetype = definition.intType; + settype = setter == null ? null : setter.arguments.get(1); + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + + if ((get || set) && (!get || getter != null) && (!set || setter != null)) { + expremd.to = valuetype; + analyzer.visit(exprctx); + caster.markCast(expremd); + + braceenmd.target = new Object[] {getter, setter, true, null}; + braceenmd.type = get ? getter.rtn : settype; + analyzeLoadStoreExternal(ctx); + parentemd.current = get ? 
getter.rtn : setter.rtn; + } + } + + if (braceenmd.target == null) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + + "Attempting to address a non-array type [" + parentemd.current.name + "] as an array."); + } + } + + void processExtdot(final ExtdotContext ctx) { + final ExtNodeMetadata dotemnd = metadata.getExtNodeMetadata(ctx); + final ParserRuleContext parent = dotemnd.parent; + + final ExtcallContext callctx = ctx.extcall(); + final ExtfieldContext fieldctx = ctx.extfield(); + + if (callctx != null) { + metadata.createExtNodeMetadata(parent, callctx); + analyzer.visit(callctx); + } else if (fieldctx != null) { + metadata.createExtNodeMetadata(parent, fieldctx); + analyzer.visit(fieldctx); + } + } + + void processExttype(final ExttypeContext ctx) { + final ExtNodeMetadata typeenmd = metadata.getExtNodeMetadata(ctx); + final ParserRuleContext parent = typeenmd.parent; + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + + if (parentemd.current != null) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unexpected static type."); + } + + final String typestr = ctx.TYPE().getText(); + typeenmd.type = definition.getType(typestr); + parentemd.current = typeenmd.type; + parentemd.statik = true; + + final ExtdotContext dotctx = ctx.extdot(); + metadata.createExtNodeMetadata(parent, dotctx); + analyzer.visit(dotctx); + } + + void processExtcall(final ExtcallContext ctx) { + final ExtNodeMetadata callenmd = metadata.getExtNodeMetadata(ctx); + final ParserRuleContext parent = callenmd.parent; + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + callenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; + + final String name = ctx.EXTID().getText(); + + if (parentemd.current.sort == Sort.ARRAY) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unexpected 
call [" + name + "] on an array."); + } else if (callenmd.last && parentemd.storeExpr != null) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Cannot assign a value to a call [" + name + "]."); + } + + final Struct struct = parentemd.current.struct; + final List arguments = ctx.arguments().expression(); + final int size = arguments.size(); + Type[] types; + + final Method method = parentemd.statik ? struct.functions.get(name) : struct.methods.get(name); + final boolean def = parentemd.current.sort == Sort.DEF; + + if (method == null && !def) { + throw new IllegalArgumentException( + AnalyzerUtility.error(ctx) + "Unknown call [" + name + "] on type [" + struct.name + "]."); + } else if (method != null) { + types = new Type[method.arguments.size()]; + method.arguments.toArray(types); + + callenmd.target = method; + callenmd.type = method.rtn; + parentemd.statement = !parentemd.read && callenmd.last; + parentemd.current = method.rtn; + + if (size != types.length) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "When calling [" + name + "] on type " + + "[" + struct.name + "] expected [" + types.length + "] arguments," + + " but found [" + arguments.size() + "]."); + } + } else { + types = new Type[arguments.size()]; + Arrays.fill(types, definition.defType); + + callenmd.target = name; + callenmd.type = definition.defType; + parentemd.statement = !parentemd.read && callenmd.last; + parentemd.current = callenmd.type; + } + + for (int argument = 0; argument < size; ++argument) { + final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(arguments.get(argument)); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + expremd.to = types[argument]; + analyzer.visit(exprctx); + caster.markCast(expremd); + } + + parentemd.statik = false; + + if (dotctx != null) { + metadata.createExtNodeMetadata(parent, dotctx); + analyzer.visit(dotctx); + } else if (bracectx != null) { + 
metadata.createExtNodeMetadata(parent, bracectx); + analyzer.visit(bracectx); + } + } + + void processExtvar(final ExtvarContext ctx) { + final ExtNodeMetadata varenmd = metadata.getExtNodeMetadata(ctx); + final ParserRuleContext parent = varenmd.parent; + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + + final String name = ctx.ID().getText(); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + if (parentemd.current != null) { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected variable [" + name + "] load."); + } + + varenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; + + final Variable variable = utility.getVariable(name); + + if (variable == null) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unknown variable [" + name + "]."); + } + + varenmd.target = variable.slot; + varenmd.type = variable.type; + analyzeLoadStoreExternal(ctx); + parentemd.current = varenmd.type; + + if (dotctx != null) { + metadata.createExtNodeMetadata(parent, dotctx); + analyzer.visit(dotctx); + } else if (bracectx != null) { + metadata.createExtNodeMetadata(parent, bracectx); + analyzer.visit(bracectx); + } + } + + void processExtfield(final ExtfieldContext ctx) { + final ExtNodeMetadata memberenmd = metadata.getExtNodeMetadata(ctx); + final ParserRuleContext parent = memberenmd.parent; + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + + if (ctx.EXTID() == null && ctx.EXTINTEGER() == null) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + + final String value = ctx.EXTID() == null ? 
ctx.EXTINTEGER().getText() : ctx.EXTID().getText(); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + memberenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; + final boolean store = memberenmd.last && parentemd.storeExpr != null; + + if (parentemd.current == null) { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected field [" + value + "] load."); + } + + if (parentemd.current.sort == Sort.ARRAY) { + if ("length".equals(value)) { + if (!parentemd.read) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Must read array field [length]."); + } else if (store) { + throw new IllegalArgumentException( + AnalyzerUtility.error(ctx) + "Cannot write to read-only array field [length]."); + } + + memberenmd.target = "#length"; + memberenmd.type = definition.intType; + parentemd.current = definition.intType; + } else { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unexpected array field [" + value + "]."); + } + } else if (parentemd.current.sort == Sort.DEF) { + memberenmd.target = value; + memberenmd.type = definition.defType; + analyzeLoadStoreExternal(ctx); + parentemd.current = memberenmd.type; + } else { + final Struct struct = parentemd.current.struct; + final Field field = parentemd.statik ? 
struct.statics.get(value) : struct.members.get(value); + + if (field != null) { + if (store && java.lang.reflect.Modifier.isFinal(field.reflect.getModifiers())) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Cannot write to read-only" + + " field [" + value + "] for type [" + struct.name + "]."); + } + + memberenmd.target = field; + memberenmd.type = field.type; + analyzeLoadStoreExternal(ctx); + parentemd.current = memberenmd.type; + } else { + final boolean get = parentemd.read || parentemd.token > 0 || !memberenmd.last; + final boolean set = memberenmd.last && store; + + Method getter = struct.methods.get("get" + Character.toUpperCase(value.charAt(0)) + value.substring(1)); + Method setter = struct.methods.get("set" + Character.toUpperCase(value.charAt(0)) + value.substring(1)); + Object constant = null; + + if (getter != null && (getter.rtn.sort == Sort.VOID || !getter.arguments.isEmpty())) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + + "Illegal get shortcut on field [" + value + "] for type [" + struct.name + "]."); + } + + if (setter != null && (setter.rtn.sort != Sort.VOID || setter.arguments.size() != 1)) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + + "Illegal set shortcut on field [" + value + "] for type [" + struct.name + "]."); + } + + Type settype = setter == null ? 
null : setter.arguments.get(0); + + if (getter == null && setter == null) { + if (ctx.EXTID() != null) { + try { + parentemd.current.clazz.asSubclass(Map.class); + + getter = parentemd.current.struct.methods.get("get"); + setter = parentemd.current.struct.methods.get("put"); + + if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || + getter.arguments.get(0).sort != Sort.STRING)) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + + "Illegal map get shortcut [" + value + "] for type [" + struct.name + "]."); + } + + if (setter != null && (setter.arguments.size() != 2 || + setter.arguments.get(0).sort != Sort.STRING)) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + + "Illegal map set shortcut [" + value + "] for type [" + struct.name + "]."); + } + + if (getter != null && setter != null && !getter.rtn.equals(setter.arguments.get(1))) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Shortcut argument types must match."); + } + + settype = setter == null ? null : setter.arguments.get(1); + constant = value; + } catch (ClassCastException exception) { + //Do nothing. 
+ } + } else if (ctx.EXTINTEGER() != null) { + try { + parentemd.current.clazz.asSubclass(List.class); + + getter = parentemd.current.struct.methods.get("get"); + setter = parentemd.current.struct.methods.get("set"); + + if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || + getter.arguments.get(0).sort != Sort.INT)) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + + "Illegal list get shortcut [" + value + "] for type [" + struct.name + "]."); + } + + if (setter != null && (setter.rtn.sort != Sort.VOID || setter.arguments.size() != 2 || + setter.arguments.get(0).sort != Sort.INT)) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + + "Illegal list set shortcut [" + value + "] for type [" + struct.name + "]."); + } + + if (getter != null && setter != null && !getter.rtn.equals(setter.arguments.get(1))) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Shortcut argument types must match."); + } + + settype = setter == null ? null : setter.arguments.get(1); + + try { + constant = Integer.parseInt(value); + } catch (NumberFormatException exception) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + + "Illegal list shortcut value [" + value + "]."); + } + } catch (ClassCastException exception) { + //Do nothing. + } + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } + + if ((get || set) && (!get || getter != null) && (!set || setter != null)) { + memberenmd.target = new Object[] {getter, setter, constant != null, constant}; + memberenmd.type = get ? getter.rtn : settype; + analyzeLoadStoreExternal(ctx); + parentemd.current = get ? 
getter.rtn : setter.rtn; + } + } + + if (memberenmd.target == null) { + throw new IllegalArgumentException( + AnalyzerUtility.error(ctx) + "Unknown field [" + value + "] for type [" + struct.name + "]."); + } + } + + parentemd.statik = false; + + if (dotctx != null) { + metadata.createExtNodeMetadata(parent, dotctx); + analyzer.visit(dotctx); + } else if (bracectx != null) { + metadata.createExtNodeMetadata(parent, bracectx); + analyzer.visit(bracectx); + } + } + + void processExtnew(final ExtnewContext ctx) { + final ExtNodeMetadata newenmd = metadata.getExtNodeMetadata(ctx); + final ParserRuleContext parent = newenmd.parent; + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + newenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; + + final String name = ctx.TYPE().getText(); + final Struct struct = definition.structs.get(name); + + if (parentemd.current != null) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unexpected new call."); + } else if (struct == null) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Specified type [" + name + "] not found."); + } else if (newenmd.last && parentemd.storeExpr != null) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Cannot assign a value to a new call."); + } + + final boolean newclass = ctx.arguments() != null; + final boolean newarray = !ctx.expression().isEmpty(); + + final List arguments = newclass ? 
ctx.arguments().expression() : ctx.expression(); + final int size = arguments.size(); + + Type[] types; + + if (newarray) { + if (!parentemd.read) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "A newly created array must be assigned."); + } + + types = new Type[size]; + Arrays.fill(types, definition.intType); + + newenmd.target = "#makearray"; + + if (size > 1) { + newenmd.type = definition.getType(struct, size); + parentemd.current = newenmd.type; + } else if (size == 1) { + newenmd.type = definition.getType(struct, 0); + parentemd.current = definition.getType(struct, 1); + } else { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "A newly created array cannot have zero dimensions."); + } + } else if (newclass) { + final Constructor constructor = struct.constructors.get("new"); + + if (constructor != null) { + types = new Type[constructor.arguments.size()]; + constructor.arguments.toArray(types); + + newenmd.target = constructor; + newenmd.type = definition.getType(struct, 0); + parentemd.statement = !parentemd.read && newenmd.last; + parentemd.current = newenmd.type; + } else { + throw new IllegalArgumentException( + AnalyzerUtility.error(ctx) + "Unknown new call on type [" + struct.name + "]."); + } + } else { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unknown state."); + } + + if (size != types.length) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "When calling [" + name + "] on type " + + "[" + struct.name + "] expected [" + types.length + "] arguments," + + " but found [" + arguments.size() + "]."); + } + + for (int argument = 0; argument < size; ++argument) { + final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(arguments.get(argument)); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + expremd.to = types[argument]; + analyzer.visit(exprctx); + caster.markCast(expremd); + } + + if (dotctx != null) { + 
metadata.createExtNodeMetadata(parent, dotctx); + analyzer.visit(dotctx); + } else if (bracectx != null) { + metadata.createExtNodeMetadata(parent, bracectx); + analyzer.visit(bracectx); + } + } + + void processExtstring(final ExtstringContext ctx) { + final ExtNodeMetadata memberenmd = metadata.getExtNodeMetadata(ctx); + final ParserRuleContext parent = memberenmd.parent; + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + + final String string = ctx.STRING().getText(); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + memberenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; + final boolean store = memberenmd.last && parentemd.storeExpr != null; + + if (parentemd.current != null) { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected String constant [" + string + "]."); + } + + if (!parentemd.read) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Must read String constant [" + string + "]."); + } else if (store) { + throw new IllegalArgumentException( + AnalyzerUtility.error(ctx) + "Cannot write to read-only String constant [" + string + "]."); + } + + memberenmd.target = string; + memberenmd.type = definition.stringType; + parentemd.current = definition.stringType; + + if (memberenmd.last) { + parentemd.constant = string; + } + + if (dotctx != null) { + metadata.createExtNodeMetadata(parent, dotctx); + analyzer.visit(dotctx); + } else if (bracectx != null) { + metadata.createExtNodeMetadata(parent, bracectx); + analyzer.visit(bracectx); + } + } + + private void analyzeLoadStoreExternal(final ParserRuleContext source) { + final ExtNodeMetadata extenmd = metadata.getExtNodeMetadata(source); + final ParserRuleContext parent = extenmd.parent; + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + + if (extenmd.last && parentemd.storeExpr != null) { + final ParserRuleContext store = parentemd.storeExpr; + 
final ExpressionMetadata storeemd = metadata.createExpressionMetadata(parentemd.storeExpr); + final int token = parentemd.token; + + if (token > 0) { + analyzer.visit(store); + + final boolean add = token == ADD; + final boolean xor = token == BWAND || token == BWXOR || token == BWOR; + final boolean decimal = token == MUL || token == DIV || token == REM || token == SUB; + + extenmd.promote = add ? promoter.promoteAdd(extenmd.type, storeemd.from) : + xor ? promoter.promoteXor(extenmd.type, storeemd.from) : + promoter.promoteNumeric(extenmd.type, storeemd.from, decimal, true); + + if (extenmd.promote == null) { + throw new IllegalArgumentException("Cannot apply compound assignment to " + + "types [" + extenmd.type.name + "] and [" + storeemd.from.name + "]."); + } + + extenmd.castFrom = caster.getLegalCast(source, extenmd.type, extenmd.promote, false); + extenmd.castTo = caster.getLegalCast(source, extenmd.promote, extenmd.type, true); + + storeemd.to = add && extenmd.promote.sort == Sort.STRING ? storeemd.from : extenmd.promote; + caster.markCast(storeemd); + } else { + storeemd.to = extenmd.type; + analyzer.visit(store); + caster.markCast(storeemd); + } + } + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerPromoter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerPromoter.java new file mode 100644 index 000000000000..ff77fb06d93b --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerPromoter.java @@ -0,0 +1,281 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless; + +import org.elasticsearch.painless.Definition.Pair; +import org.elasticsearch.painless.Definition.Sort; +import org.elasticsearch.painless.Definition.Type; + +class AnalyzerPromoter { + private final Definition definition; + + AnalyzerPromoter(final Definition definition) { + this.definition = definition; + } + + Type promoteNumeric(final Type from, final boolean decimal, final boolean primitive) { + final Sort sort = from.sort; + + if (sort == Sort.DEF) { + return definition.defType; + } else if ((sort == Sort.DOUBLE || sort == Sort.DOUBLE_OBJ || sort == Sort.NUMBER) && decimal) { + return primitive ? definition.doubleType : definition.doubleobjType; + } else if ((sort == Sort.FLOAT || sort == Sort.FLOAT_OBJ) && decimal) { + return primitive ? definition.floatType : definition.floatobjType; + } else if (sort == Sort.LONG || sort == Sort.LONG_OBJ || sort == Sort.NUMBER) { + return primitive ? definition.longType : definition.longobjType; + } else if (sort.numeric) { + return primitive ? 
definition.intType : definition.intobjType; + } + + return null; + } + + Type promoteNumeric(final Type from0, final Type from1, final boolean decimal, final boolean primitive) { + final Sort sort0 = from0.sort; + final Sort sort1 = from1.sort; + + if (sort0 == Sort.DEF || sort1 == Sort.DEF) { + return definition.defType; + } + + if (decimal) { + if (sort0 == Sort.DOUBLE || sort0 == Sort.DOUBLE_OBJ || sort0 == Sort.NUMBER || + sort1 == Sort.DOUBLE || sort1 == Sort.DOUBLE_OBJ || sort1 == Sort.NUMBER) { + return primitive ? definition.doubleType : definition.doubleobjType; + } else if (sort0 == Sort.FLOAT || sort0 == Sort.FLOAT_OBJ || sort1 == Sort.FLOAT || sort1 == Sort.FLOAT_OBJ) { + return primitive ? definition.floatType : definition.floatobjType; + } + } + + if (sort0 == Sort.LONG || sort0 == Sort.LONG_OBJ || sort0 == Sort.NUMBER || + sort1 == Sort.LONG || sort1 == Sort.LONG_OBJ || sort1 == Sort.NUMBER) { + return primitive ? definition.longType : definition.longobjType; + } else if (sort0.numeric && sort1.numeric) { + return primitive ? definition.intType : definition.intobjType; + } + + return null; + } + + Type promoteAdd(final Type from0, final Type from1) { + final Sort sort0 = from0.sort; + final Sort sort1 = from1.sort; + + if (sort0 == Sort.STRING || sort1 == Sort.STRING) { + return definition.stringType; + } + + return promoteNumeric(from0, from1, true, true); + } + + Type promoteXor(final Type from0, final Type from1) { + final Sort sort0 = from0.sort; + final Sort sort1 = from1.sort; + + if (sort0.bool || sort1.bool) { + return definition.booleanType; + } + + return promoteNumeric(from0, from1, false, true); + } + + Type promoteEquality(final Type from0, final Type from1) { + final Sort sort0 = from0.sort; + final Sort sort1 = from1.sort; + + if (sort0 == Sort.DEF || sort1 == Sort.DEF) { + return definition.defType; + } + + final boolean primitive = sort0.primitive && sort1.primitive; + + if (sort0.bool && sort1.bool) { + return primitive ? 
definition.booleanType : definition.booleanobjType; + } + + if (sort0.numeric && sort1.numeric) { + return promoteNumeric(from0, from1, true, primitive); + } + + return definition.objectType; + } + + Type promoteReference(final Type from0, final Type from1) { + final Sort sort0 = from0.sort; + final Sort sort1 = from1.sort; + + if (sort0 == Sort.DEF || sort1 == Sort.DEF) { + return definition.defType; + } + + if (sort0.primitive && sort1.primitive) { + if (sort0.bool && sort1.bool) { + return definition.booleanType; + } + + if (sort0.numeric && sort1.numeric) { + return promoteNumeric(from0, from1, true, true); + } + } + + return definition.objectType; + } + + Type promoteConditional(final Type from0, final Type from1, final Object const0, final Object const1) { + if (from0.equals(from1)) { + return from0; + } + + final Sort sort0 = from0.sort; + final Sort sort1 = from1.sort; + + if (sort0 == Sort.DEF || sort1 == Sort.DEF) { + return definition.defType; + } + + final boolean primitive = sort0.primitive && sort1.primitive; + + if (sort0.bool && sort1.bool) { + return primitive ? definition.booleanType : definition.booleanobjType; + } + + if (sort0.numeric && sort1.numeric) { + if (sort0 == Sort.DOUBLE || sort0 == Sort.DOUBLE_OBJ || sort1 == Sort.DOUBLE || sort1 == Sort.DOUBLE_OBJ) { + return primitive ? definition.doubleType : definition.doubleobjType; + } else if (sort0 == Sort.FLOAT || sort0 == Sort.FLOAT_OBJ || sort1 == Sort.FLOAT || sort1 == Sort.FLOAT_OBJ) { + return primitive ? definition.floatType : definition.floatobjType; + } else if (sort0 == Sort.LONG || sort0 == Sort.LONG_OBJ || sort1 == Sort.LONG || sort1 == Sort.LONG_OBJ) { + return sort0.primitive && sort1.primitive ? definition.longType : definition.longobjType; + } else { + if (sort0 == Sort.BYTE || sort0 == Sort.BYTE_OBJ) { + if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { + return primitive ? 
definition.byteType : definition.byteobjType; + } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { + if (const1 != null) { + final short constant = (short)const1; + + if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { + return primitive ? definition.byteType : definition.byteobjType; + } + } + + return primitive ? definition.shortType : definition.shortobjType; + } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { + return primitive ? definition.intType : definition.intobjType; + } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { + if (const1 != null) { + final int constant = (int)const1; + + if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { + return primitive ? definition.byteType : definition.byteobjType; + } + } + + return primitive ? definition.intType : definition.intobjType; + } + } else if (sort0 == Sort.SHORT || sort0 == Sort.SHORT_OBJ) { + if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { + if (const0 != null) { + final short constant = (short)const0; + + if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { + return primitive ? definition.byteType : definition.byteobjType; + } + } + + return primitive ? definition.shortType : definition.shortobjType; + } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { + return primitive ? definition.shortType : definition.shortobjType; + } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { + return primitive ? definition.intType : definition.intobjType; + } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { + if (const1 != null) { + final int constant = (int)const1; + + if (constant <= Short.MAX_VALUE && constant >= Short.MIN_VALUE) { + return primitive ? definition.shortType : definition.shortobjType; + } + } + + return primitive ? definition.intType : definition.intobjType; + } + } else if (sort0 == Sort.CHAR || sort0 == Sort.CHAR_OBJ) { + if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { + return primitive ? 
definition.intType : definition.intobjType; + } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { + return primitive ? definition.intType : definition.intobjType; + } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { + return primitive ? definition.charType : definition.charobjType; + } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { + if (const1 != null) { + final int constant = (int)const1; + + if (constant <= Character.MAX_VALUE && constant >= Character.MIN_VALUE) { + return primitive ? definition.byteType : definition.byteobjType; + } + } + + return primitive ? definition.intType : definition.intobjType; + } + } else if (sort0 == Sort.INT || sort0 == Sort.INT_OBJ) { + if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { + if (const0 != null) { + final int constant = (int)const0; + + if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { + return primitive ? definition.byteType : definition.byteobjType; + } + } + + return primitive ? definition.intType : definition.intobjType; + } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { + if (const0 != null) { + final int constant = (int)const0; + + if (constant <= Short.MAX_VALUE && constant >= Short.MIN_VALUE) { + return primitive ? definition.byteType : definition.byteobjType; + } + } + + return primitive ? definition.intType : definition.intobjType; + } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { + if (const0 != null) { + final int constant = (int)const0; + + if (constant <= Character.MAX_VALUE && constant >= Character.MIN_VALUE) { + return primitive ? definition.byteType : definition.byteobjType; + } + } + + return primitive ? definition.intType : definition.intobjType; + } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { + return primitive ? definition.intType : definition.intobjType; + } + } + } + } + + final Pair pair = new Pair(from0, from1); + final Type bound = definition.bounds.get(pair); + + return bound == null ? 
definition.objectType : bound; + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerStatement.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerStatement.java new file mode 100644 index 000000000000..e44336035e6a --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerStatement.java @@ -0,0 +1,581 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.painless; + +import org.elasticsearch.painless.Definition.Sort; +import org.elasticsearch.painless.Metadata.ExpressionMetadata; +import org.elasticsearch.painless.Metadata.StatementMetadata; +import org.elasticsearch.painless.PainlessParser.AfterthoughtContext; +import org.elasticsearch.painless.PainlessParser.BlockContext; +import org.elasticsearch.painless.PainlessParser.BreakContext; +import org.elasticsearch.painless.PainlessParser.ContinueContext; +import org.elasticsearch.painless.PainlessParser.DeclContext; +import org.elasticsearch.painless.PainlessParser.DeclarationContext; +import org.elasticsearch.painless.PainlessParser.DecltypeContext; +import org.elasticsearch.painless.PainlessParser.DeclvarContext; +import org.elasticsearch.painless.PainlessParser.DoContext; +import org.elasticsearch.painless.PainlessParser.ExprContext; +import org.elasticsearch.painless.PainlessParser.ExpressionContext; +import org.elasticsearch.painless.PainlessParser.ForContext; +import org.elasticsearch.painless.PainlessParser.IfContext; +import org.elasticsearch.painless.PainlessParser.InitializerContext; +import org.elasticsearch.painless.PainlessParser.MultipleContext; +import org.elasticsearch.painless.PainlessParser.ReturnContext; +import org.elasticsearch.painless.PainlessParser.SingleContext; +import org.elasticsearch.painless.PainlessParser.SourceContext; +import org.elasticsearch.painless.PainlessParser.StatementContext; +import org.elasticsearch.painless.PainlessParser.ThrowContext; +import org.elasticsearch.painless.PainlessParser.TrapContext; +import org.elasticsearch.painless.PainlessParser.TryContext; +import org.elasticsearch.painless.PainlessParser.WhileContext; + +import java.util.List; + +class AnalyzerStatement { + private final Metadata metadata; + private final Definition definition; + + private final Analyzer analyzer; + private final AnalyzerUtility utility; + private final AnalyzerCaster caster; + + AnalyzerStatement(final 
Metadata metadata, final Analyzer analyzer, + final AnalyzerUtility utility, final AnalyzerCaster caster) { + this.metadata = metadata; + this.definition = metadata.definition; + + this.analyzer = analyzer; + this.utility = utility; + this.caster = caster; + } + + void processSource(final SourceContext ctx) { + final StatementMetadata sourcesmd = metadata.getStatementMetadata(ctx); + final List statectxs = ctx.statement(); + final StatementContext lastctx = statectxs.get(statectxs.size() - 1); + + utility.incrementScope(); + + for (final StatementContext statectx : statectxs) { + if (sourcesmd.allLast) { + throw new IllegalArgumentException(AnalyzerUtility.error(statectx) + + "Statement will never be executed because all prior paths escape."); + } + + final StatementMetadata statesmd = metadata.createStatementMetadata(statectx); + statesmd.lastSource = statectx == lastctx; + analyzer.visit(statectx); + + sourcesmd.methodEscape = statesmd.methodEscape; + sourcesmd.allLast = statesmd.allLast; + } + + utility.decrementScope(); + } + + void processIf(final IfContext ctx) { + final StatementMetadata ifsmd = metadata.getStatementMetadata(ctx); + + final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + expremd.to = definition.booleanType; + analyzer.visit(exprctx); + caster.markCast(expremd); + + if (expremd.postConst != null) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "If statement is not necessary."); + } + + final BlockContext blockctx0 = ctx.block(0); + final StatementMetadata blocksmd0 = metadata.createStatementMetadata(blockctx0); + blocksmd0.lastSource = ifsmd.lastSource; + blocksmd0.inLoop = ifsmd.inLoop; + blocksmd0.lastLoop = ifsmd.lastLoop; + utility.incrementScope(); + analyzer.visit(blockctx0); + utility.decrementScope(); + + ifsmd.anyContinue = blocksmd0.anyContinue; + ifsmd.anyBreak = blocksmd0.anyBreak; + + 
ifsmd.count = blocksmd0.count; + + if (ctx.ELSE() != null) { + final BlockContext blockctx1 = ctx.block(1); + final StatementMetadata blocksmd1 = metadata.createStatementMetadata(blockctx1); + blocksmd1.lastSource = ifsmd.lastSource; + utility.incrementScope(); + analyzer.visit(blockctx1); + utility.decrementScope(); + + ifsmd.methodEscape = blocksmd0.methodEscape && blocksmd1.methodEscape; + ifsmd.loopEscape = blocksmd0.loopEscape && blocksmd1.loopEscape; + ifsmd.allLast = blocksmd0.allLast && blocksmd1.allLast; + ifsmd.anyContinue |= blocksmd1.anyContinue; + ifsmd.anyBreak |= blocksmd1.anyBreak; + + ifsmd.count = Math.max(ifsmd.count, blocksmd1.count); + } + } + + void processWhile(final WhileContext ctx) { + final StatementMetadata whilesmd = metadata.getStatementMetadata(ctx); + + utility.incrementScope(); + + final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + expremd.to = definition.booleanType; + analyzer.visit(exprctx); + caster.markCast(expremd); + + boolean continuous = false; + + if (expremd.postConst != null) { + continuous = (boolean)expremd.postConst; + + if (!continuous) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "The loop will never be executed."); + } + + if (ctx.empty() != null) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "The loop will never exit."); + } + } + + final BlockContext blockctx = ctx.block(); + + if (blockctx != null) { + final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + blocksmd.beginLoop = true; + blocksmd.inLoop = true; + analyzer.visit(blockctx); + + if (blocksmd.loopEscape && !blocksmd.anyContinue) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "All paths escape so the loop is not necessary."); + } + + if (continuous && !blocksmd.anyBreak) { + whilesmd.methodEscape = true; + whilesmd.allLast = true; + } + } + + 
whilesmd.count = 1; + + utility.decrementScope(); + } + + void processDo(final DoContext ctx) { + final StatementMetadata dosmd = metadata.getStatementMetadata(ctx); + + utility.incrementScope(); + + final BlockContext blockctx = ctx.block(); + final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + blocksmd.beginLoop = true; + blocksmd.inLoop = true; + analyzer.visit(blockctx); + + if (blocksmd.loopEscape && !blocksmd.anyContinue) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "All paths escape so the loop is not necessary."); + } + + final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + expremd.to = definition.booleanType; + analyzer.visit(exprctx); + caster.markCast(expremd); + + if (expremd.postConst != null) { + final boolean continuous = (boolean)expremd.postConst; + + if (!continuous) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "All paths escape so the loop is not necessary."); + } + + if (!blocksmd.anyBreak) { + dosmd.methodEscape = true; + dosmd.allLast = true; + } + } + + dosmd.count = 1; + + utility.decrementScope(); + } + + void processFor(final ForContext ctx) { + final StatementMetadata forsmd = metadata.getStatementMetadata(ctx); + boolean continuous = false; + + utility.incrementScope(); + + final InitializerContext initctx = ctx.initializer(); + + if (initctx != null) { + metadata.createStatementMetadata(initctx); + analyzer.visit(initctx); + } + + final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression()); + + if (exprctx != null) { + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + expremd.to = definition.booleanType; + analyzer.visit(exprctx); + caster.markCast(expremd); + + if (expremd.postConst != null) { + continuous = (boolean)expremd.postConst; + + if (!continuous) { + throw new 
IllegalArgumentException(AnalyzerUtility.error(ctx) + "The loop will never be executed."); + } + + if (ctx.empty() != null) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "The loop is continuous."); + } + } + } else { + continuous = true; + } + + final AfterthoughtContext atctx = ctx.afterthought(); + + if (atctx != null) { + metadata.createStatementMetadata(atctx); + analyzer.visit(atctx); + } + + final BlockContext blockctx = ctx.block(); + + if (blockctx != null) { + final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + blocksmd.beginLoop = true; + blocksmd.inLoop = true; + analyzer.visit(blockctx); + + if (blocksmd.loopEscape && !blocksmd.anyContinue) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "All paths escape so the loop is not necessary."); + } + + if (continuous && !blocksmd.anyBreak) { + forsmd.methodEscape = true; + forsmd.allLast = true; + } + } + + forsmd.count = 1; + + utility.decrementScope(); + } + + void processDecl(final DeclContext ctx) { + final StatementMetadata declsmd = metadata.getStatementMetadata(ctx); + + final DeclarationContext declctx = ctx.declaration(); + metadata.createStatementMetadata(declctx); + analyzer.visit(declctx); + + declsmd.count = 1; + } + + void processContinue(final ContinueContext ctx) { + final StatementMetadata continuesmd = metadata.getStatementMetadata(ctx); + + if (!continuesmd.inLoop) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Cannot have a continue statement outside of a loop."); + } + + if (continuesmd.lastLoop) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unnecessary continue statement at the end of a loop."); + } + + continuesmd.allLast = true; + continuesmd.anyContinue = true; + + continuesmd.count = 1; + } + + void processBreak(final BreakContext ctx) { + final StatementMetadata breaksmd = metadata.getStatementMetadata(ctx); + + if (!breaksmd.inLoop) { + throw new 
IllegalArgumentException(AnalyzerUtility.error(ctx) + "Cannot have a break statement outside of a loop."); + } + + breaksmd.loopEscape = true; + breaksmd.allLast = true; + breaksmd.anyBreak = true; + + breaksmd.count = 1; + } + + void processReturn(final ReturnContext ctx) { + final StatementMetadata returnsmd = metadata.getStatementMetadata(ctx); + + final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + expremd.to = definition.objectType; + analyzer.visit(exprctx); + caster.markCast(expremd); + + returnsmd.methodEscape = true; + returnsmd.loopEscape = true; + returnsmd.allLast = true; + + returnsmd.count = 1; + } + + void processTry(final TryContext ctx) { + final StatementMetadata trysmd = metadata.getStatementMetadata(ctx); + + final BlockContext blockctx = ctx.block(); + final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + blocksmd.lastSource = trysmd.lastSource; + blocksmd.inLoop = trysmd.inLoop; + blocksmd.lastLoop = trysmd.lastLoop; + utility.incrementScope(); + analyzer.visit(blockctx); + utility.decrementScope(); + + trysmd.methodEscape = blocksmd.methodEscape; + trysmd.loopEscape = blocksmd.loopEscape; + trysmd.allLast = blocksmd.allLast; + trysmd.anyContinue = blocksmd.anyContinue; + trysmd.anyBreak = blocksmd.anyBreak; + + int trapcount = 0; + + for (final TrapContext trapctx : ctx.trap()) { + final StatementMetadata trapsmd = metadata.createStatementMetadata(trapctx); + trapsmd.lastSource = trysmd.lastSource; + trapsmd.inLoop = trysmd.inLoop; + trapsmd.lastLoop = trysmd.lastLoop; + utility.incrementScope(); + analyzer.visit(trapctx); + utility.decrementScope(); + + trysmd.methodEscape &= trapsmd.methodEscape; + trysmd.loopEscape &= trapsmd.loopEscape; + trysmd.allLast &= trapsmd.allLast; + trysmd.anyContinue |= trapsmd.anyContinue; + trysmd.anyBreak |= trapsmd.anyBreak; + + trapcount = Math.max(trapcount, 
trapsmd.count); + } + + trysmd.count = blocksmd.count + trapcount; + } + + void processThrow(final ThrowContext ctx) { + final StatementMetadata throwsmd = metadata.getStatementMetadata(ctx); + + final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + expremd.to = definition.exceptionType; + analyzer.visit(exprctx); + caster.markCast(expremd); + + throwsmd.methodEscape = true; + throwsmd.loopEscape = true; + throwsmd.allLast = true; + + throwsmd.count = 1; + } + + void processExpr(final ExprContext ctx) { + final StatementMetadata exprsmd = metadata.getStatementMetadata(ctx); + final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + expremd.read = exprsmd.lastSource; + analyzer.visit(exprctx); + + if (!expremd.statement && !exprsmd.lastSource) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Not a statement."); + } + + final boolean rtn = exprsmd.lastSource && expremd.from.sort != Sort.VOID; + exprsmd.methodEscape = rtn; + exprsmd.loopEscape = rtn; + exprsmd.allLast = rtn; + expremd.to = rtn ? 
definition.objectType : expremd.from; + caster.markCast(expremd); + + exprsmd.count = 1; + } + + void processMultiple(final MultipleContext ctx) { + final StatementMetadata multiplesmd = metadata.getStatementMetadata(ctx); + final List statectxs = ctx.statement(); + final StatementContext lastctx = statectxs.get(statectxs.size() - 1); + + for (StatementContext statectx : statectxs) { + if (multiplesmd.allLast) { + throw new IllegalArgumentException(AnalyzerUtility.error(statectx) + + "Statement will never be executed because all prior paths escape."); + } + + final StatementMetadata statesmd = metadata.createStatementMetadata(statectx); + statesmd.lastSource = multiplesmd.lastSource && statectx == lastctx; + statesmd.inLoop = multiplesmd.inLoop; + statesmd.lastLoop = (multiplesmd.beginLoop || multiplesmd.lastLoop) && statectx == lastctx; + analyzer.visit(statectx); + + multiplesmd.methodEscape = statesmd.methodEscape; + multiplesmd.loopEscape = statesmd.loopEscape; + multiplesmd.allLast = statesmd.allLast; + multiplesmd.anyContinue |= statesmd.anyContinue; + multiplesmd.anyBreak |= statesmd.anyBreak; + + multiplesmd.count += statesmd.count; + } + } + + void processSingle(final SingleContext ctx) { + final StatementMetadata singlesmd = metadata.getStatementMetadata(ctx); + + final StatementContext statectx = ctx.statement(); + final StatementMetadata statesmd = metadata.createStatementMetadata(statectx); + statesmd.lastSource = singlesmd.lastSource; + statesmd.inLoop = singlesmd.inLoop; + statesmd.lastLoop = singlesmd.beginLoop || singlesmd.lastLoop; + analyzer.visit(statectx); + + singlesmd.methodEscape = statesmd.methodEscape; + singlesmd.loopEscape = statesmd.loopEscape; + singlesmd.allLast = statesmd.allLast; + singlesmd.anyContinue = statesmd.anyContinue; + singlesmd.anyBreak = statesmd.anyBreak; + + singlesmd.count = statesmd.count; + } + + void processInitializer(InitializerContext ctx) { + final DeclarationContext declctx = ctx.declaration(); + final 
ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression()); + + if (declctx != null) { + metadata.createStatementMetadata(declctx); + analyzer.visit(declctx); + } else if (exprctx != null) { + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + expremd.read = false; + analyzer.visit(exprctx); + + expremd.to = expremd.from; + caster.markCast(expremd); + + if (!expremd.statement) { + throw new IllegalArgumentException(AnalyzerUtility.error(exprctx) + + "The initializer of a for loop must be a statement."); + } + } else { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } + + void processAfterthought(AfterthoughtContext ctx) { + final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression()); + + if (exprctx != null) { + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + expremd.read = false; + analyzer.visit(exprctx); + + expremd.to = expremd.from; + caster.markCast(expremd); + + if (!expremd.statement) { + throw new IllegalArgumentException(AnalyzerUtility.error(exprctx) + + "The afterthought of a for loop must be a statement."); + } + } + } + + void processDeclaration(final DeclarationContext ctx) { + final DecltypeContext decltypectx = ctx.decltype(); + final ExpressionMetadata decltypeemd = metadata.createExpressionMetadata(decltypectx); + analyzer.visit(decltypectx); + + for (final DeclvarContext declvarctx : ctx.declvar()) { + final ExpressionMetadata declvaremd = metadata.createExpressionMetadata(declvarctx); + declvaremd.to = decltypeemd.from; + analyzer.visit(declvarctx); + } + } + + void processDecltype(final DecltypeContext ctx) { + final ExpressionMetadata decltypeemd = metadata.getExpressionMetadata(ctx); + final String name = ctx.getText(); + decltypeemd.from = definition.getType(name); + } + + void processDeclvar(final DeclvarContext ctx) { + final ExpressionMetadata declvaremd = 
metadata.getExpressionMetadata(ctx); + + final String name = ctx.ID().getText(); + declvaremd.postConst = utility.addVariable(ctx, name, declvaremd.to).slot; + + final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression()); + + if (exprctx != null) { + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + expremd.to = declvaremd.to; + analyzer.visit(exprctx); + caster.markCast(expremd); + } + } + + void processTrap(final TrapContext ctx) { + final StatementMetadata trapsmd = metadata.getStatementMetadata(ctx); + + final String type = ctx.TYPE().getText(); + trapsmd.exception = definition.getType(type); + + try { + trapsmd.exception.clazz.asSubclass(Exception.class); + } catch (final ClassCastException exception) { + throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Invalid exception type [" + trapsmd.exception.name + "]."); + } + + final String id = ctx.ID().getText(); + trapsmd.slot = utility.addVariable(ctx, id, trapsmd.exception).slot; + + final BlockContext blockctx = ctx.block(); + + if (blockctx != null) { + final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + blocksmd.lastSource = trapsmd.lastSource; + blocksmd.inLoop = trapsmd.inLoop; + blocksmd.lastLoop = trapsmd.lastLoop; + analyzer.visit(blockctx); + + trapsmd.methodEscape = blocksmd.methodEscape; + trapsmd.loopEscape = blocksmd.loopEscape; + trapsmd.allLast = blocksmd.allLast; + trapsmd.anyContinue = blocksmd.anyContinue; + trapsmd.anyBreak = blocksmd.anyBreak; + } else if (ctx.emptyscope() == null) { + throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); + } + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerUtility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerUtility.java new file mode 100644 index 000000000000..11fb669f1906 --- /dev/null +++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerUtility.java @@ -0,0 +1,144 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless; + +import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.tree.ParseTree; +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.PainlessParser.ExpressionContext; +import org.elasticsearch.painless.PainlessParser.PrecedenceContext; + +import java.util.ArrayDeque; +import java.util.Deque; +import java.util.Iterator; + +class AnalyzerUtility { + static class Variable { + final String name; + final Type type; + final int slot; + + private Variable(final String name, final Type type, final int slot) { + this.name = name; + this.type = type; + this.slot = slot; + } + } + + /** + * A utility method to output consistent error messages. + * @param ctx The ANTLR node the error occurred in. + * @return The error message with tacked on line number and character position. 
+ */ + static String error(final ParserRuleContext ctx) { + return "Analyzer Error [" + ctx.getStart().getLine() + ":" + ctx.getStart().getCharPositionInLine() + "]: "; + } + + /** + * The ANTLR parse tree is modified in one single case; a parent node needs to check a child node to see if it's + * a precedence node, and if so, it must be removed from the tree permanently. Once the ANTLR tree is built, + * precedence nodes are no longer necessary to maintain the correct ordering of the tree, so they only + * add a level of indirection where complicated decisions about metadata passing would have to be made. This + * method removes the need for those decisions. + * @param source The child ANTLR node to check for precedence. + * @return The updated child ANTLR node. + */ + static ExpressionContext updateExpressionTree(ExpressionContext source) { + // Check to see if the ANTLR node is a precedence node. + if (source instanceof PainlessParser.PrecedenceContext) { + final ParserRuleContext parent = source.getParent(); + int index = 0; + + // Mark the index of the source node within the list of child nodes from the parent. + for (final ParseTree child : parent.children) { + if (child == source) { + break; + } + + ++index; + } + + // If there are multiple precedence nodes in a row, remove them all. + while (source instanceof PrecedenceContext) { + source = ((PrecedenceContext)source).expression(); + } + + // Update the parent node with the child of the precedence node. 
+ parent.children.set(index, source); + } + + return source; + } + + private final Deque scopes = new ArrayDeque<>(); + private final Deque variables = new ArrayDeque<>(); + + void incrementScope() { + scopes.push(0); + } + + void decrementScope() { + int remove = scopes.pop(); + + while (remove > 0) { + variables.pop(); + --remove; + } + } + + Variable getVariable(final String name) { + final Iterator itr = variables.iterator(); + + while (itr.hasNext()) { + final Variable variable = itr.next(); + + if (variable.name.equals(name)) { + return variable; + } + } + + return null; + } + + Variable addVariable(final ParserRuleContext source, final String name, final Type type) { + if (getVariable(name) != null) { + if (source == null) { + throw new IllegalArgumentException("Argument name [" + name + "] already defined within the scope."); + } else { + throw new IllegalArgumentException(error(source) + "Variable name [" + name + "] already defined within the scope."); + } + } + + final Variable previous = variables.peekFirst(); + int slot = 0; + + if (previous != null) { + slot += previous.slot + previous.type.type.getSize(); + } + + final Variable variable = new Variable(name, type, slot); + variables.push(variable); + + final int update = scopes.pop() + 1; + scopes.push(update); + + return variable; + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java index d1b0e2dc6fed..3d8123a4800e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java @@ -160,7 +160,7 @@ final class Compiler { // throw new RuntimeException(e); // } - final Class clazz = loader.define(Writer.CLASS_NAME, bytes); + final Class clazz = loader.define(WriterConstants.CLASS_NAME, bytes); final java.lang.reflect.Constructor constructor = 
clazz.getConstructor(Definition.class, String.class, String.class); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Metadata.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Metadata.java index cace48ff4336..e38d6da7d984 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Metadata.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Metadata.java @@ -20,11 +20,8 @@ package org.elasticsearch.painless; import org.antlr.v4.runtime.ParserRuleContext; -import org.antlr.v4.runtime.tree.ParseTree; import org.elasticsearch.painless.Definition.Cast; import org.elasticsearch.painless.Definition.Type; -import org.elasticsearch.painless.PainlessParser.ExpressionContext; -import org.elasticsearch.painless.PainlessParser.PrecedenceContext; import java.util.HashMap; import java.util.Map; @@ -37,7 +34,6 @@ import java.util.Map; * the root of the ANTLR parse tree, and the {@link CompilerSettings}. */ class Metadata { - /** * StatementMetadata is used to store metadata mostly about * control flow for ANTLR nodes related to if/else, do, while, for, etc. @@ -386,15 +382,6 @@ class Metadata { } } - /** - * A utility method to output consistent error messages. - * @param ctx The ANTLR node the error occurred in. - * @return The error message with tacked on line number and character position. - */ - static String error(final ParserRuleContext ctx) { - return "Error [" + ctx.getStart().getLine() + ":" + ctx.getStart().getCharPositionInLine() + "]: "; - } - /** * Acts as both the Painless API and white-list for what types and methods are allowed. 
*/ @@ -490,49 +477,13 @@ class Metadata { final StatementMetadata sourcesmd = statementMetadata.get(source); if (sourcesmd == null) { - throw new IllegalStateException(error(source) + "Statement metadata does not exist at" + + throw new IllegalStateException("Statement metadata does not exist at" + " the parse node with text [" + source.getText() + "]."); } return sourcesmd; } - /** - * The ANTLR parse tree is modified in one single case; a parent node needs to check a child node to see if it's - * a precedence node, and if so, it must be removed from the tree permanently. Once the ANTLR tree is built, - * precedence nodes are no longer necessary to maintain the correct ordering of the tree, so they only - * add a level of indirection where complicated decisions about metadata passing would have to be made. This - * method removes the need for those decisions. - * @param source The child ANTLR node to check for precedence. - * @return The updated child ANTLR node. - */ - ExpressionContext updateExpressionTree(ExpressionContext source) { - // Check to see if the ANTLR node is a precedence node. - if (source instanceof PrecedenceContext) { - final ParserRuleContext parent = source.getParent(); - int index = 0; - - // Mark the index of the source node within the list of child nodes from the parent. - for (final ParseTree child : parent.children) { - if (child == source) { - break; - } - - ++index; - } - - // If there are multiple precedence nodes in a row, remove them all. - while (source instanceof PrecedenceContext) { - source = ((PrecedenceContext)source).expression(); - } - - // Update the parent node with the child of the precedence node. - parent.children.set(index, source); - } - - return source; - } - /** * Creates a new ExpressionMetadata and stores it in the expressionMetadata map. * @param source The ANTLR node for this metadata. 
@@ -554,7 +505,7 @@ class Metadata { final ExpressionMetadata sourceemd = expressionMetadata.get(source); if (sourceemd == null) { - throw new IllegalStateException(error(source) + "Expression metadata does not exist at" + + throw new IllegalStateException("Expression metadata does not exist at" + " the parse node with text [" + source.getText() + "]."); } @@ -582,7 +533,7 @@ class Metadata { final ExternalMetadata sourceemd = externalMetadata.get(source); if (sourceemd == null) { - throw new IllegalStateException(error(source) + "External metadata does not exist at" + + throw new IllegalStateException("External metadata does not exist at" + " the parse node with text [" + source.getText() + "]."); } @@ -610,7 +561,7 @@ class Metadata { final ExtNodeMetadata sourceemd = extNodeMetadata.get(source); if (sourceemd == null) { - throw new IllegalStateException(error(source) + "External metadata does not exist at" + + throw new IllegalStateException("External metadata does not exist at" + " the parse node with text [" + source.getText() + "]."); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java index 431e724127f5..4ddb260aea08 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java @@ -19,20 +19,11 @@ package org.elasticsearch.painless; -import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.tree.ParseTree; -import org.elasticsearch.painless.Definition.Cast; -import org.elasticsearch.painless.Definition.Constructor; -import org.elasticsearch.painless.Definition.Field; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.Sort; -import org.elasticsearch.painless.Definition.Transform; -import org.elasticsearch.painless.Definition.Type; import 
org.elasticsearch.painless.PainlessParser.AfterthoughtContext; import org.elasticsearch.painless.PainlessParser.ArgumentsContext; import org.elasticsearch.painless.PainlessParser.AssignmentContext; import org.elasticsearch.painless.PainlessParser.BinaryContext; -import org.elasticsearch.painless.PainlessParser.BlockContext; import org.elasticsearch.painless.PainlessParser.BoolContext; import org.elasticsearch.painless.PainlessParser.BreakContext; import org.elasticsearch.painless.PainlessParser.CastContext; @@ -48,7 +39,6 @@ import org.elasticsearch.painless.PainlessParser.DoContext; import org.elasticsearch.painless.PainlessParser.EmptyContext; import org.elasticsearch.painless.PainlessParser.EmptyscopeContext; import org.elasticsearch.painless.PainlessParser.ExprContext; -import org.elasticsearch.painless.PainlessParser.ExpressionContext; import org.elasticsearch.painless.PainlessParser.ExtbraceContext; import org.elasticsearch.painless.PainlessParser.ExtcallContext; import org.elasticsearch.painless.PainlessParser.ExtcastContext; @@ -75,261 +65,82 @@ import org.elasticsearch.painless.PainlessParser.PreincContext; import org.elasticsearch.painless.PainlessParser.ReturnContext; import org.elasticsearch.painless.PainlessParser.SingleContext; import org.elasticsearch.painless.PainlessParser.SourceContext; -import org.elasticsearch.painless.PainlessParser.StatementContext; import org.elasticsearch.painless.PainlessParser.ThrowContext; import org.elasticsearch.painless.PainlessParser.TrapContext; import org.elasticsearch.painless.PainlessParser.TrueContext; import org.elasticsearch.painless.PainlessParser.TryContext; import org.elasticsearch.painless.PainlessParser.UnaryContext; import org.elasticsearch.painless.PainlessParser.WhileContext; -import org.elasticsearch.script.ScoreAccessor; import org.objectweb.asm.ClassWriter; import org.objectweb.asm.Label; import org.objectweb.asm.Opcodes; import org.objectweb.asm.commons.GeneratorAdapter; -import 
java.lang.invoke.MethodType; -import java.util.ArrayDeque; -import java.util.Deque; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import static org.elasticsearch.painless.PainlessParser.ADD; -import static org.elasticsearch.painless.PainlessParser.BWAND; -import static org.elasticsearch.painless.PainlessParser.BWOR; -import static org.elasticsearch.painless.PainlessParser.BWXOR; -import static org.elasticsearch.painless.PainlessParser.DIV; -import static org.elasticsearch.painless.PainlessParser.LSH; -import static org.elasticsearch.painless.PainlessParser.MUL; -import static org.elasticsearch.painless.PainlessParser.REM; -import static org.elasticsearch.painless.PainlessParser.RSH; -import static org.elasticsearch.painless.PainlessParser.SUB; -import static org.elasticsearch.painless.PainlessParser.USH; +import static org.elasticsearch.painless.WriterConstants.BASE_CLASS_TYPE; +import static org.elasticsearch.painless.WriterConstants.CLASS_TYPE; +import static org.elasticsearch.painless.WriterConstants.CONSTRUCTOR; +import static org.elasticsearch.painless.WriterConstants.EXECUTE; +import static org.elasticsearch.painless.WriterConstants.MAP_GET; +import static org.elasticsearch.painless.WriterConstants.MAP_TYPE; +import static org.elasticsearch.painless.WriterConstants.SCORE_ACCESSOR_FLOAT; +import static org.elasticsearch.painless.WriterConstants.SCORE_ACCESSOR_TYPE; +import static org.elasticsearch.painless.WriterConstants.SIGNATURE; class Writer extends PainlessParserBaseVisitor { - private static class Branch { - final ParserRuleContext source; - - Label begin = null; - Label end = null; - Label tru = null; - Label fals = null; - - private Branch(final ParserRuleContext source) { - this.source = source; - } - } - - final static String BASE_CLASS_NAME = Executable.class.getName(); - final static String CLASS_NAME = BASE_CLASS_NAME + "$CompiledPainlessExecutable"; - private final 
static org.objectweb.asm.Type BASE_CLASS_TYPE = org.objectweb.asm.Type.getType(Executable.class); - private final static org.objectweb.asm.Type CLASS_TYPE = org.objectweb.asm.Type.getType("L" + CLASS_NAME.replace(".", "/") + ";"); - - private final static org.objectweb.asm.commons.Method CONSTRUCTOR = - getAsmMethod(void.class, "", Definition.class, String.class, String.class); - private final static org.objectweb.asm.commons.Method EXECUTE = getAsmMethod(Object.class, "execute", Map.class); - private final static String SIGNATURE = "(Ljava/util/Map;)Ljava/lang/Object;"; - - private final static org.objectweb.asm.Type PAINLESS_ERROR_TYPE = org.objectweb.asm.Type.getType(PainlessError.class); - - private final static org.objectweb.asm.Type DEFINITION_TYPE = org.objectweb.asm.Type.getType(Definition.class); - - private final static org.objectweb.asm.Type MAP_TYPE = org.objectweb.asm.Type.getType(Map.class); - private final static org.objectweb.asm.commons.Method MAP_GET = getAsmMethod(Object.class, "get", Object.class); - - private final static org.objectweb.asm.Type SCORE_ACCESSOR_TYPE = org.objectweb.asm.Type.getType(ScoreAccessor.class); - private final static org.objectweb.asm.commons.Method SCORE_ACCESSOR_FLOAT = getAsmMethod(float.class, "floatValue"); - - private final static org.objectweb.asm.commons.Method DEF_METHOD_CALL = getAsmMethod( - Object.class, "methodCall", Object.class, String.class, Definition.class, Object[].class, boolean[].class); - private final static org.objectweb.asm.commons.Method DEF_ARRAY_STORE = getAsmMethod( - void.class, "arrayStore", Object.class, Object.class, Object.class, Definition.class, boolean.class, boolean.class); - private final static org.objectweb.asm.commons.Method DEF_ARRAY_LOAD = getAsmMethod( - Object.class, "arrayLoad", Object.class, Object.class, Definition.class, boolean.class); - private final static org.objectweb.asm.commons.Method DEF_FIELD_STORE = getAsmMethod( - void.class, "fieldStore", Object.class, 
Object.class, String.class, Definition.class, boolean.class); - private final static org.objectweb.asm.commons.Method DEF_FIELD_LOAD = getAsmMethod( - Object.class, "fieldLoad", Object.class, String.class, Definition.class); - - private final static org.objectweb.asm.commons.Method DEF_NOT_CALL = getAsmMethod(Object.class, "not", Object.class); - private final static org.objectweb.asm.commons.Method DEF_NEG_CALL = getAsmMethod(Object.class, "neg", Object.class); - private final static org.objectweb.asm.commons.Method DEF_MUL_CALL = getAsmMethod(Object.class, "mul", Object.class, Object.class); - private final static org.objectweb.asm.commons.Method DEF_DIV_CALL = getAsmMethod(Object.class, "div", Object.class, Object.class); - private final static org.objectweb.asm.commons.Method DEF_REM_CALL = getAsmMethod(Object.class, "rem", Object.class, Object.class); - private final static org.objectweb.asm.commons.Method DEF_ADD_CALL = getAsmMethod(Object.class, "add", Object.class, Object.class); - private final static org.objectweb.asm.commons.Method DEF_SUB_CALL = getAsmMethod(Object.class, "sub", Object.class, Object.class); - private final static org.objectweb.asm.commons.Method DEF_LSH_CALL = getAsmMethod(Object.class, "lsh", Object.class, Object.class); - private final static org.objectweb.asm.commons.Method DEF_RSH_CALL = getAsmMethod(Object.class, "rsh", Object.class, Object.class); - private final static org.objectweb.asm.commons.Method DEF_USH_CALL = getAsmMethod(Object.class, "ush", Object.class, Object.class); - private final static org.objectweb.asm.commons.Method DEF_AND_CALL = getAsmMethod(Object.class, "and", Object.class, Object.class); - private final static org.objectweb.asm.commons.Method DEF_XOR_CALL = getAsmMethod(Object.class, "xor", Object.class, Object.class); - private final static org.objectweb.asm.commons.Method DEF_OR_CALL = getAsmMethod(Object.class, "or" , Object.class, Object.class); - private final static org.objectweb.asm.commons.Method 
DEF_EQ_CALL = getAsmMethod(boolean.class, "eq" , Object.class, Object.class); - private final static org.objectweb.asm.commons.Method DEF_LT_CALL = getAsmMethod(boolean.class, "lt" , Object.class, Object.class); - private final static org.objectweb.asm.commons.Method DEF_LTE_CALL = getAsmMethod(boolean.class, "lte", Object.class, Object.class); - private final static org.objectweb.asm.commons.Method DEF_GT_CALL = getAsmMethod(boolean.class, "gt" , Object.class, Object.class); - private final static org.objectweb.asm.commons.Method DEF_GTE_CALL = getAsmMethod(boolean.class, "gte", Object.class, Object.class); - - private final static org.objectweb.asm.Type STRINGBUILDER_TYPE = org.objectweb.asm.Type.getType(StringBuilder.class); - - private final static org.objectweb.asm.commons.Method STRINGBUILDER_CONSTRUCTOR = getAsmMethod(void.class, ""); - private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_BOOLEAN = - getAsmMethod(StringBuilder.class, "append", boolean.class); - private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_CHAR = - getAsmMethod(StringBuilder.class, "append", char.class); - private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_INT = - getAsmMethod(StringBuilder.class, "append", int.class); - private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_LONG = - getAsmMethod(StringBuilder.class, "append", long.class); - private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_FLOAT = - getAsmMethod(StringBuilder.class, "append", float.class); - private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_DOUBLE = - getAsmMethod(StringBuilder.class, "append", double.class); - private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_STRING = - getAsmMethod(StringBuilder.class, "append", String.class); - private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_OBJECT = - getAsmMethod(StringBuilder.class, 
"append", Object.class); - private final static org.objectweb.asm.commons.Method STRINGBUILDER_TOSTRING = getAsmMethod(String.class, "toString"); - - private final static org.objectweb.asm.commons.Method TOINTEXACT_LONG = getAsmMethod(int.class, "toIntExact", long.class); - private final static org.objectweb.asm.commons.Method NEGATEEXACT_INT = getAsmMethod(int.class, "negateExact", int.class); - private final static org.objectweb.asm.commons.Method NEGATEEXACT_LONG = getAsmMethod(long.class, "negateExact", long.class); - private final static org.objectweb.asm.commons.Method MULEXACT_INT = getAsmMethod(int.class, "multiplyExact", int.class, int.class); - private final static org.objectweb.asm.commons.Method MULEXACT_LONG = getAsmMethod(long.class, "multiplyExact", long.class, long.class); - private final static org.objectweb.asm.commons.Method ADDEXACT_INT = getAsmMethod(int.class, "addExact", int.class, int.class); - private final static org.objectweb.asm.commons.Method ADDEXACT_LONG = getAsmMethod(long.class, "addExact", long.class, long.class); - private final static org.objectweb.asm.commons.Method SUBEXACT_INT = getAsmMethod(int.class, "subtractExact", int.class, int.class); - private final static org.objectweb.asm.commons.Method SUBEXACT_LONG = getAsmMethod(long.class, "subtractExact", long.class, long.class); - - private final static org.objectweb.asm.commons.Method CHECKEQUALS = - getAsmMethod(boolean.class, "checkEquals", Object.class, Object.class); - private final static org.objectweb.asm.commons.Method TOBYTEEXACT_INT = getAsmMethod(byte.class, "toByteExact", int.class); - private final static org.objectweb.asm.commons.Method TOBYTEEXACT_LONG = getAsmMethod(byte.class, "toByteExact", long.class); - private final static org.objectweb.asm.commons.Method TOBYTEWOOVERFLOW_FLOAT = - getAsmMethod(byte.class, "toByteWithoutOverflow", float.class); - private final static org.objectweb.asm.commons.Method TOBYTEWOOVERFLOW_DOUBLE = - getAsmMethod(byte.class, 
"toByteWithoutOverflow", double.class); - private final static org.objectweb.asm.commons.Method TOSHORTEXACT_INT = getAsmMethod(short.class, "toShortExact", int.class); - private final static org.objectweb.asm.commons.Method TOSHORTEXACT_LONG = getAsmMethod(short.class, "toShortExact", long.class); - private final static org.objectweb.asm.commons.Method TOSHORTWOOVERFLOW_FLOAT = - getAsmMethod(short.class, "toShortWithoutOverflow", float.class); - private final static org.objectweb.asm.commons.Method TOSHORTWOOVERFLOW_DOUBLE = - getAsmMethod(short.class, "toShortWihtoutOverflow", double.class); - private final static org.objectweb.asm.commons.Method TOCHAREXACT_INT = getAsmMethod(char.class, "toCharExact", int.class); - private final static org.objectweb.asm.commons.Method TOCHAREXACT_LONG = getAsmMethod(char.class, "toCharExact", long.class); - private final static org.objectweb.asm.commons.Method TOCHARWOOVERFLOW_FLOAT = - getAsmMethod(char.class, "toCharWithoutOverflow", float.class); - private final static org.objectweb.asm.commons.Method TOCHARWOOVERFLOW_DOUBLE = - getAsmMethod(char.class, "toCharWithoutOverflow", double.class); - private final static org.objectweb.asm.commons.Method TOINTWOOVERFLOW_FLOAT = - getAsmMethod(int.class, "toIntWithoutOverflow", float.class); - private final static org.objectweb.asm.commons.Method TOINTWOOVERFLOW_DOUBLE = - getAsmMethod(int.class, "toIntWithoutOverflow", double.class); - private final static org.objectweb.asm.commons.Method TOLONGWOOVERFLOW_FLOAT = - getAsmMethod(long.class, "toLongWithoutOverflow", float.class); - private final static org.objectweb.asm.commons.Method TOLONGWOOVERFLOW_DOUBLE = - getAsmMethod(long.class, "toLongWithoutOverflow", double.class); - private final static org.objectweb.asm.commons.Method TOFLOATWOOVERFLOW_DOUBLE = - getAsmMethod(float.class , "toFloatWihtoutOverflow", double.class); - private final static org.objectweb.asm.commons.Method MULWOOVERLOW_FLOAT = - getAsmMethod(float.class, 
"multiplyWithoutOverflow", float.class, float.class); - private final static org.objectweb.asm.commons.Method MULWOOVERLOW_DOUBLE = - getAsmMethod(double.class, "multiplyWithoutOverflow", double.class, double.class); - private final static org.objectweb.asm.commons.Method DIVWOOVERLOW_INT = - getAsmMethod(int.class, "divideWithoutOverflow", int.class, int.class); - private final static org.objectweb.asm.commons.Method DIVWOOVERLOW_LONG = - getAsmMethod(long.class, "divideWithoutOverflow", long.class, long.class); - private final static org.objectweb.asm.commons.Method DIVWOOVERLOW_FLOAT = - getAsmMethod(float.class, "divideWithoutOverflow", float.class, float.class); - private final static org.objectweb.asm.commons.Method DIVWOOVERLOW_DOUBLE = - getAsmMethod(double.class, "divideWithoutOverflow", double.class, double.class); - private final static org.objectweb.asm.commons.Method REMWOOVERLOW_FLOAT = - getAsmMethod(float.class, "remainderWithoutOverflow", float.class, float.class); - private final static org.objectweb.asm.commons.Method REMWOOVERLOW_DOUBLE = - getAsmMethod(double.class, "remainderWithoutOverflow", double.class, double.class); - private final static org.objectweb.asm.commons.Method ADDWOOVERLOW_FLOAT = - getAsmMethod(float.class, "addWithoutOverflow", float.class, float.class); - private final static org.objectweb.asm.commons.Method ADDWOOVERLOW_DOUBLE = - getAsmMethod(double.class, "addWithoutOverflow", double.class, double.class); - private final static org.objectweb.asm.commons.Method SUBWOOVERLOW_FLOAT = - getAsmMethod(float.class, "subtractWithoutOverflow", float.class, float.class); - private final static org.objectweb.asm.commons.Method SUBWOOVERLOW_DOUBLE = - getAsmMethod(double.class, "subtractWithoutOverflow", double.class, double.class); - - private static org.objectweb.asm.commons.Method getAsmMethod(final Class rtype, final String name, final Class... 
ptypes) { - return new org.objectweb.asm.commons.Method(name, MethodType.methodType(rtype, ptypes).toMethodDescriptorString()); - } - static byte[] write(Metadata metadata) { - Writer writer = new Writer(metadata); + final Writer writer = new Writer(metadata); return writer.getBytes(); } private final Metadata metadata; - private final Definition definition; private final ParseTree root; private final String source; private final CompilerSettings settings; - private final Map branches = new HashMap<>(); - private final Deque jumps = new ArrayDeque<>(); - private final Set strings = new HashSet<>(); + private final ClassWriter writer; + private final GeneratorAdapter execute; - private ClassWriter writer; - private GeneratorAdapter execute; + private final WriterStatement statement; + private final WriterExpression expression; + private final WriterExternal external; private Writer(final Metadata metadata) { this.metadata = metadata; - definition = metadata.definition; root = metadata.root; source = metadata.source; settings = metadata.settings; + writer = new ClassWriter(ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS); + writeBegin(); writeConstructor(); + + execute = new GeneratorAdapter(Opcodes.ACC_PUBLIC, EXECUTE, SIGNATURE, null, writer); + + final WriterUtility utility = new WriterUtility(metadata, execute); + final WriterCaster caster = new WriterCaster(execute); + + statement = new WriterStatement(metadata, execute, this, utility); + expression = new WriterExpression(metadata, execute, this, utility, caster); + external = new WriterExternal(metadata, execute, this, utility, caster); + writeExecute(); writeEnd(); } - private Branch markBranch(final ParserRuleContext source, final ParserRuleContext... nodes) { - final Branch branch = new Branch(source); - - for (final ParserRuleContext node : nodes) { - branches.put(node, branch); - } - - return branch; - } - - private void copyBranch(final Branch branch, final ParserRuleContext... 
nodes) { - for (final ParserRuleContext node : nodes) { - branches.put(node, branch); - } - } - - private Branch getBranch(final ParserRuleContext source) { - return branches.get(source); - } - private void writeBegin() { - final int compute = ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS; final int version = Opcodes.V1_7; final int access = Opcodes.ACC_PUBLIC | Opcodes.ACC_SUPER | Opcodes.ACC_FINAL; final String base = BASE_CLASS_TYPE.getInternalName(); final String name = CLASS_TYPE.getInternalName(); - writer = new ClassWriter(compute); writer.visit(version, access, name, null, base, null); writer.visitSource(source, null); } private void writeConstructor() { - final int access = Opcodes.ACC_PUBLIC; - final GeneratorAdapter constructor = new GeneratorAdapter(access, CONSTRUCTOR, null, null, writer); + final GeneratorAdapter constructor = new GeneratorAdapter(Opcodes.ACC_PUBLIC, CONSTRUCTOR, null, null, writer); constructor.loadThis(); constructor.loadArgs(); constructor.invokeConstructor(org.objectweb.asm.Type.getType(Executable.class), CONSTRUCTOR); @@ -338,9 +149,6 @@ class Writer extends PainlessParserBaseVisitor { } private void writeExecute() { - final int access = Opcodes.ACC_PUBLIC; - execute = new GeneratorAdapter(access, EXECUTE, SIGNATURE, null, writer); - final Label fals = new Label(); final Label end = new Label(); execute.visitVarInsn(Opcodes.ALOAD, metadata.inputValueSlot); @@ -364,2011 +172,6 @@ class Writer extends PainlessParserBaseVisitor { execute.endMethod(); } - @Override - public Void visitSource(final SourceContext ctx) { - final Metadata.StatementMetadata sourcesmd = metadata.getStatementMetadata(ctx); - - for (final StatementContext sctx : ctx.statement()) { - visit(sctx); - } - - if (!sourcesmd.methodEscape) { - execute.visitInsn(Opcodes.ACONST_NULL); - execute.returnValue(); - } - - return null; - } - - @Override - public Void visitIf(final IfContext ctx) { - final ExpressionContext exprctx = ctx.expression(); - final boolean 
els = ctx.ELSE() != null; - final Branch branch = markBranch(ctx, exprctx); - branch.end = new Label(); - branch.fals = els ? new Label() : branch.end; - - visit(exprctx); - - final BlockContext blockctx0 = ctx.block(0); - final Metadata.StatementMetadata blockmd0 = metadata.getStatementMetadata(blockctx0); - visit(blockctx0); - - if (els) { - if (!blockmd0.allLast) { - execute.goTo(branch.end); - } - - execute.mark(branch.fals); - visit(ctx.block(1)); - } - - execute.mark(branch.end); - - return null; - } - - @Override - public Void visitWhile(final WhileContext ctx) { - final ExpressionContext exprctx = ctx.expression(); - final Branch branch = markBranch(ctx, exprctx); - branch.begin = new Label(); - branch.end = new Label(); - branch.fals = branch.end; - - jumps.push(branch); - execute.mark(branch.begin); - visit(exprctx); - - final BlockContext blockctx = ctx.block(); - boolean allLast = false; - - if (blockctx != null) { - final Metadata.StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); - allLast = blocksmd.allLast; - writeLoopCounter(blocksmd.count > 0 ? blocksmd.count : 1); - visit(blockctx); - } else if (ctx.empty() != null) { - writeLoopCounter(1); - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - - if (!allLast) { - execute.goTo(branch.begin); - } - - execute.mark(branch.end); - jumps.pop(); - - return null; - } - - @Override - public Void visitDo(final DoContext ctx) { - final ExpressionContext exprctx = ctx.expression(); - final Branch branch = markBranch(ctx, exprctx); - Label start = new Label(); - branch.begin = new Label(); - branch.end = new Label(); - branch.fals = branch.end; - - final BlockContext blockctx = ctx.block(); - final Metadata.StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); - - jumps.push(branch); - execute.mark(start); - visit(blockctx); - execute.mark(branch.begin); - visit(exprctx); - writeLoopCounter(blocksmd.count > 0 ? 
blocksmd.count : 1); - execute.goTo(start); - execute.mark(branch.end); - jumps.pop(); - - return null; - } - - @Override - public Void visitFor(final ForContext ctx) { - final ExpressionContext exprctx = ctx.expression(); - final AfterthoughtContext atctx = ctx.afterthought(); - final Branch branch = markBranch(ctx, exprctx); - final Label start = new Label(); - branch.begin = atctx == null ? start : new Label(); - branch.end = new Label(); - branch.fals = branch.end; - - jumps.push(branch); - - if (ctx.initializer() != null) { - visit(ctx.initializer()); - } - - execute.mark(start); - - if (exprctx != null) { - visit(exprctx); - } - - final BlockContext blockctx = ctx.block(); - boolean allLast = false; - - if (blockctx != null) { - Metadata.StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); - allLast = blocksmd.allLast; - - int count = blocksmd.count > 0 ? blocksmd.count : 1; - - if (atctx != null) { - ++count; - } - - writeLoopCounter(count); - visit(blockctx); - } else if (ctx.empty() != null) { - writeLoopCounter(1); - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - - if (atctx != null) { - execute.mark(branch.begin); - visit(atctx); - } - - if (atctx != null || !allLast) { - execute.goTo(start); - } - - execute.mark(branch.end); - jumps.pop(); - - return null; - } - - @Override - public Void visitDecl(final DeclContext ctx) { - visit(ctx.declaration()); - - return null; - } - - @Override - public Void visitContinue(final ContinueContext ctx) { - final Branch jump = jumps.peek(); - execute.goTo(jump.begin); - - return null; - } - - @Override - public Void visitBreak(final BreakContext ctx) { - final Branch jump = jumps.peek(); - execute.goTo(jump.end); - - return null; - } - - @Override - public Void visitReturn(final ReturnContext ctx) { - visit(ctx.expression()); - execute.returnValue(); - - return null; - } - - @Override - public Void visitTry(final TryContext ctx) { - final 
TrapContext[] trapctxs = new TrapContext[ctx.trap().size()]; - ctx.trap().toArray(trapctxs); - final Branch branch = markBranch(ctx, trapctxs); - - Label end = new Label(); - branch.begin = new Label(); - branch.end = new Label(); - branch.tru = trapctxs.length > 1 ? end : null; - - execute.mark(branch.begin); - - final BlockContext blockctx = ctx.block(); - final Metadata.StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); - visit(blockctx); - - if (!blocksmd.allLast) { - execute.goTo(end); - } - - execute.mark(branch.end); - - for (final TrapContext trapctx : trapctxs) { - visit(trapctx); - } - - if (!blocksmd.allLast || trapctxs.length > 1) { - execute.mark(end); - } - - return null; - } - - @Override - public Void visitThrow(final ThrowContext ctx) { - visit(ctx.expression()); - execute.throwException(); - - return null; - } - - @Override - public Void visitExpr(final ExprContext ctx) { - final Metadata.StatementMetadata exprsmd = metadata.getStatementMetadata(ctx); - final ExpressionContext exprctx = ctx.expression(); - final Metadata.ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx); - visit(exprctx); - - if (exprsmd.methodEscape) { - execute.returnValue(); - } else { - writePop(expremd.to.type.getSize()); - } - - return null; - } - - @Override - public Void visitMultiple(final MultipleContext ctx) { - for (final StatementContext sctx : ctx.statement()) { - visit(sctx); - } - - return null; - } - - @Override - public Void visitSingle(final SingleContext ctx) { - visit(ctx.statement()); - - return null; - } - - @Override - public Void visitEmpty(final EmptyContext ctx) { - throw new UnsupportedOperationException(Metadata.error(ctx) + "Unexpected writer state."); - } - - @Override - public Void visitInitializer(InitializerContext ctx) { - final DeclarationContext declctx = ctx.declaration(); - final ExpressionContext exprctx = ctx.expression(); - - if (declctx != null) { - visit(declctx); - } else if (exprctx != null) { - 
final Metadata.ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx); - visit(exprctx); - writePop(expremd.to.type.getSize()); - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - - return null; - } - - @Override - public Void visitAfterthought(AfterthoughtContext ctx) { - final ExpressionContext exprctx = ctx.expression(); - final Metadata.ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx); - visit(ctx.expression()); - writePop(expremd.to.type.getSize()); - - return null; - } - - @Override - public Void visitDeclaration(DeclarationContext ctx) { - for (final DeclvarContext declctx : ctx.declvar()) { - visit(declctx); - } - - return null; - } - - @Override - public Void visitDecltype(final DecltypeContext ctx) { - throw new UnsupportedOperationException(Metadata.error(ctx) + "Unexpected writer state."); - } - - @Override - public Void visitDeclvar(final DeclvarContext ctx) { - final Metadata.ExpressionMetadata declvaremd = metadata.getExpressionMetadata(ctx); - final org.objectweb.asm.Type type = declvaremd.to.type; - final Sort sort = declvaremd.to.sort; - final int slot = (int)declvaremd.postConst; - - final ExpressionContext exprctx = ctx.expression(); - final boolean initialize = exprctx == null; - - if (!initialize) { - visit(exprctx); - } - - switch (sort) { - case VOID: throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - case BOOL: - case BYTE: - case SHORT: - case CHAR: - case INT: if (initialize) execute.push(0); break; - case LONG: if (initialize) execute.push(0L); break; - case FLOAT: if (initialize) execute.push(0.0F); break; - case DOUBLE: if (initialize) execute.push(0.0); break; - default: if (initialize) execute.visitInsn(Opcodes.ACONST_NULL); - } - - execute.visitVarInsn(type.getOpcode(Opcodes.ISTORE), slot); - - return null; - } - - @Override - public Void visitTrap(final TrapContext ctx) { - final Metadata.StatementMetadata trapsmd = 
metadata.getStatementMetadata(ctx); - - final Branch branch = getBranch(ctx); - final Label jump = new Label(); - - final BlockContext blockctx = ctx.block(); - final EmptyscopeContext emptyctx = ctx.emptyscope(); - - execute.mark(jump); - writeLoadStoreVariable(ctx, true, trapsmd.exception, trapsmd.slot); - - if (blockctx != null) { - visit(ctx.block()); - } else if (emptyctx == null) { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - - execute.visitTryCatchBlock(branch.begin, branch.end, jump, trapsmd.exception.type.getInternalName()); - - if (branch.tru != null && !trapsmd.allLast) { - execute.goTo(branch.tru); - } - - return null; - } - - @Override - public Void visitPrecedence(final PrecedenceContext ctx) { - throw new UnsupportedOperationException(Metadata.error(ctx) + "Unexpected writer state."); - } - - @Override - public Void visitNumeric(final NumericContext ctx) { - final Metadata.ExpressionMetadata numericemd = metadata.getExpressionMetadata(ctx); - final Object postConst = numericemd.postConst; - - if (postConst == null) { - writeNumeric(ctx, numericemd.preConst); - checkWriteCast(numericemd); - } else { - writeConstant(ctx, postConst); - } - - checkWriteBranch(ctx); - - return null; - } - - @Override - public Void visitChar(final CharContext ctx) { - final Metadata.ExpressionMetadata charemd = metadata.getExpressionMetadata(ctx); - final Object postConst = charemd.postConst; - - if (postConst == null) { - writeNumeric(ctx, (int)(char)charemd.preConst); - checkWriteCast(charemd); - } else { - writeConstant(ctx, postConst); - } - - checkWriteBranch(ctx); - - return null; - } - - @Override - public Void visitTrue(final TrueContext ctx) { - final Metadata.ExpressionMetadata trueemd = metadata.getExpressionMetadata(ctx); - final Object postConst = trueemd.postConst; - final Branch branch = getBranch(ctx); - - if (branch == null) { - if (postConst == null) { - writeBoolean(ctx, true); - checkWriteCast(trueemd); - } 
else { - writeConstant(ctx, postConst); - } - } else if (branch.tru != null) { - execute.goTo(branch.tru); - } - - return null; - } - - @Override - public Void visitFalse(final FalseContext ctx) { - final Metadata.ExpressionMetadata falseemd = metadata.getExpressionMetadata(ctx); - final Object postConst = falseemd.postConst; - final Branch branch = getBranch(ctx); - - if (branch == null) { - if (postConst == null) { - writeBoolean(ctx, false); - checkWriteCast(falseemd); - } else { - writeConstant(ctx, postConst); - } - } else if (branch.fals != null) { - execute.goTo(branch.fals); - } - - return null; - } - - @Override - public Void visitNull(final NullContext ctx) { - final Metadata.ExpressionMetadata nullemd = metadata.getExpressionMetadata(ctx); - - execute.visitInsn(Opcodes.ACONST_NULL); - checkWriteCast(nullemd); - checkWriteBranch(ctx); - - return null; - } - - @Override - public Void visitExternal(final ExternalContext ctx) { - final Metadata.ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); - visit(ctx.extstart()); - checkWriteCast(expremd); - checkWriteBranch(ctx); - - return null; - } - - - @Override - public Void visitPostinc(final PostincContext ctx) { - final Metadata.ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); - visit(ctx.extstart()); - checkWriteCast(expremd); - checkWriteBranch(ctx); - - return null; - } - - @Override - public Void visitPreinc(final PreincContext ctx) { - final Metadata.ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); - visit(ctx.extstart()); - checkWriteCast(expremd); - checkWriteBranch(ctx); - - return null; - } - - @Override - public Void visitUnary(final UnaryContext ctx) { - final Metadata.ExpressionMetadata unaryemd = metadata.getExpressionMetadata(ctx); - final Object postConst = unaryemd.postConst; - final Object preConst = unaryemd.preConst; - final Branch branch = getBranch(ctx); - - if (postConst != null) { - if (ctx.BOOLNOT() != null) { - if (branch == null) { - 
writeConstant(ctx, postConst); - } else { - if ((boolean)postConst && branch.tru != null) { - execute.goTo(branch.tru); - } else if (!(boolean)postConst && branch.fals != null) { - execute.goTo(branch.fals); - } - } - } else { - writeConstant(ctx, postConst); - checkWriteBranch(ctx); - } - } else if (preConst != null) { - if (branch == null) { - writeConstant(ctx, preConst); - checkWriteCast(unaryemd); - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - } else { - final ExpressionContext exprctx = ctx.expression(); - - if (ctx.BOOLNOT() != null) { - final Branch local = markBranch(ctx, exprctx); - - if (branch == null) { - local.fals = new Label(); - final Label aend = new Label(); - - visit(exprctx); - - execute.push(false); - execute.goTo(aend); - execute.mark(local.fals); - execute.push(true); - execute.mark(aend); - - checkWriteCast(unaryemd); - } else { - local.tru = branch.fals; - local.fals = branch.tru; - - visit(exprctx); - } - } else { - final org.objectweb.asm.Type type = unaryemd.from.type; - final Sort sort = unaryemd.from.sort; - - visit(exprctx); - - if (ctx.BWNOT() != null) { - if (sort == Sort.DEF) { - execute.invokeStatic(definition.defobjType.type, DEF_NOT_CALL); - } else { - if (sort == Sort.INT) { - writeConstant(ctx, -1); - } else if (sort == Sort.LONG) { - writeConstant(ctx, -1L); - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - - execute.math(GeneratorAdapter.XOR, type); - } - } else if (ctx.SUB() != null) { - if (sort == Sort.DEF) { - execute.invokeStatic(definition.defobjType.type, DEF_NEG_CALL); - } else { - if (settings.getNumericOverflow()) { - execute.math(GeneratorAdapter.NEG, type); - } else { - if (sort == Sort.INT) { - execute.invokeStatic(definition.mathType.type, NEGATEEXACT_INT); - } else if (sort == Sort.LONG) { - execute.invokeStatic(definition.mathType.type, NEGATEEXACT_LONG); - } else { - throw new 
IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - } - } - } else if (ctx.ADD() == null) { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - - checkWriteCast(unaryemd); - checkWriteBranch(ctx); - } - } - - return null; - } - - @Override - public Void visitCast(final CastContext ctx) { - final Metadata.ExpressionMetadata castemd = metadata.getExpressionMetadata(ctx); - final Object postConst = castemd.postConst; - - if (postConst == null) { - visit(ctx.expression()); - checkWriteCast(castemd); - } else { - writeConstant(ctx, postConst); - } - - checkWriteBranch(ctx); - - return null; - } - - @Override - public Void visitBinary(final BinaryContext ctx) { - final Metadata.ExpressionMetadata binaryemd = metadata.getExpressionMetadata(ctx); - final Object postConst = binaryemd.postConst; - final Object preConst = binaryemd.preConst; - final Branch branch = getBranch(ctx); - - if (postConst != null) { - writeConstant(ctx, postConst); - } else if (preConst != null) { - if (branch == null) { - writeConstant(ctx, preConst); - checkWriteCast(binaryemd); - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - } else if (binaryemd.from.sort == Sort.STRING) { - final boolean marked = strings.contains(ctx); - - if (!marked) { - writeNewStrings(); - } - - final ExpressionContext exprctx0 = ctx.expression(0); - final Metadata.ExpressionMetadata expremd0 = metadata.getExpressionMetadata(exprctx0); - strings.add(exprctx0); - visit(exprctx0); - - if (strings.contains(exprctx0)) { - writeAppendStrings(expremd0.from.sort); - strings.remove(exprctx0); - } - - final ExpressionContext exprctx1 = ctx.expression(1); - final Metadata.ExpressionMetadata expremd1 = metadata.getExpressionMetadata(exprctx1); - strings.add(exprctx1); - visit(exprctx1); - - if (strings.contains(exprctx1)) { - writeAppendStrings(expremd1.from.sort); - strings.remove(exprctx1); - } - - if (marked) { - 
strings.remove(ctx); - } else { - writeToStrings(); - } - - checkWriteCast(binaryemd); - } else { - final ExpressionContext exprctx0 = ctx.expression(0); - final ExpressionContext exprctx1 = ctx.expression(1); - - visit(exprctx0); - visit(exprctx1); - - final Type type = binaryemd.from; - - if (ctx.MUL() != null) writeBinaryInstruction(ctx, type, MUL); - else if (ctx.DIV() != null) writeBinaryInstruction(ctx, type, DIV); - else if (ctx.REM() != null) writeBinaryInstruction(ctx, type, REM); - else if (ctx.ADD() != null) writeBinaryInstruction(ctx, type, ADD); - else if (ctx.SUB() != null) writeBinaryInstruction(ctx, type, SUB); - else if (ctx.LSH() != null) writeBinaryInstruction(ctx, type, LSH); - else if (ctx.USH() != null) writeBinaryInstruction(ctx, type, USH); - else if (ctx.RSH() != null) writeBinaryInstruction(ctx, type, RSH); - else if (ctx.BWAND() != null) writeBinaryInstruction(ctx, type, BWAND); - else if (ctx.BWXOR() != null) writeBinaryInstruction(ctx, type, BWXOR); - else if (ctx.BWOR() != null) writeBinaryInstruction(ctx, type, BWOR); - else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - - checkWriteCast(binaryemd); - } - - checkWriteBranch(ctx); - - return null; - } - - @Override - public Void visitComp(final CompContext ctx) { - final Metadata.ExpressionMetadata compemd = metadata.getExpressionMetadata(ctx); - final Object postConst = compemd.postConst; - final Object preConst = compemd.preConst; - final Branch branch = getBranch(ctx); - - if (postConst != null) { - if (branch == null) { - writeConstant(ctx, postConst); - } else { - if ((boolean)postConst && branch.tru != null) { - execute.mark(branch.tru); - } else if (!(boolean)postConst && branch.fals != null) { - execute.mark(branch.fals); - } - } - } else if (preConst != null) { - if (branch == null) { - writeConstant(ctx, preConst); - checkWriteCast(compemd); - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer 
state."); - } - } else { - final ExpressionContext exprctx0 = ctx.expression(0); - final Metadata.ExpressionMetadata expremd0 = metadata.getExpressionMetadata(exprctx0); - - final ExpressionContext exprctx1 = ctx.expression(1); - final Metadata.ExpressionMetadata expremd1 = metadata.getExpressionMetadata(exprctx1); - final org.objectweb.asm.Type type = expremd1.to.type; - final Sort sort1 = expremd1.to.sort; - - visit(exprctx0); - - if (!expremd1.isNull) { - visit(exprctx1); - } - - final boolean tru = branch != null && branch.tru != null; - final boolean fals = branch != null && branch.fals != null; - final Label jump = tru ? branch.tru : fals ? branch.fals : new Label(); - final Label end = new Label(); - - final boolean eq = (ctx.EQ() != null || ctx.EQR() != null) && (tru || !fals) || - (ctx.NE() != null || ctx.NER() != null) && fals; - final boolean ne = (ctx.NE() != null || ctx.NER() != null) && (tru || !fals) || - (ctx.EQ() != null || ctx.EQR() != null) && fals; - final boolean lt = ctx.LT() != null && (tru || !fals) || ctx.GTE() != null && fals; - final boolean lte = ctx.LTE() != null && (tru || !fals) || ctx.GT() != null && fals; - final boolean gt = ctx.GT() != null && (tru || !fals) || ctx.LTE() != null && fals; - final boolean gte = ctx.GTE() != null && (tru || !fals) || ctx.LT() != null && fals; - - boolean writejump = true; - - switch (sort1) { - case VOID: - case BYTE: - case SHORT: - case CHAR: - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - case BOOL: - if (eq) execute.ifZCmp(GeneratorAdapter.EQ, jump); - else if (ne) execute.ifZCmp(GeneratorAdapter.NE, jump); - else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - - break; - case INT: - case LONG: - case FLOAT: - case DOUBLE: - if (eq) execute.ifCmp(type, GeneratorAdapter.EQ, jump); - else if (ne) execute.ifCmp(type, GeneratorAdapter.NE, jump); - else if (lt) execute.ifCmp(type, GeneratorAdapter.LT, jump); - else if 
(lte) execute.ifCmp(type, GeneratorAdapter.LE, jump); - else if (gt) execute.ifCmp(type, GeneratorAdapter.GT, jump); - else if (gte) execute.ifCmp(type, GeneratorAdapter.GE, jump); - else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - - break; - case DEF: - if (eq) { - if (expremd1.isNull) { - execute.ifNull(jump); - } else if (!expremd0.isNull && ctx.EQ() != null) { - execute.invokeStatic(definition.defobjType.type, DEF_EQ_CALL); - } else { - execute.ifCmp(type, GeneratorAdapter.EQ, jump); - } - } else if (ne) { - if (expremd1.isNull) { - execute.ifNonNull(jump); - } else if (!expremd0.isNull && ctx.NE() != null) { - execute.invokeStatic(definition.defobjType.type, DEF_EQ_CALL); - execute.ifZCmp(GeneratorAdapter.EQ, jump); - } else { - execute.ifCmp(type, GeneratorAdapter.NE, jump); - } - } else if (lt) { - execute.invokeStatic(definition.defobjType.type, DEF_LT_CALL); - } else if (lte) { - execute.invokeStatic(definition.defobjType.type, DEF_LTE_CALL); - } else if (gt) { - execute.invokeStatic(definition.defobjType.type, DEF_GT_CALL); - } else if (gte) { - execute.invokeStatic(definition.defobjType.type, DEF_GTE_CALL); - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - - writejump = expremd1.isNull || ne || ctx.EQR() != null; - - if (branch != null && !writejump) { - execute.ifZCmp(GeneratorAdapter.NE, jump); - } - - break; - default: - if (eq) { - if (expremd1.isNull) { - execute.ifNull(jump); - } else if (ctx.EQ() != null) { - execute.invokeStatic(definition.utilityType.type, CHECKEQUALS); - - if (branch != null) { - execute.ifZCmp(GeneratorAdapter.NE, jump); - } - - writejump = false; - } else { - execute.ifCmp(type, GeneratorAdapter.EQ, jump); - } - } else if (ne) { - if (expremd1.isNull) { - execute.ifNonNull(jump); - } else if (ctx.NE() != null) { - execute.invokeStatic(definition.utilityType.type, CHECKEQUALS); - execute.ifZCmp(GeneratorAdapter.EQ, jump); - } 
else { - execute.ifCmp(type, GeneratorAdapter.NE, jump); - } - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - } - - if (branch == null) { - if (writejump) { - execute.push(false); - execute.goTo(end); - execute.mark(jump); - execute.push(true); - execute.mark(end); - } - - checkWriteCast(compemd); - } - } - - return null; - } - - @Override - public Void visitBool(final BoolContext ctx) { - final Metadata.ExpressionMetadata boolemd = metadata.getExpressionMetadata(ctx); - final Object postConst = boolemd.postConst; - final Object preConst = boolemd.preConst; - final Branch branch = getBranch(ctx); - - if (postConst != null) { - if (branch == null) { - writeConstant(ctx, postConst); - } else { - if ((boolean)postConst && branch.tru != null) { - execute.mark(branch.tru); - } else if (!(boolean)postConst && branch.fals != null) { - execute.mark(branch.fals); - } - } - } else if (preConst != null) { - if (branch == null) { - writeConstant(ctx, preConst); - checkWriteCast(boolemd); - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - } else { - final ExpressionContext exprctx0 = ctx.expression(0); - final ExpressionContext exprctx1 = ctx.expression(1); - - if (branch == null) { - if (ctx.BOOLAND() != null) { - final Branch local = markBranch(ctx, exprctx0, exprctx1); - local.fals = new Label(); - final Label end = new Label(); - - visit(exprctx0); - visit(exprctx1); - - execute.push(true); - execute.goTo(end); - execute.mark(local.fals); - execute.push(false); - execute.mark(end); - } else if (ctx.BOOLOR() != null) { - final Branch branch0 = markBranch(ctx, exprctx0); - branch0.tru = new Label(); - final Branch branch1 = markBranch(ctx, exprctx1); - branch1.fals = new Label(); - final Label aend = new Label(); - - visit(exprctx0); - visit(exprctx1); - - execute.mark(branch0.tru); - execute.push(true); - execute.goTo(aend); - execute.mark(branch1.fals); - 
execute.push(false); - execute.mark(aend); - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - - checkWriteCast(boolemd); - } else { - if (ctx.BOOLAND() != null) { - final Branch branch0 = markBranch(ctx, exprctx0); - branch0.fals = branch.fals == null ? new Label() : branch.fals; - final Branch branch1 = markBranch(ctx, exprctx1); - branch1.tru = branch.tru; - branch1.fals = branch.fals; - - visit(exprctx0); - visit(exprctx1); - - if (branch.fals == null) { - execute.mark(branch0.fals); - } - } else if (ctx.BOOLOR() != null) { - final Branch branch0 = markBranch(ctx, exprctx0); - branch0.tru = branch.tru == null ? new Label() : branch.tru; - final Branch branch1 = markBranch(ctx, exprctx1); - branch1.tru = branch.tru; - branch1.fals = branch.fals; - - visit(exprctx0); - visit(exprctx1); - - if (branch.tru == null) { - execute.mark(branch0.tru); - } - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - } - } - - return null; - } - - @Override - public Void visitConditional(final ConditionalContext ctx) { - final Metadata.ExpressionMetadata condemd = metadata.getExpressionMetadata(ctx); - final Branch branch = getBranch(ctx); - - final ExpressionContext expr0 = ctx.expression(0); - final ExpressionContext expr1 = ctx.expression(1); - final ExpressionContext expr2 = ctx.expression(2); - - final Branch local = markBranch(ctx, expr0); - local.fals = new Label(); - local.end = new Label(); - - if (branch != null) { - copyBranch(branch, expr1, expr2); - } - - visit(expr0); - visit(expr1); - execute.goTo(local.end); - execute.mark(local.fals); - visit(expr2); - execute.mark(local.end); - - if (branch == null) { - checkWriteCast(condemd); - } - - return null; - } - - @Override - public Void visitAssignment(final AssignmentContext ctx) { - final Metadata.ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); - visit(ctx.extstart()); - checkWriteCast(expremd); - 
checkWriteBranch(ctx); - - return null; - } - - @Override - public Void visitExtstart(ExtstartContext ctx) { - final Metadata.ExternalMetadata startemd = metadata.getExternalMetadata(ctx); - - if (startemd.token == ADD) { - final Metadata.ExpressionMetadata storeemd = metadata.getExpressionMetadata(startemd.storeExpr); - - if (startemd.current.sort == Sort.STRING || storeemd.from.sort == Sort.STRING) { - writeNewStrings(); - strings.add(startemd.storeExpr); - } - } - - final ExtprecContext precctx = ctx.extprec(); - final ExtcastContext castctx = ctx.extcast(); - final ExttypeContext typectx = ctx.exttype(); - final ExtvarContext varctx = ctx.extvar(); - final ExtnewContext newctx = ctx.extnew(); - final ExtstringContext stringctx = ctx.extstring(); - - if (precctx != null) { - visit(precctx); - } else if (castctx != null) { - visit(castctx); - } else if (typectx != null) { - visit(typectx); - } else if (varctx != null) { - visit(varctx); - } else if (newctx != null) { - visit(newctx); - } else if (stringctx != null) { - visit(stringctx); - } else { - throw new IllegalStateException(); - } - - return null; - } - - @Override - public Void visitExtprec(final ExtprecContext ctx) { - final ExtprecContext precctx = ctx.extprec(); - final ExtcastContext castctx = ctx.extcast(); - final ExttypeContext typectx = ctx.exttype(); - final ExtvarContext varctx = ctx.extvar(); - final ExtnewContext newctx = ctx.extnew(); - final ExtstringContext stringctx = ctx.extstring(); - - if (precctx != null) { - visit(precctx); - } else if (castctx != null) { - visit(castctx); - } else if (typectx != null) { - visit(typectx); - } else if (varctx != null) { - visit(varctx); - } else if (newctx != null) { - visit(newctx); - } else if (stringctx != null) { - visit(stringctx); - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - - final ExtdotContext dotctx = ctx.extdot(); - final ExtbraceContext bracectx = ctx.extbrace(); - - if (dotctx != 
null) { - visit(dotctx); - } else if (bracectx != null) { - visit(bracectx); - } - - return null; - } - - @Override - public Void visitExtcast(final ExtcastContext ctx) { - Metadata.ExtNodeMetadata castenmd = metadata.getExtNodeMetadata(ctx); - - final ExtprecContext precctx = ctx.extprec(); - final ExtcastContext castctx = ctx.extcast(); - final ExttypeContext typectx = ctx.exttype(); - final ExtvarContext varctx = ctx.extvar(); - final ExtnewContext newctx = ctx.extnew(); - final ExtstringContext stringctx = ctx.extstring(); - - if (precctx != null) { - visit(precctx); - } else if (castctx != null) { - visit(castctx); - } else if (typectx != null) { - visit(typectx); - } else if (varctx != null) { - visit(varctx); - } else if (newctx != null) { - visit(newctx); - } else if (stringctx != null) { - visit(stringctx); - } else { - throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); - } - - checkWriteCast(ctx, castenmd.castTo); - - return null; - } - - @Override - public Void visitExtbrace(final ExtbraceContext ctx) { - final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - - visit(exprctx); - writeLoadStoreExternal(ctx); - - final ExtdotContext dotctx = ctx.extdot(); - final ExtbraceContext bracectx = ctx.extbrace(); - - if (dotctx != null) { - visit(dotctx); - } else if (bracectx != null) { - visit(bracectx); - } - - return null; - } - - @Override - public Void visitExtdot(final ExtdotContext ctx) { - final ExtcallContext callctx = ctx.extcall(); - final ExtfieldContext fieldctx = ctx.extfield(); - - if (callctx != null) { - visit(callctx); - } else if (fieldctx != null) { - visit(fieldctx); - } - - return null; - } - - @Override - public Void visitExttype(final ExttypeContext ctx) { - visit(ctx.extdot()); - - return null; - } - - @Override - public Void visitExtcall(final ExtcallContext ctx) { - writeCallExternal(ctx); - - final ExtdotContext dotctx = ctx.extdot(); - final ExtbraceContext bracectx = 
ctx.extbrace(); - - if (dotctx != null) { - visit(dotctx); - } else if (bracectx != null) { - visit(bracectx); - } - - return null; - } - - @Override - public Void visitExtvar(final ExtvarContext ctx) { - writeLoadStoreExternal(ctx); - - final ExtdotContext dotctx = ctx.extdot(); - final ExtbraceContext bracectx = ctx.extbrace(); - - if (dotctx != null) { - visit(dotctx); - } else if (bracectx != null) { - visit(bracectx); - } - - return null; - } - - @Override - public Void visitExtfield(final ExtfieldContext ctx) { - writeLoadStoreExternal(ctx); - - final ExtdotContext dotctx = ctx.extdot(); - final ExtbraceContext bracectx = ctx.extbrace(); - - if (dotctx != null) { - visit(dotctx); - } else if (bracectx != null) { - visit(bracectx); - } - - return null; - } - - @Override - public Void visitExtnew(ExtnewContext ctx) { - writeNewExternal(ctx); - - final ExtdotContext dotctx = ctx.extdot(); - final ExtbraceContext bracectx = ctx.extbrace(); - - if (dotctx != null) { - visit(dotctx); - } else if (bracectx != null) { - visit(bracectx); - } - - return null; - } - - @Override - public Void visitExtstring(ExtstringContext ctx) { - final Metadata.ExtNodeMetadata stringenmd = metadata.getExtNodeMetadata(ctx); - - writeConstant(ctx, stringenmd.target); - - final ExtdotContext dotctx = ctx.extdot(); - final ExtbraceContext bracectx = ctx.extbrace(); - - if (dotctx != null) { - visit(dotctx); - } else if (bracectx != null) { - visit(bracectx); - } - - return null; - } - - @Override - public Void visitArguments(final ArgumentsContext ctx) { - throw new UnsupportedOperationException(Metadata.error(ctx) + "Unexpected writer state."); - } - - @Override - public Void visitIncrement(IncrementContext ctx) { - final Metadata.ExpressionMetadata incremd = metadata.getExpressionMetadata(ctx); - final Object postConst = incremd.postConst; - - if (postConst == null) { - writeNumeric(ctx, incremd.preConst); - checkWriteCast(incremd); - } else { - writeConstant(ctx, postConst); - } - - 
checkWriteBranch(ctx); - - return null; - } - - private void writeLoopCounter(final int count) { - final Label end = new Label(); - - execute.iinc(metadata.loopCounterSlot, -count); - execute.visitVarInsn(Opcodes.ILOAD, metadata.loopCounterSlot); - execute.push(0); - execute.ifICmp(GeneratorAdapter.GT, end); - execute.throwException(PAINLESS_ERROR_TYPE, - "The maximum number of statements that can be executed in a loop has been reached."); - execute.mark(end); - } - - private void writeConstant(final ParserRuleContext source, final Object constant) { - if (constant instanceof Number) { - writeNumeric(source, constant); - } else if (constant instanceof Character) { - writeNumeric(source, (int)(char)constant); - } else if (constant instanceof String) { - writeString(source, constant); - } else if (constant instanceof Boolean) { - writeBoolean(source, constant); - } else if (constant != null) { - throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); - } - } - - private void writeNumeric(final ParserRuleContext source, final Object numeric) { - if (numeric instanceof Double) { - execute.push((double)numeric); - } else if (numeric instanceof Float) { - execute.push((float)numeric); - } else if (numeric instanceof Long) { - execute.push((long)numeric); - } else if (numeric instanceof Number) { - execute.push(((Number)numeric).intValue()); - } else { - throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); - } - } - - private void writeString(final ParserRuleContext source, final Object string) { - if (string instanceof String) { - execute.push((String)string); - } else { - throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); - } - } - - private void writeBoolean(final ParserRuleContext source, final Object bool) { - if (bool instanceof Boolean) { - execute.push((boolean)bool); - } else { - throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); - 
} - } - - private void writeNewStrings() { - execute.newInstance(STRINGBUILDER_TYPE); - execute.dup(); - execute.invokeConstructor(STRINGBUILDER_TYPE, STRINGBUILDER_CONSTRUCTOR); - } - - private void writeAppendStrings(final Sort sort) { - switch (sort) { - case BOOL: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_BOOLEAN); break; - case CHAR: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_CHAR); break; - case BYTE: - case SHORT: - case INT: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_INT); break; - case LONG: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_LONG); break; - case FLOAT: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_FLOAT); break; - case DOUBLE: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_DOUBLE); break; - case STRING: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_STRING); break; - default: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_OBJECT); - } - } - - private void writeToStrings() { - execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_TOSTRING); - } - - private void writeBinaryInstruction(final ParserRuleContext source, final Type type, final int token) { - final Sort sort = type.sort; - final boolean exact = !settings.getNumericOverflow() && - ((sort == Sort.INT || sort == Sort.LONG) && - (token == MUL || token == DIV || token == ADD || token == SUB) || - (sort == Sort.FLOAT || sort == Sort.DOUBLE) && - (token == MUL || token == DIV || token == REM || token == ADD || token == SUB)); - - // if its a 64-bit shift, fixup the lastSource argument to truncate to 32-bits - // note unlike java, this means we still do binary promotion of shifts, - // but it keeps things simple -- this check works because we promote shifts. 
- if (sort == Sort.LONG && (token == LSH || token == USH || token == RSH)) { - execute.cast(org.objectweb.asm.Type.LONG_TYPE, org.objectweb.asm.Type.INT_TYPE); - } - - if (exact) { - switch (sort) { - case INT: - switch (token) { - case MUL: execute.invokeStatic(definition.mathType.type, MULEXACT_INT); break; - case DIV: execute.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_INT); break; - case ADD: execute.invokeStatic(definition.mathType.type, ADDEXACT_INT); break; - case SUB: execute.invokeStatic(definition.mathType.type, SUBEXACT_INT); break; - default: - throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); - } - - break; - case LONG: - switch (token) { - case MUL: execute.invokeStatic(definition.mathType.type, MULEXACT_LONG); break; - case DIV: execute.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_LONG); break; - case ADD: execute.invokeStatic(definition.mathType.type, ADDEXACT_LONG); break; - case SUB: execute.invokeStatic(definition.mathType.type, SUBEXACT_LONG); break; - default: - throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); - } - - break; - case FLOAT: - switch (token) { - case MUL: execute.invokeStatic(definition.utilityType.type, MULWOOVERLOW_FLOAT); break; - case DIV: execute.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_FLOAT); break; - case REM: execute.invokeStatic(definition.utilityType.type, REMWOOVERLOW_FLOAT); break; - case ADD: execute.invokeStatic(definition.utilityType.type, ADDWOOVERLOW_FLOAT); break; - case SUB: execute.invokeStatic(definition.utilityType.type, SUBWOOVERLOW_FLOAT); break; - default: - throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); - } - - break; - case DOUBLE: - switch (token) { - case MUL: execute.invokeStatic(definition.utilityType.type, MULWOOVERLOW_DOUBLE); break; - case DIV: execute.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_DOUBLE); break; - case REM: 
execute.invokeStatic(definition.utilityType.type, REMWOOVERLOW_DOUBLE); break; - case ADD: execute.invokeStatic(definition.utilityType.type, ADDWOOVERLOW_DOUBLE); break; - case SUB: execute.invokeStatic(definition.utilityType.type, SUBWOOVERLOW_DOUBLE); break; - default: - throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); - } - - break; - default: - throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); - } - } else { - if ((sort == Sort.FLOAT || sort == Sort.DOUBLE) && - (token == LSH || token == USH || token == RSH || token == BWAND || token == BWXOR || token == BWOR)) { - throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); - } - - if (sort == Sort.DEF) { - switch (token) { - case MUL: execute.invokeStatic(definition.defobjType.type, DEF_MUL_CALL); break; - case DIV: execute.invokeStatic(definition.defobjType.type, DEF_DIV_CALL); break; - case REM: execute.invokeStatic(definition.defobjType.type, DEF_REM_CALL); break; - case ADD: execute.invokeStatic(definition.defobjType.type, DEF_ADD_CALL); break; - case SUB: execute.invokeStatic(definition.defobjType.type, DEF_SUB_CALL); break; - case LSH: execute.invokeStatic(definition.defobjType.type, DEF_LSH_CALL); break; - case USH: execute.invokeStatic(definition.defobjType.type, DEF_RSH_CALL); break; - case RSH: execute.invokeStatic(definition.defobjType.type, DEF_USH_CALL); break; - case BWAND: execute.invokeStatic(definition.defobjType.type, DEF_AND_CALL); break; - case BWXOR: execute.invokeStatic(definition.defobjType.type, DEF_XOR_CALL); break; - case BWOR: execute.invokeStatic(definition.defobjType.type, DEF_OR_CALL); break; - default: - throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); - } - } else { - switch (token) { - case MUL: execute.math(GeneratorAdapter.MUL, type.type); break; - case DIV: execute.math(GeneratorAdapter.DIV, type.type); break; - case REM: 
execute.math(GeneratorAdapter.REM, type.type); break; - case ADD: execute.math(GeneratorAdapter.ADD, type.type); break; - case SUB: execute.math(GeneratorAdapter.SUB, type.type); break; - case LSH: execute.math(GeneratorAdapter.SHL, type.type); break; - case USH: execute.math(GeneratorAdapter.USHR, type.type); break; - case RSH: execute.math(GeneratorAdapter.SHR, type.type); break; - case BWAND: execute.math(GeneratorAdapter.AND, type.type); break; - case BWXOR: execute.math(GeneratorAdapter.XOR, type.type); break; - case BWOR: execute.math(GeneratorAdapter.OR, type.type); break; - default: - throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); - } - } - } - } - - /** - * Called for any compound assignment (including increment/decrement instructions). - * We have to be stricter than writeBinary, and do overflow checks against the original type's size - * instead of the promoted type's size, since the result will be implicitly cast back. - * - * @return true if an instruction is written, false otherwise - */ - private boolean writeExactInstruction(final Sort osort, final Sort psort) { - if (psort == Sort.DOUBLE) { - if (osort == Sort.FLOAT) { - execute.invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE); - } else if (osort == Sort.FLOAT_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE); - execute.checkCast(definition.floatobjType.type); - } else if (osort == Sort.LONG) { - execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE); - } else if (osort == Sort.LONG_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE); - execute.checkCast(definition.longobjType.type); - } else if (osort == Sort.INT) { - execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE); - } else if (osort == Sort.INT_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE); - execute.checkCast(definition.intobjType.type); - 
} else if (osort == Sort.CHAR) { - execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE); - } else if (osort == Sort.CHAR_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE); - execute.checkCast(definition.charobjType.type); - } else if (osort == Sort.SHORT) { - execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE); - } else if (osort == Sort.SHORT_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE); - execute.checkCast(definition.shortobjType.type); - } else if (osort == Sort.BYTE) { - execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE); - } else if (osort == Sort.BYTE_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE); - execute.checkCast(definition.byteobjType.type); - } else { - return false; - } - } else if (psort == Sort.FLOAT) { - if (osort == Sort.LONG) { - execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT); - } else if (osort == Sort.LONG_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT); - execute.checkCast(definition.longobjType.type); - } else if (osort == Sort.INT) { - execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT); - } else if (osort == Sort.INT_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT); - execute.checkCast(definition.intobjType.type); - } else if (osort == Sort.CHAR) { - execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT); - } else if (osort == Sort.CHAR_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT); - execute.checkCast(definition.charobjType.type); - } else if (osort == Sort.SHORT) { - execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT); - } else if (osort == Sort.SHORT_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT); - 
execute.checkCast(definition.shortobjType.type); - } else if (osort == Sort.BYTE) { - execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT); - } else if (osort == Sort.BYTE_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT); - execute.checkCast(definition.byteobjType.type); - } else { - return false; - } - } else if (psort == Sort.LONG) { - if (osort == Sort.INT) { - execute.invokeStatic(definition.mathType.type, TOINTEXACT_LONG); - } else if (osort == Sort.INT_OBJ) { - execute.invokeStatic(definition.mathType.type, TOINTEXACT_LONG); - execute.checkCast(definition.intobjType.type); - } else if (osort == Sort.CHAR) { - execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG); - } else if (osort == Sort.CHAR_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG); - execute.checkCast(definition.charobjType.type); - } else if (osort == Sort.SHORT) { - execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG); - } else if (osort == Sort.SHORT_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG); - execute.checkCast(definition.shortobjType.type); - } else if (osort == Sort.BYTE) { - execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG); - } else if (osort == Sort.BYTE_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG); - execute.checkCast(definition.byteobjType.type); - } else { - return false; - } - } else if (psort == Sort.INT) { - if (osort == Sort.CHAR) { - execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_INT); - } else if (osort == Sort.CHAR_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_INT); - execute.checkCast(definition.charobjType.type); - } else if (osort == Sort.SHORT) { - execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT); - } else if (osort == Sort.SHORT_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT); - 
execute.checkCast(definition.shortobjType.type); - } else if (osort == Sort.BYTE) { - execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT); - } else if (osort == Sort.BYTE_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT); - execute.checkCast(definition.byteobjType.type); - } else { - return false; - } - } else { - return false; - } - - return true; - } - - private void writeLoadStoreExternal(final ParserRuleContext source) { - final Metadata.ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source); - final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(sourceenmd.parent); - - final boolean length = "#length".equals(sourceenmd.target); - final boolean array = "#brace".equals(sourceenmd.target); - final boolean name = sourceenmd.target instanceof String && !length && !array; - final boolean variable = sourceenmd.target instanceof Integer; - final boolean field = sourceenmd.target instanceof Field; - final boolean shortcut = sourceenmd.target instanceof Object[]; - - if (!length && !variable && !field && !array && !name && !shortcut) { - throw new IllegalStateException(Metadata.error(source) + "Target not found for load/store."); - } - - final boolean maplist = shortcut && (boolean)((Object[])sourceenmd.target)[2]; - final Object constant = shortcut ? 
((Object[])sourceenmd.target)[3] : null; - - final boolean x1 = field || name || (shortcut && !maplist); - final boolean x2 = array || (shortcut && maplist); - - if (length) { - execute.arrayLength(); - } else if (sourceenmd.last && parentemd.storeExpr != null) { - final Metadata.ExpressionMetadata expremd = metadata.getExpressionMetadata(parentemd.storeExpr); - final boolean cat = strings.contains(parentemd.storeExpr); - - if (cat) { - if (field || name || shortcut) { - execute.dupX1(); - } else if (array) { - execute.dup2X1(); - } - - if (maplist) { - if (constant != null) { - writeConstant(source, constant); - } - - execute.dupX2(); - } - - writeLoadStoreInstruction(source, false, variable, field, name, array, shortcut); - writeAppendStrings(sourceenmd.type.sort); - visit(parentemd.storeExpr); - - if (strings.contains(parentemd.storeExpr)) { - writeAppendStrings(expremd.to.sort); - strings.remove(parentemd.storeExpr); - } - - writeToStrings(); - checkWriteCast(source, sourceenmd.castTo); - - if (parentemd.read) { - writeDup(sourceenmd.type.sort.size, x1, x2); - } - - writeLoadStoreInstruction(source, true, variable, field, name, array, shortcut); - } else if (parentemd.token > 0) { - final int token = parentemd.token; - - if (field || name || shortcut) { - execute.dup(); - } else if (array) { - execute.dup2(); - } - - if (maplist) { - if (constant != null) { - writeConstant(source, constant); - } - - execute.dupX1(); - } - - writeLoadStoreInstruction(source, false, variable, field, name, array, shortcut); - - if (parentemd.read && parentemd.post) { - writeDup(sourceenmd.type.sort.size, x1, x2); - } - - checkWriteCast(source, sourceenmd.castFrom); - visit(parentemd.storeExpr); - - writeBinaryInstruction(source, sourceenmd.promote, token); - - boolean exact = false; - - if (!settings.getNumericOverflow() && expremd.typesafe && sourceenmd.type.sort != Sort.DEF && - (token == MUL || token == DIV || token == REM || token == ADD || token == SUB)) { - exact = 
writeExactInstruction(sourceenmd.type.sort, sourceenmd.promote.sort); - } - - if (!exact) { - checkWriteCast(source, sourceenmd.castTo); - } - - if (parentemd.read && !parentemd.post) { - writeDup(sourceenmd.type.sort.size, x1, x2); - } - - writeLoadStoreInstruction(source, true, variable, field, name, array, shortcut); - } else { - if (constant != null) { - writeConstant(source, constant); - } - - visit(parentemd.storeExpr); - - if (parentemd.read) { - writeDup(sourceenmd.type.sort.size, x1, x2); - } - - writeLoadStoreInstruction(source, true, variable, field, name, array, shortcut); - } - } else { - if (constant != null) { - writeConstant(source, constant); - } - - writeLoadStoreInstruction(source, false, variable, field, name, array, shortcut); - } - } - - private void writeLoadStoreInstruction(final ParserRuleContext source, - final boolean store, final boolean variable, - final boolean field, final boolean name, - final boolean array, final boolean shortcut) { - final Metadata.ExtNodeMetadata sourceemd = metadata.getExtNodeMetadata(source); - - if (variable) { - writeLoadStoreVariable(source, store, sourceemd.type, (int)sourceemd.target); - } else if (field) { - writeLoadStoreField(store, (Field)sourceemd.target); - } else if (name) { - writeLoadStoreField(source, store, (String)sourceemd.target); - } else if (array) { - writeLoadStoreArray(source, store, sourceemd.type); - } else if (shortcut) { - Object[] targets = (Object[])sourceemd.target; - writeLoadStoreShortcut(store, (Method)targets[0], (Method)targets[1]); - } else { - throw new IllegalStateException(Metadata.error(source) + "Load/Store requires a variable, field, or array."); - } - } - - private void writeLoadStoreVariable(final ParserRuleContext source, final boolean store, - final Type type, final int slot) { - if (type.sort == Sort.VOID) { - throw new IllegalStateException(Metadata.error(source) + "Cannot load/store void type."); - } - - if (store) { - 
execute.visitVarInsn(type.type.getOpcode(Opcodes.ISTORE), slot); - } else { - execute.visitVarInsn(type.type.getOpcode(Opcodes.ILOAD), slot); - } - } - - private void writeLoadStoreField(final boolean store, final Field field) { - if (java.lang.reflect.Modifier.isStatic(field.reflect.getModifiers())) { - if (store) { - execute.putStatic(field.owner.type, field.reflect.getName(), field.type.type); - } else { - execute.getStatic(field.owner.type, field.reflect.getName(), field.type.type); - - if (!field.generic.clazz.equals(field.type.clazz)) { - execute.checkCast(field.generic.type); - } - } - } else { - if (store) { - execute.putField(field.owner.type, field.reflect.getName(), field.type.type); - } else { - execute.getField(field.owner.type, field.reflect.getName(), field.type.type); - - if (!field.generic.clazz.equals(field.type.clazz)) { - execute.checkCast(field.generic.type); - } - } - } - } - - private void writeLoadStoreField(final ParserRuleContext source, final boolean store, final String name) { - if (store) { - final Metadata.ExtNodeMetadata sourceemd = metadata.getExtNodeMetadata(source); - final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(sourceemd.parent); - final Metadata.ExpressionMetadata expremd = metadata.getExpressionMetadata(parentemd.storeExpr); - - execute.push(name); - execute.loadThis(); - execute.getField(CLASS_TYPE, "definition", DEFINITION_TYPE); - execute.push(parentemd.token == 0 && expremd.typesafe); - execute.invokeStatic(definition.defobjType.type, DEF_FIELD_STORE); - } else { - execute.push(name); - execute.loadThis(); - execute.getField(CLASS_TYPE, "definition", DEFINITION_TYPE); - execute.invokeStatic(definition.defobjType.type, DEF_FIELD_LOAD); - } - } - - private void writeLoadStoreArray(final ParserRuleContext source, final boolean store, final Type type) { - if (type.sort == Sort.VOID) { - throw new IllegalStateException(Metadata.error(source) + "Cannot load/store void type."); - } - - if (type.sort == 
Sort.DEF) { - final ExtbraceContext bracectx = (ExtbraceContext)source; - final Metadata.ExpressionMetadata expremd0 = metadata.getExpressionMetadata(bracectx.expression()); - - if (store) { - final Metadata.ExtNodeMetadata braceenmd = metadata.getExtNodeMetadata(bracectx); - final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(braceenmd.parent); - final Metadata.ExpressionMetadata expremd1 = metadata.getExpressionMetadata(parentemd.storeExpr); - - execute.loadThis(); - execute.getField(CLASS_TYPE, "definition", DEFINITION_TYPE); - execute.push(expremd0.typesafe); - execute.push(parentemd.token == 0 && expremd1.typesafe); - execute.invokeStatic(definition.defobjType.type, DEF_ARRAY_STORE); - } else { - execute.loadThis(); - execute.getField(CLASS_TYPE, "definition", DEFINITION_TYPE); - execute.push(expremd0.typesafe); - execute.invokeStatic(definition.defobjType.type, DEF_ARRAY_LOAD); - } - } else { - if (store) { - execute.arrayStore(type.type); - } else { - execute.arrayLoad(type.type); - } - } - } - - private void writeLoadStoreShortcut(final boolean store, final Method getter, final Method setter) { - final Method method = store ? 
setter : getter; - - if (java.lang.reflect.Modifier.isInterface(getter.owner.clazz.getModifiers())) { - execute.invokeInterface(method.owner.type, method.method); - } else { - execute.invokeVirtual(method.owner.type, method.method); - } - - if (store) { - writePop(method.rtn.type.getSize()); - } else if (!method.rtn.clazz.equals(method.handle.type().returnType())) { - execute.checkCast(method.rtn.type); - } - } - - private void writeDup(final int size, final boolean x1, final boolean x2) { - if (size == 1) { - if (x2) { - execute.dupX2(); - } else if (x1) { - execute.dupX1(); - } else { - execute.dup(); - } - } else if (size == 2) { - if (x2) { - execute.dup2X2(); - } else if (x1) { - execute.dup2X1(); - } else { - execute.dup2(); - } - } - } - - private void writeNewExternal(final ExtnewContext source) { - final Metadata.ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source); - final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(sourceenmd.parent); - - final boolean makearray = "#makearray".equals(sourceenmd.target); - final boolean constructor = sourceenmd.target instanceof Constructor; - - if (!makearray && !constructor) { - throw new IllegalStateException(Metadata.error(source) + "Target not found for new call."); - } - - if (makearray) { - for (final ExpressionContext exprctx : source.expression()) { - visit(exprctx); - } - - if (sourceenmd.type.sort == Sort.ARRAY) { - execute.visitMultiANewArrayInsn(sourceenmd.type.type.getDescriptor(), sourceenmd.type.type.getDimensions()); - } else { - execute.newArray(sourceenmd.type.type); - } - } else { - execute.newInstance(sourceenmd.type.type); - - if (parentemd.read) { - execute.dup(); - } - - for (final ExpressionContext exprctx : source.arguments().expression()) { - visit(exprctx); - } - - final Constructor target = (Constructor)sourceenmd.target; - execute.invokeConstructor(target.owner.type, target.method); - } - } - - private void writeCallExternal(final ExtcallContext source) { - 
final Metadata.ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source); - - final boolean method = sourceenmd.target instanceof Method; - final boolean def = sourceenmd.target instanceof String; - - if (!method && !def) { - throw new IllegalStateException(Metadata.error(source) + "Target not found for call."); - } - - final List arguments = source.arguments().expression(); - - if (method) { - for (final ExpressionContext exprctx : arguments) { - visit(exprctx); - } - - final Method target = (Method)sourceenmd.target; - - if (java.lang.reflect.Modifier.isStatic(target.reflect.getModifiers())) { - execute.invokeStatic(target.owner.type, target.method); - } else if (java.lang.reflect.Modifier.isInterface(target.owner.clazz.getModifiers())) { - execute.invokeInterface(target.owner.type, target.method); - } else { - execute.invokeVirtual(target.owner.type, target.method); - } - - if (!target.rtn.clazz.equals(target.handle.type().returnType())) { - execute.checkCast(target.rtn.type); - } - } else { - execute.push((String)sourceenmd.target); - execute.loadThis(); - execute.getField(CLASS_TYPE, "definition", DEFINITION_TYPE); - - execute.push(arguments.size()); - execute.newArray(definition.defType.type); - - for (int argument = 0; argument < arguments.size(); ++argument) { - execute.dup(); - execute.push(argument); - visit(arguments.get(argument)); - execute.arrayStore(definition.defType.type); - } - - execute.push(arguments.size()); - execute.newArray(definition.booleanType.type); - - for (int argument = 0; argument < arguments.size(); ++argument) { - execute.dup(); - execute.push(argument); - execute.push(metadata.getExpressionMetadata(arguments.get(argument)).typesafe); - execute.arrayStore(definition.booleanType.type); - } - - execute.invokeStatic(definition.defobjType.type, DEF_METHOD_CALL); - } - } - - private void writePop(final int size) { - if (size == 1) { - execute.pop(); - } else if (size == 2) { - execute.pop2(); - } - } - - private void 
checkWriteCast(final Metadata.ExpressionMetadata sort) { - checkWriteCast(sort.source, sort.cast); - } - - private void checkWriteCast(final ParserRuleContext source, final Cast cast) { - if (cast instanceof Transform) { - writeTransform((Transform)cast); - } else if (cast != null) { - writeCast(cast); - } else { - throw new IllegalStateException(Metadata.error(source) + "Unexpected cast object."); - } - } - - private void writeCast(final Cast cast) { - final Type from = cast.from; - final Type to = cast.to; - - if (from.equals(to)) { - return; - } - - if (from.sort.numeric && from.sort.primitive && to.sort.numeric && to.sort.primitive) { - execute.cast(from.type, to.type); - } else { - try { - from.clazz.asSubclass(to.clazz); - } catch (ClassCastException exception) { - execute.checkCast(to.type); - } - } - } - - private void writeTransform(final Transform transform) { - if (transform.upcast != null) { - execute.checkCast(transform.upcast.type); - } - - if (java.lang.reflect.Modifier.isStatic(transform.method.reflect.getModifiers())) { - execute.invokeStatic(transform.method.owner.type, transform.method.method); - } else if (java.lang.reflect.Modifier.isInterface(transform.method.owner.clazz.getModifiers())) { - execute.invokeInterface(transform.method.owner.type, transform.method.method); - } else { - execute.invokeVirtual(transform.method.owner.type, transform.method.method); - } - - if (transform.downcast != null) { - execute.checkCast(transform.downcast.type); - } - } - - void checkWriteBranch(final ParserRuleContext source) { - final Branch branch = getBranch(source); - - if (branch != null) { - if (branch.tru != null) { - execute.visitJumpInsn(Opcodes.IFNE, branch.tru); - } else if (branch.fals != null) { - execute.visitJumpInsn(Opcodes.IFEQ, branch.fals); - } - } - } - private void writeEnd() { writer.visitEnd(); } @@ -2376,4 +179,352 @@ class Writer extends PainlessParserBaseVisitor { private byte[] getBytes() { return writer.toByteArray(); } + + @Override 
+ public Void visitSource(final SourceContext ctx) { + statement.processSource(ctx); + + return null; + } + + @Override + public Void visitIf(final IfContext ctx) { + statement.processIf(ctx); + + return null; + } + + @Override + public Void visitWhile(final WhileContext ctx) { + statement.processWhile(ctx); + + return null; + } + + @Override + public Void visitDo(final DoContext ctx) { + statement.processDo(ctx); + + return null; + } + + @Override + public Void visitFor(final ForContext ctx) { + statement.processFor(ctx); + + return null; + } + + @Override + public Void visitDecl(final DeclContext ctx) { + statement.processDecl(ctx); + + return null; + } + + @Override + public Void visitContinue(final ContinueContext ctx) { + statement.processContinue(); + + return null; + } + + @Override + public Void visitBreak(final BreakContext ctx) { + statement.processBreak(); + + return null; + } + + @Override + public Void visitReturn(final ReturnContext ctx) { + statement.processReturn(ctx); + + return null; + } + + @Override + public Void visitTry(final TryContext ctx) { + statement.processTry(ctx); + + return null; + } + + @Override + public Void visitThrow(final ThrowContext ctx) { + statement.processThrow(ctx); + + return null; + } + + @Override + public Void visitExpr(final ExprContext ctx) { + statement.processExpr(ctx); + + return null; + } + + @Override + public Void visitMultiple(final MultipleContext ctx) { + statement.processMultiple(ctx); + + return null; + } + + @Override + public Void visitSingle(final SingleContext ctx) { + statement.processSingle(ctx); + + return null; + } + + @Override + public Void visitEmpty(final EmptyContext ctx) { + throw new UnsupportedOperationException(WriterUtility.error(ctx) + "Unexpected state."); + } + + @Override + public Void visitEmptyscope(final EmptyscopeContext ctx) { + throw new UnsupportedOperationException(WriterUtility.error(ctx) + "Unexpected state."); + } + + @Override + public Void visitInitializer(final 
InitializerContext ctx) { + statement.processInitializer(ctx); + + return null; + } + + @Override + public Void visitAfterthought(final AfterthoughtContext ctx) { + statement.processAfterthought(ctx); + + return null; + } + + @Override + public Void visitDeclaration(DeclarationContext ctx) { + statement.processDeclaration(ctx); + + return null; + } + + @Override + public Void visitDecltype(final DecltypeContext ctx) { + throw new UnsupportedOperationException(WriterUtility.error(ctx) + "Unexpected state."); + } + + @Override + public Void visitDeclvar(final DeclvarContext ctx) { + statement.processDeclvar(ctx); + + return null; + } + + @Override + public Void visitTrap(final TrapContext ctx) { + statement.processTrap(ctx); + + return null; + } + + @Override + public Void visitPrecedence(final PrecedenceContext ctx) { + throw new UnsupportedOperationException(WriterUtility.error(ctx) + "Unexpected state."); + } + + @Override + public Void visitNumeric(final NumericContext ctx) { + expression.processNumeric(ctx); + + return null; + } + + @Override + public Void visitChar(final CharContext ctx) { + expression.processChar(ctx); + + return null; + } + + @Override + public Void visitTrue(final TrueContext ctx) { + expression.processTrue(ctx); + + return null; + } + + @Override + public Void visitFalse(final FalseContext ctx) { + expression.processFalse(ctx); + + return null; + } + + @Override + public Void visitNull(final NullContext ctx) { + expression.processNull(ctx); + + return null; + } + + @Override + public Void visitExternal(final ExternalContext ctx) { + expression.processExternal(ctx); + + return null; + } + + + @Override + public Void visitPostinc(final PostincContext ctx) { + expression.processPostinc(ctx); + + return null; + } + + @Override + public Void visitPreinc(final PreincContext ctx) { + expression.processPreinc(ctx); + + return null; + } + + @Override + public Void visitUnary(final UnaryContext ctx) { + expression.processUnary(ctx); + + return null; 
+ } + + @Override + public Void visitCast(final CastContext ctx) { + expression.processCast(ctx); + + return null; + } + + @Override + public Void visitBinary(final BinaryContext ctx) { + expression.processBinary(ctx); + + return null; + } + + @Override + public Void visitComp(final CompContext ctx) { + expression.processComp(ctx); + + return null; + } + + @Override + public Void visitBool(final BoolContext ctx) { + expression.processBool(ctx); + + return null; + } + + @Override + public Void visitConditional(final ConditionalContext ctx) { + expression.processConditional(ctx); + + return null; + } + + @Override + public Void visitAssignment(final AssignmentContext ctx) { + expression.processAssignment(ctx); + + return null; + } + + @Override + public Void visitExtstart(final ExtstartContext ctx) { + external.processExtstart(ctx); + + return null; + } + + @Override + public Void visitExtprec(final ExtprecContext ctx) { + external.processExtprec(ctx); + + return null; + } + + @Override + public Void visitExtcast(final ExtcastContext ctx) { + external.processExtcast(ctx); + + return null; + } + + @Override + public Void visitExtbrace(final ExtbraceContext ctx) { + external.processExtbrace(ctx); + + return null; + } + + @Override + public Void visitExtdot(final ExtdotContext ctx) { + external.processExtdot(ctx); + + return null; + } + + @Override + public Void visitExttype(final ExttypeContext ctx) { + external.processExttype(ctx); + + return null; + } + + @Override + public Void visitExtcall(final ExtcallContext ctx) { + external.processExtcall(ctx); + + return null; + } + + @Override + public Void visitExtvar(final ExtvarContext ctx) { + external.processExtvar(ctx); + + return null; + } + + @Override + public Void visitExtfield(final ExtfieldContext ctx) { + external.processExtfield(ctx); + + return null; + } + + @Override + public Void visitExtnew(final ExtnewContext ctx) { + external.processExtnew(ctx); + + return null; + } + + @Override + public Void 
visitExtstring(final ExtstringContext ctx) { + external.processExtstring(ctx); + + return null; + } + + @Override + public Void visitArguments(final ArgumentsContext ctx) { + throw new UnsupportedOperationException(WriterUtility.error(ctx) + "Unexpected state."); + } + + @Override + public Void visitIncrement(final IncrementContext ctx) { + expression.processIncrement(ctx); + + return null; + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterCaster.java new file mode 100644 index 000000000000..c55dff2f5498 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterCaster.java @@ -0,0 +1,86 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.painless; + +import org.antlr.v4.runtime.ParserRuleContext; +import org.elasticsearch.painless.Definition.Cast; +import org.elasticsearch.painless.Definition.Transform; +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Metadata.ExpressionMetadata; +import org.objectweb.asm.commons.GeneratorAdapter; + +class WriterCaster { + private final GeneratorAdapter execute; + + WriterCaster(final GeneratorAdapter execute) { + this.execute = execute; + } + + void checkWriteCast(final ExpressionMetadata sort) { + checkWriteCast(sort.source, sort.cast); + } + + void checkWriteCast(final ParserRuleContext source, final Cast cast) { + if (cast instanceof Transform) { + writeTransform((Transform)cast); + } else if (cast != null) { + writeCast(cast); + } else { + throw new IllegalStateException(WriterUtility.error(source) + "Unexpected cast object."); + } + } + + private void writeCast(final Cast cast) { + final Type from = cast.from; + final Type to = cast.to; + + if (from.equals(to)) { + return; + } + + if (from.sort.numeric && from.sort.primitive && to.sort.numeric && to.sort.primitive) { + execute.cast(from.type, to.type); + } else { + try { + from.clazz.asSubclass(to.clazz); + } catch (ClassCastException exception) { + execute.checkCast(to.type); + } + } + } + + private void writeTransform(final Transform transform) { + if (transform.upcast != null) { + execute.checkCast(transform.upcast.type); + } + + if (java.lang.reflect.Modifier.isStatic(transform.method.reflect.getModifiers())) { + execute.invokeStatic(transform.method.owner.type, transform.method.method); + } else if (java.lang.reflect.Modifier.isInterface(transform.method.owner.clazz.getModifiers())) { + execute.invokeInterface(transform.method.owner.type, transform.method.method); + } else { + execute.invokeVirtual(transform.method.owner.type, transform.method.method); + } + + if (transform.downcast != null) { + 
execute.checkCast(transform.downcast.type); + } + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java new file mode 100644 index 000000000000..33fea094058d --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java @@ -0,0 +1,138 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.painless; + +import org.elasticsearch.script.ScoreAccessor; +import org.objectweb.asm.Type; +import org.objectweb.asm.commons.Method; + +import java.lang.invoke.MethodType; +import java.util.Map; + +class WriterConstants { + final static String BASE_CLASS_NAME = Executable.class.getName(); + final static String CLASS_NAME = BASE_CLASS_NAME + "$CompiledPainlessExecutable"; + final static Type BASE_CLASS_TYPE = Type.getType(Executable.class); + final static Type CLASS_TYPE = Type.getType("L" + CLASS_NAME.replace(".", "/") + ";"); + + final static Method CONSTRUCTOR = getAsmMethod(void.class, "", Definition.class, String.class, String.class); + final static Method EXECUTE = getAsmMethod(Object.class, "execute", Map.class); + final static String SIGNATURE = "(Ljava/util/Map;)Ljava/lang/Object;"; + + final static Type PAINLESS_ERROR_TYPE = Type.getType(PainlessError.class); + + final static Type DEFINITION_TYPE = Type.getType(Definition.class); + + final static Type MAP_TYPE = Type.getType(Map.class); + final static Method MAP_GET = getAsmMethod(Object.class, "get", Object.class); + + final static Type SCORE_ACCESSOR_TYPE = Type.getType(ScoreAccessor.class); + final static Method SCORE_ACCESSOR_FLOAT = getAsmMethod(float.class, "floatValue"); + + final static Method DEF_METHOD_CALL = getAsmMethod( + Object.class, "methodCall", Object.class, String.class, Definition.class, Object[].class, boolean[].class); + final static Method DEF_ARRAY_STORE = getAsmMethod( + void.class, "arrayStore", Object.class, Object.class, Object.class, Definition.class, boolean.class, boolean.class); + final static Method DEF_ARRAY_LOAD = getAsmMethod( + Object.class, "arrayLoad", Object.class, Object.class, Definition.class, boolean.class); + final static Method DEF_FIELD_STORE = getAsmMethod( + void.class, "fieldStore", Object.class, Object.class, String.class, Definition.class, boolean.class); + final static Method DEF_FIELD_LOAD = getAsmMethod( + 
Object.class, "fieldLoad", Object.class, String.class, Definition.class); + + final static Method DEF_NOT_CALL = getAsmMethod(Object.class, "not", Object.class); + final static Method DEF_NEG_CALL = getAsmMethod(Object.class, "neg", Object.class); + final static Method DEF_MUL_CALL = getAsmMethod(Object.class, "mul", Object.class, Object.class); + final static Method DEF_DIV_CALL = getAsmMethod(Object.class, "div", Object.class, Object.class); + final static Method DEF_REM_CALL = getAsmMethod(Object.class, "rem", Object.class, Object.class); + final static Method DEF_ADD_CALL = getAsmMethod(Object.class, "add", Object.class, Object.class); + final static Method DEF_SUB_CALL = getAsmMethod(Object.class, "sub", Object.class, Object.class); + final static Method DEF_LSH_CALL = getAsmMethod(Object.class, "lsh", Object.class, Object.class); + final static Method DEF_RSH_CALL = getAsmMethod(Object.class, "rsh", Object.class, Object.class); + final static Method DEF_USH_CALL = getAsmMethod(Object.class, "ush", Object.class, Object.class); + final static Method DEF_AND_CALL = getAsmMethod(Object.class, "and", Object.class, Object.class); + final static Method DEF_XOR_CALL = getAsmMethod(Object.class, "xor", Object.class, Object.class); + final static Method DEF_OR_CALL = getAsmMethod(Object.class, "or" , Object.class, Object.class); + final static Method DEF_EQ_CALL = getAsmMethod(boolean.class, "eq" , Object.class, Object.class); + final static Method DEF_LT_CALL = getAsmMethod(boolean.class, "lt" , Object.class, Object.class); + final static Method DEF_LTE_CALL = getAsmMethod(boolean.class, "lte", Object.class, Object.class); + final static Method DEF_GT_CALL = getAsmMethod(boolean.class, "gt" , Object.class, Object.class); + final static Method DEF_GTE_CALL = getAsmMethod(boolean.class, "gte", Object.class, Object.class); + + final static Type STRINGBUILDER_TYPE = Type.getType(StringBuilder.class); + + final static Method STRINGBUILDER_CONSTRUCTOR = 
getAsmMethod(void.class, ""); + final static Method STRINGBUILDER_APPEND_BOOLEAN = getAsmMethod(StringBuilder.class, "append", boolean.class); + final static Method STRINGBUILDER_APPEND_CHAR = getAsmMethod(StringBuilder.class, "append", char.class); + final static Method STRINGBUILDER_APPEND_INT = getAsmMethod(StringBuilder.class, "append", int.class); + final static Method STRINGBUILDER_APPEND_LONG = getAsmMethod(StringBuilder.class, "append", long.class); + final static Method STRINGBUILDER_APPEND_FLOAT = getAsmMethod(StringBuilder.class, "append", float.class); + final static Method STRINGBUILDER_APPEND_DOUBLE = getAsmMethod(StringBuilder.class, "append", double.class); + final static Method STRINGBUILDER_APPEND_STRING = getAsmMethod(StringBuilder.class, "append", String.class); + final static Method STRINGBUILDER_APPEND_OBJECT = getAsmMethod(StringBuilder.class, "append", Object.class); + final static Method STRINGBUILDER_TOSTRING = getAsmMethod(String.class, "toString"); + + final static Method TOINTEXACT_LONG = getAsmMethod(int.class, "toIntExact", long.class); + final static Method NEGATEEXACT_INT = getAsmMethod(int.class, "negateExact", int.class); + final static Method NEGATEEXACT_LONG = getAsmMethod(long.class, "negateExact", long.class); + final static Method MULEXACT_INT = getAsmMethod(int.class, "multiplyExact", int.class, int.class); + final static Method MULEXACT_LONG = getAsmMethod(long.class, "multiplyExact", long.class, long.class); + final static Method ADDEXACT_INT = getAsmMethod(int.class, "addExact", int.class, int.class); + final static Method ADDEXACT_LONG = getAsmMethod(long.class, "addExact", long.class, long.class); + final static Method SUBEXACT_INT = getAsmMethod(int.class, "subtractExact", int.class, int.class); + final static Method SUBEXACT_LONG = getAsmMethod(long.class, "subtractExact", long.class, long.class); + + final static Method CHECKEQUALS = getAsmMethod(boolean.class, "checkEquals", Object.class, Object.class); + final 
static Method TOBYTEEXACT_INT = getAsmMethod(byte.class, "toByteExact", int.class); + final static Method TOBYTEEXACT_LONG = getAsmMethod(byte.class, "toByteExact", long.class); + final static Method TOBYTEWOOVERFLOW_FLOAT = getAsmMethod(byte.class, "toByteWithoutOverflow", float.class); + final static Method TOBYTEWOOVERFLOW_DOUBLE = getAsmMethod(byte.class, "toByteWithoutOverflow", double.class); + final static Method TOSHORTEXACT_INT = getAsmMethod(short.class, "toShortExact", int.class); + final static Method TOSHORTEXACT_LONG = getAsmMethod(short.class, "toShortExact", long.class); + final static Method TOSHORTWOOVERFLOW_FLOAT = getAsmMethod(short.class, "toShortWithoutOverflow", float.class); + final static Method TOSHORTWOOVERFLOW_DOUBLE = getAsmMethod(short.class, "toShortWihtoutOverflow", double.class); + final static Method TOCHAREXACT_INT = getAsmMethod(char.class, "toCharExact", int.class); + final static Method TOCHAREXACT_LONG = getAsmMethod(char.class, "toCharExact", long.class); + final static Method TOCHARWOOVERFLOW_FLOAT = getAsmMethod(char.class, "toCharWithoutOverflow", float.class); + final static Method TOCHARWOOVERFLOW_DOUBLE = getAsmMethod(char.class, "toCharWithoutOverflow", double.class); + final static Method TOINTWOOVERFLOW_FLOAT = getAsmMethod(int.class, "toIntWithoutOverflow", float.class); + final static Method TOINTWOOVERFLOW_DOUBLE = getAsmMethod(int.class, "toIntWithoutOverflow", double.class); + final static Method TOLONGWOOVERFLOW_FLOAT = getAsmMethod(long.class, "toLongWithoutOverflow", float.class); + final static Method TOLONGWOOVERFLOW_DOUBLE = getAsmMethod(long.class, "toLongWithoutOverflow", double.class); + final static Method TOFLOATWOOVERFLOW_DOUBLE = getAsmMethod(float.class , "toFloatWihtoutOverflow", double.class); + final static Method MULWOOVERLOW_FLOAT = getAsmMethod(float.class, "multiplyWithoutOverflow", float.class, float.class); + final static Method MULWOOVERLOW_DOUBLE = getAsmMethod(double.class, 
"multiplyWithoutOverflow", double.class, double.class); + final static Method DIVWOOVERLOW_INT = getAsmMethod(int.class, "divideWithoutOverflow", int.class, int.class); + final static Method DIVWOOVERLOW_LONG = getAsmMethod(long.class, "divideWithoutOverflow", long.class, long.class); + final static Method DIVWOOVERLOW_FLOAT = getAsmMethod(float.class, "divideWithoutOverflow", float.class, float.class); + final static Method DIVWOOVERLOW_DOUBLE = getAsmMethod(double.class, "divideWithoutOverflow", double.class, double.class); + final static Method REMWOOVERLOW_FLOAT = getAsmMethod(float.class, "remainderWithoutOverflow", float.class, float.class); + final static Method REMWOOVERLOW_DOUBLE = getAsmMethod(double.class, "remainderWithoutOverflow", double.class, double.class); + final static Method ADDWOOVERLOW_FLOAT = getAsmMethod(float.class, "addWithoutOverflow", float.class, float.class); + final static Method ADDWOOVERLOW_DOUBLE = getAsmMethod(double.class, "addWithoutOverflow", double.class, double.class); + final static Method SUBWOOVERLOW_FLOAT = getAsmMethod(float.class, "subtractWithoutOverflow", float.class, float.class); + final static Method SUBWOOVERLOW_DOUBLE = getAsmMethod(double.class, "subtractWithoutOverflow", double.class, double.class); + + private static Method getAsmMethod(final Class rtype, final String name, final Class... ptypes) { + return new Method(name, MethodType.methodType(rtype, ptypes).toMethodDescriptorString()); + } + + private WriterConstants() {} +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterExpression.java new file mode 100644 index 000000000000..c850031efa04 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterExpression.java @@ -0,0 +1,684 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless; + +import org.elasticsearch.painless.Definition.Sort; +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Metadata.ExpressionMetadata; +import org.elasticsearch.painless.PainlessParser.AssignmentContext; +import org.elasticsearch.painless.PainlessParser.BinaryContext; +import org.elasticsearch.painless.PainlessParser.BoolContext; +import org.elasticsearch.painless.PainlessParser.CastContext; +import org.elasticsearch.painless.PainlessParser.CharContext; +import org.elasticsearch.painless.PainlessParser.CompContext; +import org.elasticsearch.painless.PainlessParser.ConditionalContext; +import org.elasticsearch.painless.PainlessParser.ExpressionContext; +import org.elasticsearch.painless.PainlessParser.ExternalContext; +import org.elasticsearch.painless.PainlessParser.FalseContext; +import org.elasticsearch.painless.PainlessParser.IncrementContext; +import org.elasticsearch.painless.PainlessParser.NullContext; +import org.elasticsearch.painless.PainlessParser.NumericContext; +import org.elasticsearch.painless.PainlessParser.PostincContext; +import org.elasticsearch.painless.PainlessParser.PreincContext; +import org.elasticsearch.painless.PainlessParser.TrueContext; +import 
org.elasticsearch.painless.PainlessParser.UnaryContext; +import org.elasticsearch.painless.WriterUtility.Branch; +import org.objectweb.asm.Label; +import org.objectweb.asm.Opcodes; +import org.objectweb.asm.commons.GeneratorAdapter; + +import static org.elasticsearch.painless.PainlessParser.ADD; +import static org.elasticsearch.painless.PainlessParser.BWAND; +import static org.elasticsearch.painless.PainlessParser.BWOR; +import static org.elasticsearch.painless.PainlessParser.BWXOR; +import static org.elasticsearch.painless.PainlessParser.DIV; +import static org.elasticsearch.painless.PainlessParser.LSH; +import static org.elasticsearch.painless.PainlessParser.MUL; +import static org.elasticsearch.painless.PainlessParser.REM; +import static org.elasticsearch.painless.PainlessParser.RSH; +import static org.elasticsearch.painless.PainlessParser.SUB; +import static org.elasticsearch.painless.PainlessParser.USH; +import static org.elasticsearch.painless.WriterConstants.CHECKEQUALS; +import static org.elasticsearch.painless.WriterConstants.DEF_EQ_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_GTE_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_GT_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_LTE_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_LT_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_NEG_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_NOT_CALL; +import static org.elasticsearch.painless.WriterConstants.NEGATEEXACT_INT; +import static org.elasticsearch.painless.WriterConstants.NEGATEEXACT_LONG; + +class WriterExpression { + private final Metadata metadata; + private final Definition definition; + private final CompilerSettings settings; + + private final GeneratorAdapter execute; + + private final Writer writer; + private final WriterUtility utility; + private final WriterCaster caster; + + WriterExpression(final Metadata metadata, final 
GeneratorAdapter execute, final Writer writer, + final WriterUtility utility, final WriterCaster caster) { + this.metadata = metadata; + definition = metadata.definition; + settings = metadata.settings; + + this.execute = execute; + + this.writer = writer; + this.utility = utility; + this.caster = caster; + } + + void processNumeric(final NumericContext ctx) { + final ExpressionMetadata numericemd = metadata.getExpressionMetadata(ctx); + final Object postConst = numericemd.postConst; + + if (postConst == null) { + utility.writeNumeric(ctx, numericemd.preConst); + caster.checkWriteCast(numericemd); + } else { + utility.writeConstant(ctx, postConst); + } + + utility.checkWriteBranch(ctx); + } + + void processChar(final CharContext ctx) { + final ExpressionMetadata charemd = metadata.getExpressionMetadata(ctx); + final Object postConst = charemd.postConst; + + if (postConst == null) { + utility.writeNumeric(ctx, (int)(char)charemd.preConst); + caster.checkWriteCast(charemd); + } else { + utility.writeConstant(ctx, postConst); + } + + utility.checkWriteBranch(ctx); + } + + void processTrue(final TrueContext ctx) { + final ExpressionMetadata trueemd = metadata.getExpressionMetadata(ctx); + final Object postConst = trueemd.postConst; + final Branch branch = utility.getBranch(ctx); + + if (branch == null) { + if (postConst == null) { + utility.writeBoolean(ctx, true); + caster.checkWriteCast(trueemd); + } else { + utility.writeConstant(ctx, postConst); + } + } else if (branch.tru != null) { + execute.goTo(branch.tru); + } + } + + void processFalse(final FalseContext ctx) { + final ExpressionMetadata falseemd = metadata.getExpressionMetadata(ctx); + final Object postConst = falseemd.postConst; + final Branch branch = utility.getBranch(ctx); + + if (branch == null) { + if (postConst == null) { + utility.writeBoolean(ctx, false); + caster.checkWriteCast(falseemd); + } else { + utility.writeConstant(ctx, postConst); + } + } else if (branch.fals != null) { + 
execute.goTo(branch.fals); + } + } + + void processNull(final NullContext ctx) { + final ExpressionMetadata nullemd = metadata.getExpressionMetadata(ctx); + + execute.visitInsn(Opcodes.ACONST_NULL); + caster.checkWriteCast(nullemd); + utility.checkWriteBranch(ctx); + } + + void processExternal(final ExternalContext ctx) { + final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); + writer.visit(ctx.extstart()); + caster.checkWriteCast(expremd); + utility.checkWriteBranch(ctx); + } + + + void processPostinc(final PostincContext ctx) { + final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); + writer.visit(ctx.extstart()); + caster.checkWriteCast(expremd); + utility.checkWriteBranch(ctx); + } + + void processPreinc(final PreincContext ctx) { + final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); + writer.visit(ctx.extstart()); + caster.checkWriteCast(expremd); + utility.checkWriteBranch(ctx); + } + + void processUnary(final UnaryContext ctx) { + final ExpressionMetadata unaryemd = metadata.getExpressionMetadata(ctx); + final Object postConst = unaryemd.postConst; + final Object preConst = unaryemd.preConst; + final Branch branch = utility.getBranch(ctx); + + if (postConst != null) { + if (ctx.BOOLNOT() != null) { + if (branch == null) { + utility.writeConstant(ctx, postConst); + } else { + if ((boolean)postConst && branch.tru != null) { + execute.goTo(branch.tru); + } else if (!(boolean)postConst && branch.fals != null) { + execute.goTo(branch.fals); + } + } + } else { + utility.writeConstant(ctx, postConst); + utility.checkWriteBranch(ctx); + } + } else if (preConst != null) { + if (branch == null) { + utility.writeConstant(ctx, preConst); + caster.checkWriteCast(unaryemd); + } else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + } else { + final ExpressionContext exprctx = ctx.expression(); + + if (ctx.BOOLNOT() != null) { + final Branch local = utility.markBranch(ctx, 
exprctx); + + if (branch == null) { + local.fals = new Label(); + final Label aend = new Label(); + + writer.visit(exprctx); + + execute.push(false); + execute.goTo(aend); + execute.mark(local.fals); + execute.push(true); + execute.mark(aend); + + caster.checkWriteCast(unaryemd); + } else { + local.tru = branch.fals; + local.fals = branch.tru; + + writer.visit(exprctx); + } + } else { + final org.objectweb.asm.Type type = unaryemd.from.type; + final Sort sort = unaryemd.from.sort; + + writer.visit(exprctx); + + if (ctx.BWNOT() != null) { + if (sort == Sort.DEF) { + execute.invokeStatic(definition.defobjType.type, DEF_NOT_CALL); + } else { + if (sort == Sort.INT) { + utility.writeConstant(ctx, -1); + } else if (sort == Sort.LONG) { + utility.writeConstant(ctx, -1L); + } else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + + execute.math(GeneratorAdapter.XOR, type); + } + } else if (ctx.SUB() != null) { + if (sort == Sort.DEF) { + execute.invokeStatic(definition.defobjType.type, DEF_NEG_CALL); + } else { + if (settings.getNumericOverflow()) { + execute.math(GeneratorAdapter.NEG, type); + } else { + if (sort == Sort.INT) { + execute.invokeStatic(definition.mathType.type, NEGATEEXACT_INT); + } else if (sort == Sort.LONG) { + execute.invokeStatic(definition.mathType.type, NEGATEEXACT_LONG); + } else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + } + } + } else if (ctx.ADD() == null) { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + + caster.checkWriteCast(unaryemd); + utility.checkWriteBranch(ctx); + } + } + } + + void processCast(final CastContext ctx) { + final ExpressionMetadata castemd = metadata.getExpressionMetadata(ctx); + final Object postConst = castemd.postConst; + + if (postConst == null) { + writer.visit(ctx.expression()); + caster.checkWriteCast(castemd); + } else { + utility.writeConstant(ctx, postConst); + } + + 
utility.checkWriteBranch(ctx); + } + + void processBinary(final BinaryContext ctx) { + final ExpressionMetadata binaryemd = metadata.getExpressionMetadata(ctx); + final Object postConst = binaryemd.postConst; + final Object preConst = binaryemd.preConst; + final Branch branch = utility.getBranch(ctx); + + if (postConst != null) { + utility.writeConstant(ctx, postConst); + } else if (preConst != null) { + if (branch == null) { + utility.writeConstant(ctx, preConst); + caster.checkWriteCast(binaryemd); + } else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + } else if (binaryemd.from.sort == Sort.STRING) { + final boolean marked = utility.containsStrings(ctx); + + if (!marked) { + utility.writeNewStrings(); + } + + final ExpressionContext exprctx0 = ctx.expression(0); + final ExpressionMetadata expremd0 = metadata.getExpressionMetadata(exprctx0); + utility.addStrings(exprctx0); + writer.visit(exprctx0); + + if (utility.containsStrings(exprctx0)) { + utility.writeAppendStrings(expremd0.from.sort); + utility.removeStrings(exprctx0); + } + + final ExpressionContext exprctx1 = ctx.expression(1); + final ExpressionMetadata expremd1 = metadata.getExpressionMetadata(exprctx1); + utility.addStrings(exprctx1); + writer.visit(exprctx1); + + if (utility.containsStrings(exprctx1)) { + utility.writeAppendStrings(expremd1.from.sort); + utility.removeStrings(exprctx1); + } + + if (marked) { + utility.removeStrings(ctx); + } else { + utility.writeToStrings(); + } + + caster.checkWriteCast(binaryemd); + } else { + final ExpressionContext exprctx0 = ctx.expression(0); + final ExpressionContext exprctx1 = ctx.expression(1); + + writer.visit(exprctx0); + writer.visit(exprctx1); + + final Type type = binaryemd.from; + + if (ctx.MUL() != null) utility.writeBinaryInstruction(ctx, type, MUL); + else if (ctx.DIV() != null) utility.writeBinaryInstruction(ctx, type, DIV); + else if (ctx.REM() != null) utility.writeBinaryInstruction(ctx, type, REM); + 
else if (ctx.ADD() != null) utility.writeBinaryInstruction(ctx, type, ADD); + else if (ctx.SUB() != null) utility.writeBinaryInstruction(ctx, type, SUB); + else if (ctx.LSH() != null) utility.writeBinaryInstruction(ctx, type, LSH); + else if (ctx.USH() != null) utility.writeBinaryInstruction(ctx, type, USH); + else if (ctx.RSH() != null) utility.writeBinaryInstruction(ctx, type, RSH); + else if (ctx.BWAND() != null) utility.writeBinaryInstruction(ctx, type, BWAND); + else if (ctx.BWXOR() != null) utility.writeBinaryInstruction(ctx, type, BWXOR); + else if (ctx.BWOR() != null) utility.writeBinaryInstruction(ctx, type, BWOR); + else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + + caster.checkWriteCast(binaryemd); + } + + utility.checkWriteBranch(ctx); + } + + void processComp(final CompContext ctx) { + final ExpressionMetadata compemd = metadata.getExpressionMetadata(ctx); + final Object postConst = compemd.postConst; + final Object preConst = compemd.preConst; + final Branch branch = utility.getBranch(ctx); + + if (postConst != null) { + if (branch == null) { + utility.writeConstant(ctx, postConst); + } else { + if ((boolean)postConst && branch.tru != null) { + execute.mark(branch.tru); + } else if (!(boolean)postConst && branch.fals != null) { + execute.mark(branch.fals); + } + } + } else if (preConst != null) { + if (branch == null) { + utility.writeConstant(ctx, preConst); + caster.checkWriteCast(compemd); + } else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + } else { + final ExpressionContext exprctx0 = ctx.expression(0); + final ExpressionMetadata expremd0 = metadata.getExpressionMetadata(exprctx0); + + final ExpressionContext exprctx1 = ctx.expression(1); + final ExpressionMetadata expremd1 = metadata.getExpressionMetadata(exprctx1); + final org.objectweb.asm.Type type = expremd1.to.type; + final Sort sort1 = expremd1.to.sort; + + writer.visit(exprctx0); + + if 
(!expremd1.isNull) { + writer.visit(exprctx1); + } + + final boolean tru = branch != null && branch.tru != null; + final boolean fals = branch != null && branch.fals != null; + final Label jump = tru ? branch.tru : fals ? branch.fals : new Label(); + final Label end = new Label(); + + final boolean eq = (ctx.EQ() != null || ctx.EQR() != null) && (tru || !fals) || + (ctx.NE() != null || ctx.NER() != null) && fals; + final boolean ne = (ctx.NE() != null || ctx.NER() != null) && (tru || !fals) || + (ctx.EQ() != null || ctx.EQR() != null) && fals; + final boolean lt = ctx.LT() != null && (tru || !fals) || ctx.GTE() != null && fals; + final boolean lte = ctx.LTE() != null && (tru || !fals) || ctx.GT() != null && fals; + final boolean gt = ctx.GT() != null && (tru || !fals) || ctx.LTE() != null && fals; + final boolean gte = ctx.GTE() != null && (tru || !fals) || ctx.LT() != null && fals; + + boolean writejump = true; + + switch (sort1) { + case VOID: + case BYTE: + case SHORT: + case CHAR: + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + case BOOL: + if (eq) execute.ifZCmp(GeneratorAdapter.EQ, jump); + else if (ne) execute.ifZCmp(GeneratorAdapter.NE, jump); + else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + + break; + case INT: + case LONG: + case FLOAT: + case DOUBLE: + if (eq) execute.ifCmp(type, GeneratorAdapter.EQ, jump); + else if (ne) execute.ifCmp(type, GeneratorAdapter.NE, jump); + else if (lt) execute.ifCmp(type, GeneratorAdapter.LT, jump); + else if (lte) execute.ifCmp(type, GeneratorAdapter.LE, jump); + else if (gt) execute.ifCmp(type, GeneratorAdapter.GT, jump); + else if (gte) execute.ifCmp(type, GeneratorAdapter.GE, jump); + else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + + break; + case DEF: + if (eq) { + if (expremd1.isNull) { + execute.ifNull(jump); + } else if (!expremd0.isNull && ctx.EQ() != null) { + 
execute.invokeStatic(definition.defobjType.type, DEF_EQ_CALL); + } else { + execute.ifCmp(type, GeneratorAdapter.EQ, jump); + } + } else if (ne) { + if (expremd1.isNull) { + execute.ifNonNull(jump); + } else if (!expremd0.isNull && ctx.NE() != null) { + execute.invokeStatic(definition.defobjType.type, DEF_EQ_CALL); + execute.ifZCmp(GeneratorAdapter.EQ, jump); + } else { + execute.ifCmp(type, GeneratorAdapter.NE, jump); + } + } else if (lt) { + execute.invokeStatic(definition.defobjType.type, DEF_LT_CALL); + } else if (lte) { + execute.invokeStatic(definition.defobjType.type, DEF_LTE_CALL); + } else if (gt) { + execute.invokeStatic(definition.defobjType.type, DEF_GT_CALL); + } else if (gte) { + execute.invokeStatic(definition.defobjType.type, DEF_GTE_CALL); + } else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + + writejump = expremd1.isNull || ne || ctx.EQR() != null; + + if (branch != null && !writejump) { + execute.ifZCmp(GeneratorAdapter.NE, jump); + } + + break; + default: + if (eq) { + if (expremd1.isNull) { + execute.ifNull(jump); + } else if (ctx.EQ() != null) { + execute.invokeStatic(definition.utilityType.type, CHECKEQUALS); + + if (branch != null) { + execute.ifZCmp(GeneratorAdapter.NE, jump); + } + + writejump = false; + } else { + execute.ifCmp(type, GeneratorAdapter.EQ, jump); + } + } else if (ne) { + if (expremd1.isNull) { + execute.ifNonNull(jump); + } else if (ctx.NE() != null) { + execute.invokeStatic(definition.utilityType.type, CHECKEQUALS); + execute.ifZCmp(GeneratorAdapter.EQ, jump); + } else { + execute.ifCmp(type, GeneratorAdapter.NE, jump); + } + } else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + } + + if (branch == null) { + if (writejump) { + execute.push(false); + execute.goTo(end); + execute.mark(jump); + execute.push(true); + execute.mark(end); + } + + caster.checkWriteCast(compemd); + } + } + } + + void processBool(final BoolContext ctx) { + 
final ExpressionMetadata boolemd = metadata.getExpressionMetadata(ctx); + final Object postConst = boolemd.postConst; + final Object preConst = boolemd.preConst; + final Branch branch = utility.getBranch(ctx); + + if (postConst != null) { + if (branch == null) { + utility.writeConstant(ctx, postConst); + } else { + if ((boolean)postConst && branch.tru != null) { + execute.mark(branch.tru); + } else if (!(boolean)postConst && branch.fals != null) { + execute.mark(branch.fals); + } + } + } else if (preConst != null) { + if (branch == null) { + utility.writeConstant(ctx, preConst); + caster.checkWriteCast(boolemd); + } else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + } else { + final ExpressionContext exprctx0 = ctx.expression(0); + final ExpressionContext exprctx1 = ctx.expression(1); + + if (branch == null) { + if (ctx.BOOLAND() != null) { + final Branch local = utility.markBranch(ctx, exprctx0, exprctx1); + local.fals = new Label(); + final Label end = new Label(); + + writer.visit(exprctx0); + writer.visit(exprctx1); + + execute.push(true); + execute.goTo(end); + execute.mark(local.fals); + execute.push(false); + execute.mark(end); + } else if (ctx.BOOLOR() != null) { + final Branch branch0 = utility.markBranch(ctx, exprctx0); + branch0.tru = new Label(); + final Branch branch1 = utility.markBranch(ctx, exprctx1); + branch1.fals = new Label(); + final Label aend = new Label(); + + writer.visit(exprctx0); + writer.visit(exprctx1); + + execute.mark(branch0.tru); + execute.push(true); + execute.goTo(aend); + execute.mark(branch1.fals); + execute.push(false); + execute.mark(aend); + } else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + + caster.checkWriteCast(boolemd); + } else { + if (ctx.BOOLAND() != null) { + final Branch branch0 = utility.markBranch(ctx, exprctx0); + branch0.fals = branch.fals == null ? 
new Label() : branch.fals; + final Branch branch1 = utility.markBranch(ctx, exprctx1); + branch1.tru = branch.tru; + branch1.fals = branch.fals; + + writer.visit(exprctx0); + writer.visit(exprctx1); + + if (branch.fals == null) { + execute.mark(branch0.fals); + } + } else if (ctx.BOOLOR() != null) { + final Branch branch0 = utility.markBranch(ctx, exprctx0); + branch0.tru = branch.tru == null ? new Label() : branch.tru; + final Branch branch1 = utility.markBranch(ctx, exprctx1); + branch1.tru = branch.tru; + branch1.fals = branch.fals; + + writer.visit(exprctx0); + writer.visit(exprctx1); + + if (branch.tru == null) { + execute.mark(branch0.tru); + } + } else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + } + } + } + + void processConditional(final ConditionalContext ctx) { + final ExpressionMetadata condemd = metadata.getExpressionMetadata(ctx); + final Branch branch = utility.getBranch(ctx); + + final ExpressionContext expr0 = ctx.expression(0); + final ExpressionContext expr1 = ctx.expression(1); + final ExpressionContext expr2 = ctx.expression(2); + + final Branch local = utility.markBranch(ctx, expr0); + local.fals = new Label(); + local.end = new Label(); + + if (branch != null) { + utility.copyBranch(branch, expr1, expr2); + } + + writer.visit(expr0); + writer.visit(expr1); + execute.goTo(local.end); + execute.mark(local.fals); + writer.visit(expr2); + execute.mark(local.end); + + if (branch == null) { + caster.checkWriteCast(condemd); + } + } + + void processAssignment(final AssignmentContext ctx) { + final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); + writer.visit(ctx.extstart()); + caster.checkWriteCast(expremd); + utility.checkWriteBranch(ctx); + } + + void processIncrement(final IncrementContext ctx) { + final ExpressionMetadata incremd = metadata.getExpressionMetadata(ctx); + final Object postConst = incremd.postConst; + + if (postConst == null) { + utility.writeNumeric(ctx, 
incremd.preConst); + caster.checkWriteCast(incremd); + } else { + utility.writeConstant(ctx, postConst); + } + + utility.checkWriteBranch(ctx); + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterExternal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterExternal.java new file mode 100644 index 000000000000..8ab729f98fa5 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterExternal.java @@ -0,0 +1,769 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.painless; + +import org.antlr.v4.runtime.ParserRuleContext; +import org.elasticsearch.painless.Definition.Constructor; +import org.elasticsearch.painless.Definition.Field; +import org.elasticsearch.painless.Definition.Method; +import org.elasticsearch.painless.Definition.Sort; +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Metadata.ExpressionMetadata; +import org.elasticsearch.painless.Metadata.ExtNodeMetadata; +import org.elasticsearch.painless.Metadata.ExternalMetadata; +import org.elasticsearch.painless.PainlessParser.ExpressionContext; +import org.elasticsearch.painless.PainlessParser.ExtbraceContext; +import org.elasticsearch.painless.PainlessParser.ExtcallContext; +import org.elasticsearch.painless.PainlessParser.ExtcastContext; +import org.elasticsearch.painless.PainlessParser.ExtdotContext; +import org.elasticsearch.painless.PainlessParser.ExtfieldContext; +import org.elasticsearch.painless.PainlessParser.ExtnewContext; +import org.elasticsearch.painless.PainlessParser.ExtprecContext; +import org.elasticsearch.painless.PainlessParser.ExtstartContext; +import org.elasticsearch.painless.PainlessParser.ExtstringContext; +import org.elasticsearch.painless.PainlessParser.ExttypeContext; +import org.elasticsearch.painless.PainlessParser.ExtvarContext; +import org.objectweb.asm.Opcodes; +import org.objectweb.asm.commons.GeneratorAdapter; + +import java.util.List; + +import static org.elasticsearch.painless.PainlessParser.ADD; +import static org.elasticsearch.painless.PainlessParser.DIV; +import static org.elasticsearch.painless.PainlessParser.MUL; +import static org.elasticsearch.painless.PainlessParser.REM; +import static org.elasticsearch.painless.PainlessParser.SUB; +import static org.elasticsearch.painless.WriterConstants.CLASS_TYPE; +import static org.elasticsearch.painless.WriterConstants.DEFINITION_TYPE; +import static org.elasticsearch.painless.WriterConstants.DEF_ARRAY_LOAD; +import 
static org.elasticsearch.painless.WriterConstants.DEF_ARRAY_STORE; +import static org.elasticsearch.painless.WriterConstants.DEF_FIELD_LOAD; +import static org.elasticsearch.painless.WriterConstants.DEF_FIELD_STORE; +import static org.elasticsearch.painless.WriterConstants.DEF_METHOD_CALL; +import static org.elasticsearch.painless.WriterConstants.TOBYTEEXACT_INT; +import static org.elasticsearch.painless.WriterConstants.TOBYTEEXACT_LONG; +import static org.elasticsearch.painless.WriterConstants.TOBYTEWOOVERFLOW_DOUBLE; +import static org.elasticsearch.painless.WriterConstants.TOBYTEWOOVERFLOW_FLOAT; +import static org.elasticsearch.painless.WriterConstants.TOCHAREXACT_INT; +import static org.elasticsearch.painless.WriterConstants.TOCHAREXACT_LONG; +import static org.elasticsearch.painless.WriterConstants.TOCHARWOOVERFLOW_DOUBLE; +import static org.elasticsearch.painless.WriterConstants.TOCHARWOOVERFLOW_FLOAT; +import static org.elasticsearch.painless.WriterConstants.TOFLOATWOOVERFLOW_DOUBLE; +import static org.elasticsearch.painless.WriterConstants.TOINTEXACT_LONG; +import static org.elasticsearch.painless.WriterConstants.TOINTWOOVERFLOW_DOUBLE; +import static org.elasticsearch.painless.WriterConstants.TOINTWOOVERFLOW_FLOAT; +import static org.elasticsearch.painless.WriterConstants.TOLONGWOOVERFLOW_DOUBLE; +import static org.elasticsearch.painless.WriterConstants.TOLONGWOOVERFLOW_FLOAT; +import static org.elasticsearch.painless.WriterConstants.TOSHORTEXACT_INT; +import static org.elasticsearch.painless.WriterConstants.TOSHORTEXACT_LONG; +import static org.elasticsearch.painless.WriterConstants.TOSHORTWOOVERFLOW_DOUBLE; +import static org.elasticsearch.painless.WriterConstants.TOSHORTWOOVERFLOW_FLOAT; + +class WriterExternal { + private final Metadata metadata; + private final Definition definition; + private final CompilerSettings settings; + + private final GeneratorAdapter execute; + + private final Writer writer; + private final WriterUtility utility; + private 
final WriterCaster caster; + + WriterExternal(final Metadata metadata, final GeneratorAdapter execute, final Writer writer, + final WriterUtility utility, final WriterCaster caster) { + this.metadata = metadata; + definition = metadata.definition; + settings = metadata.settings; + + this.execute = execute; + + this.writer = writer; + this.utility = utility; + this.caster = caster; + } + + void processExtstart(final ExtstartContext ctx) { + final ExternalMetadata startemd = metadata.getExternalMetadata(ctx); + + if (startemd.token == ADD) { + final ExpressionMetadata storeemd = metadata.getExpressionMetadata(startemd.storeExpr); + + if (startemd.current.sort == Sort.STRING || storeemd.from.sort == Sort.STRING) { + utility.writeNewStrings(); + utility.addStrings(startemd.storeExpr); + } + } + + final ExtprecContext precctx = ctx.extprec(); + final ExtcastContext castctx = ctx.extcast(); + final ExttypeContext typectx = ctx.exttype(); + final ExtvarContext varctx = ctx.extvar(); + final ExtnewContext newctx = ctx.extnew(); + final ExtstringContext stringctx = ctx.extstring(); + + if (precctx != null) { + writer.visit(precctx); + } else if (castctx != null) { + writer.visit(castctx); + } else if (typectx != null) { + writer.visit(typectx); + } else if (varctx != null) { + writer.visit(varctx); + } else if (newctx != null) { + writer.visit(newctx); + } else if (stringctx != null) { + writer.visit(stringctx); + } else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + } + + void processExtprec(final ExtprecContext ctx) { + final ExtprecContext precctx = ctx.extprec(); + final ExtcastContext castctx = ctx.extcast(); + final ExttypeContext typectx = ctx.exttype(); + final ExtvarContext varctx = ctx.extvar(); + final ExtnewContext newctx = ctx.extnew(); + final ExtstringContext stringctx = ctx.extstring(); + + if (precctx != null) { + writer.visit(precctx); + } else if (castctx != null) { + writer.visit(castctx); + } else if (typectx 
!= null) { + writer.visit(typectx); + } else if (varctx != null) { + writer.visit(varctx); + } else if (newctx != null) { + writer.visit(newctx); + } else if (stringctx != null) { + writer.visit(stringctx); + } else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + if (dotctx != null) { + writer.visit(dotctx); + } else if (bracectx != null) { + writer.visit(bracectx); + } + } + + void processExtcast(final ExtcastContext ctx) { + ExtNodeMetadata castenmd = metadata.getExtNodeMetadata(ctx); + + final ExtprecContext precctx = ctx.extprec(); + final ExtcastContext castctx = ctx.extcast(); + final ExttypeContext typectx = ctx.exttype(); + final ExtvarContext varctx = ctx.extvar(); + final ExtnewContext newctx = ctx.extnew(); + final ExtstringContext stringctx = ctx.extstring(); + + if (precctx != null) { + writer.visit(precctx); + } else if (castctx != null) { + writer.visit(castctx); + } else if (typectx != null) { + writer.visit(typectx); + } else if (varctx != null) { + writer.visit(varctx); + } else if (newctx != null) { + writer.visit(newctx); + } else if (stringctx != null) { + writer.visit(stringctx); + } else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + + caster.checkWriteCast(ctx, castenmd.castTo); + } + + void processExtbrace(final ExtbraceContext ctx) { + final ExpressionContext exprctx = ctx.expression(); + + writer.visit(exprctx); + writeLoadStoreExternal(ctx); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + if (dotctx != null) { + writer.visit(dotctx); + } else if (bracectx != null) { + writer.visit(bracectx); + } + } + + void processExtdot(final ExtdotContext ctx) { + final ExtcallContext callctx = ctx.extcall(); + final ExtfieldContext fieldctx = ctx.extfield(); + + if (callctx != null) { + writer.visit(callctx); + } 
else if (fieldctx != null) { + writer.visit(fieldctx); + } + } + + void processExttype(final ExttypeContext ctx) { + writer.visit(ctx.extdot()); + } + + void processExtcall(final ExtcallContext ctx) { + writeCallExternal(ctx); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + if (dotctx != null) { + writer.visit(dotctx); + } else if (bracectx != null) { + writer.visit(bracectx); + } + } + + void processExtvar(final ExtvarContext ctx) { + writeLoadStoreExternal(ctx); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + if (dotctx != null) { + writer.visit(dotctx); + } else if (bracectx != null) { + writer.visit(bracectx); + } + } + + void processExtfield(final ExtfieldContext ctx) { + writeLoadStoreExternal(ctx); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + if (dotctx != null) { + writer.visit(dotctx); + } else if (bracectx != null) { + writer.visit(bracectx); + } + } + + void processExtnew(final ExtnewContext ctx) { + writeNewExternal(ctx); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + if (dotctx != null) { + writer.visit(dotctx); + } else if (bracectx != null) { + writer.visit(bracectx); + } + } + + void processExtstring(final ExtstringContext ctx) { + final ExtNodeMetadata stringenmd = metadata.getExtNodeMetadata(ctx); + + utility.writeConstant(ctx, stringenmd.target); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + if (dotctx != null) { + writer.visit(dotctx); + } else if (bracectx != null) { + writer.visit(bracectx); + } + } + + private void writeLoadStoreExternal(final ParserRuleContext source) { + final ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source); + final ExternalMetadata parentemd = metadata.getExternalMetadata(sourceenmd.parent); + + final boolean length = 
"#length".equals(sourceenmd.target); + final boolean array = "#brace".equals(sourceenmd.target); + final boolean name = sourceenmd.target instanceof String && !length && !array; + final boolean variable = sourceenmd.target instanceof Integer; + final boolean field = sourceenmd.target instanceof Field; + final boolean shortcut = sourceenmd.target instanceof Object[]; + + if (!length && !variable && !field && !array && !name && !shortcut) { + throw new IllegalStateException(WriterUtility.error(source) + "Target not found for load/store."); + } + + final boolean maplist = shortcut && (boolean)((Object[])sourceenmd.target)[2]; + final Object constant = shortcut ? ((Object[])sourceenmd.target)[3] : null; + + final boolean x1 = field || name || (shortcut && !maplist); + final boolean x2 = array || (shortcut && maplist); + + if (length) { + execute.arrayLength(); + } else if (sourceenmd.last && parentemd.storeExpr != null) { + final ExpressionMetadata expremd = metadata.getExpressionMetadata(parentemd.storeExpr); + final boolean cat = utility.containsStrings(parentemd.storeExpr); + + if (cat) { + if (field || name || shortcut) { + execute.dupX1(); + } else if (array) { + execute.dup2X1(); + } + + if (maplist) { + if (constant != null) { + utility.writeConstant(source, constant); + } + + execute.dupX2(); + } + + writeLoadStoreInstruction(source, false, variable, field, name, array, shortcut); + utility.writeAppendStrings(sourceenmd.type.sort); + writer.visit(parentemd.storeExpr); + + if (utility.containsStrings(parentemd.storeExpr)) { + utility.writeAppendStrings(expremd.to.sort); + utility.removeStrings(parentemd.storeExpr); + } + + utility.writeToStrings(); + caster.checkWriteCast(source, sourceenmd.castTo); + + if (parentemd.read) { + utility.writeDup(sourceenmd.type.sort.size, x1, x2); + } + + writeLoadStoreInstruction(source, true, variable, field, name, array, shortcut); + } else if (parentemd.token > 0) { + final int token = parentemd.token; + + if (field || name || 
shortcut) { + execute.dup(); + } else if (array) { + execute.dup2(); + } + + if (maplist) { + if (constant != null) { + utility.writeConstant(source, constant); + } + + execute.dupX1(); + } + + writeLoadStoreInstruction(source, false, variable, field, name, array, shortcut); + + if (parentemd.read && parentemd.post) { + utility.writeDup(sourceenmd.type.sort.size, x1, x2); + } + + caster.checkWriteCast(source, sourceenmd.castFrom); + writer.visit(parentemd.storeExpr); + + utility.writeBinaryInstruction(source, sourceenmd.promote, token); + + boolean exact = false; + + if (!settings.getNumericOverflow() && expremd.typesafe && sourceenmd.type.sort != Sort.DEF && + (token == MUL || token == DIV || token == REM || token == ADD || token == SUB)) { + exact = writeExactInstruction(sourceenmd.type.sort, sourceenmd.promote.sort); + } + + if (!exact) { + caster.checkWriteCast(source, sourceenmd.castTo); + } + + if (parentemd.read && !parentemd.post) { + utility.writeDup(sourceenmd.type.sort.size, x1, x2); + } + + writeLoadStoreInstruction(source, true, variable, field, name, array, shortcut); + } else { + if (constant != null) { + utility.writeConstant(source, constant); + } + + writer.visit(parentemd.storeExpr); + + if (parentemd.read) { + utility.writeDup(sourceenmd.type.sort.size, x1, x2); + } + + writeLoadStoreInstruction(source, true, variable, field, name, array, shortcut); + } + } else { + if (constant != null) { + utility.writeConstant(source, constant); + } + + writeLoadStoreInstruction(source, false, variable, field, name, array, shortcut); + } + } + + private void writeLoadStoreInstruction(final ParserRuleContext source, + final boolean store, final boolean variable, + final boolean field, final boolean name, + final boolean array, final boolean shortcut) { + final ExtNodeMetadata sourceemd = metadata.getExtNodeMetadata(source); + + if (variable) { + writeLoadStoreVariable(source, store, sourceemd.type, (int)sourceemd.target); + } else if (field) { + 
writeLoadStoreField(store, (Field)sourceemd.target); + } else if (name) { + writeLoadStoreField(source, store, (String)sourceemd.target); + } else if (array) { + writeLoadStoreArray(source, store, sourceemd.type); + } else if (shortcut) { + Object[] targets = (Object[])sourceemd.target; + writeLoadStoreShortcut(store, (Method)targets[0], (Method)targets[1]); + } else { + throw new IllegalStateException(WriterUtility.error(source) + "Load/Store requires a variable, field, or array."); + } + } + + private void writeLoadStoreVariable(final ParserRuleContext source, final boolean store, + final Type type, final int slot) { + if (type.sort == Sort.VOID) { + throw new IllegalStateException(WriterUtility.error(source) + "Cannot load/store void type."); + } + + if (store) { + execute.visitVarInsn(type.type.getOpcode(Opcodes.ISTORE), slot); + } else { + execute.visitVarInsn(type.type.getOpcode(Opcodes.ILOAD), slot); + } + } + + private void writeLoadStoreField(final boolean store, final Field field) { + if (java.lang.reflect.Modifier.isStatic(field.reflect.getModifiers())) { + if (store) { + execute.putStatic(field.owner.type, field.reflect.getName(), field.type.type); + } else { + execute.getStatic(field.owner.type, field.reflect.getName(), field.type.type); + + if (!field.generic.clazz.equals(field.type.clazz)) { + execute.checkCast(field.generic.type); + } + } + } else { + if (store) { + execute.putField(field.owner.type, field.reflect.getName(), field.type.type); + } else { + execute.getField(field.owner.type, field.reflect.getName(), field.type.type); + + if (!field.generic.clazz.equals(field.type.clazz)) { + execute.checkCast(field.generic.type); + } + } + } + } + + private void writeLoadStoreField(final ParserRuleContext source, final boolean store, final String name) { + if (store) { + final ExtNodeMetadata sourceemd = metadata.getExtNodeMetadata(source); + final ExternalMetadata parentemd = metadata.getExternalMetadata(sourceemd.parent); + final ExpressionMetadata 
expremd = metadata.getExpressionMetadata(parentemd.storeExpr); + + execute.push(name); + execute.loadThis(); + execute.getField(CLASS_TYPE, "definition", DEFINITION_TYPE); + execute.push(parentemd.token == 0 && expremd.typesafe); + execute.invokeStatic(definition.defobjType.type, DEF_FIELD_STORE); + } else { + execute.push(name); + execute.loadThis(); + execute.getField(CLASS_TYPE, "definition", DEFINITION_TYPE); + execute.invokeStatic(definition.defobjType.type, DEF_FIELD_LOAD); + } + } + + private void writeLoadStoreArray(final ParserRuleContext source, final boolean store, final Type type) { + if (type.sort == Sort.VOID) { + throw new IllegalStateException(WriterUtility.error(source) + "Cannot load/store void type."); + } + + if (type.sort == Sort.DEF) { + final ExtbraceContext bracectx = (ExtbraceContext)source; + final ExpressionMetadata expremd0 = metadata.getExpressionMetadata(bracectx.expression()); + + if (store) { + final ExtNodeMetadata braceenmd = metadata.getExtNodeMetadata(bracectx); + final ExternalMetadata parentemd = metadata.getExternalMetadata(braceenmd.parent); + final ExpressionMetadata expremd1 = metadata.getExpressionMetadata(parentemd.storeExpr); + + execute.loadThis(); + execute.getField(CLASS_TYPE, "definition", DEFINITION_TYPE); + execute.push(expremd0.typesafe); + execute.push(parentemd.token == 0 && expremd1.typesafe); + execute.invokeStatic(definition.defobjType.type, DEF_ARRAY_STORE); + } else { + execute.loadThis(); + execute.getField(CLASS_TYPE, "definition", DEFINITION_TYPE); + execute.push(expremd0.typesafe); + execute.invokeStatic(definition.defobjType.type, DEF_ARRAY_LOAD); + } + } else { + if (store) { + execute.arrayStore(type.type); + } else { + execute.arrayLoad(type.type); + } + } + } + + private void writeLoadStoreShortcut(final boolean store, final Method getter, final Method setter) { + final Method method = store ? 
setter : getter; + + if (java.lang.reflect.Modifier.isInterface(getter.owner.clazz.getModifiers())) { + execute.invokeInterface(method.owner.type, method.method); + } else { + execute.invokeVirtual(method.owner.type, method.method); + } + + if (store) { + utility.writePop(method.rtn.type.getSize()); + } else if (!method.rtn.clazz.equals(method.handle.type().returnType())) { + execute.checkCast(method.rtn.type); + } + } + + /** + * Called for any compound assignment (including increment/decrement instructions). + * We have to be stricter than writeBinary, and do overflow checks against the original type's size + * instead of the promoted type's size, since the result will be implicitly cast back. + * + * @return This will be true if an instruction is written, false otherwise. + */ + private boolean writeExactInstruction(final Sort osort, final Sort psort) { + if (psort == Sort.DOUBLE) { + if (osort == Sort.FLOAT) { + execute.invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE); + } else if (osort == Sort.FLOAT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE); + execute.checkCast(definition.floatobjType.type); + } else if (osort == Sort.LONG) { + execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE); + } else if (osort == Sort.LONG_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE); + execute.checkCast(definition.longobjType.type); + } else if (osort == Sort.INT) { + execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE); + } else if (osort == Sort.INT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE); + execute.checkCast(definition.intobjType.type); + } else if (osort == Sort.CHAR) { + execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE); + } else if (osort == Sort.CHAR_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE); + 
execute.checkCast(definition.charobjType.type); + } else if (osort == Sort.SHORT) { + execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE); + } else if (osort == Sort.SHORT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE); + execute.checkCast(definition.shortobjType.type); + } else if (osort == Sort.BYTE) { + execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE); + } else if (osort == Sort.BYTE_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE); + execute.checkCast(definition.byteobjType.type); + } else { + return false; + } + } else if (psort == Sort.FLOAT) { + if (osort == Sort.LONG) { + execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT); + } else if (osort == Sort.LONG_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT); + execute.checkCast(definition.longobjType.type); + } else if (osort == Sort.INT) { + execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT); + } else if (osort == Sort.INT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT); + execute.checkCast(definition.intobjType.type); + } else if (osort == Sort.CHAR) { + execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT); + } else if (osort == Sort.CHAR_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT); + execute.checkCast(definition.charobjType.type); + } else if (osort == Sort.SHORT) { + execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT); + } else if (osort == Sort.SHORT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT); + execute.checkCast(definition.shortobjType.type); + } else if (osort == Sort.BYTE) { + execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT); + } else if (osort == Sort.BYTE_OBJ) { + execute.invokeStatic(definition.utilityType.type, 
TOBYTEWOOVERFLOW_FLOAT); + execute.checkCast(definition.byteobjType.type); + } else { + return false; + } + } else if (psort == Sort.LONG) { + if (osort == Sort.INT) { + execute.invokeStatic(definition.mathType.type, TOINTEXACT_LONG); + } else if (osort == Sort.INT_OBJ) { + execute.invokeStatic(definition.mathType.type, TOINTEXACT_LONG); + execute.checkCast(definition.intobjType.type); + } else if (osort == Sort.CHAR) { + execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG); + } else if (osort == Sort.CHAR_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG); + execute.checkCast(definition.charobjType.type); + } else if (osort == Sort.SHORT) { + execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG); + } else if (osort == Sort.SHORT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG); + execute.checkCast(definition.shortobjType.type); + } else if (osort == Sort.BYTE) { + execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG); + } else if (osort == Sort.BYTE_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG); + execute.checkCast(definition.byteobjType.type); + } else { + return false; + } + } else if (psort == Sort.INT) { + if (osort == Sort.CHAR) { + execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_INT); + } else if (osort == Sort.CHAR_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_INT); + execute.checkCast(definition.charobjType.type); + } else if (osort == Sort.SHORT) { + execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT); + } else if (osort == Sort.SHORT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT); + execute.checkCast(definition.shortobjType.type); + } else if (osort == Sort.BYTE) { + execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT); + } else if (osort == Sort.BYTE_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT); + 
execute.checkCast(definition.byteobjType.type); + } else { + return false; + } + } else { + return false; + } + + return true; + } + + private void writeNewExternal(final ExtnewContext source) { + final ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source); + final ExternalMetadata parentemd = metadata.getExternalMetadata(sourceenmd.parent); + + final boolean makearray = "#makearray".equals(sourceenmd.target); + final boolean constructor = sourceenmd.target instanceof Constructor; + + if (!makearray && !constructor) { + throw new IllegalStateException(WriterUtility.error(source) + "Target not found for new call."); + } + + if (makearray) { + for (final ExpressionContext exprctx : source.expression()) { + writer.visit(exprctx); + } + + if (sourceenmd.type.sort == Sort.ARRAY) { + execute.visitMultiANewArrayInsn(sourceenmd.type.type.getDescriptor(), sourceenmd.type.type.getDimensions()); + } else { + execute.newArray(sourceenmd.type.type); + } + } else { + execute.newInstance(sourceenmd.type.type); + + if (parentemd.read) { + execute.dup(); + } + + for (final ExpressionContext exprctx : source.arguments().expression()) { + writer.visit(exprctx); + } + + final Constructor target = (Constructor)sourceenmd.target; + execute.invokeConstructor(target.owner.type, target.method); + } + } + + private void writeCallExternal(final ExtcallContext source) { + final ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source); + + final boolean method = sourceenmd.target instanceof Method; + final boolean def = sourceenmd.target instanceof String; + + if (!method && !def) { + throw new IllegalStateException(WriterUtility.error(source) + "Target not found for call."); + } + + final List arguments = source.arguments().expression(); + + if (method) { + for (final ExpressionContext exprctx : arguments) { + writer.visit(exprctx); + } + + final Method target = (Method)sourceenmd.target; + + if (java.lang.reflect.Modifier.isStatic(target.reflect.getModifiers())) { + 
execute.invokeStatic(target.owner.type, target.method); + } else if (java.lang.reflect.Modifier.isInterface(target.owner.clazz.getModifiers())) { + execute.invokeInterface(target.owner.type, target.method); + } else { + execute.invokeVirtual(target.owner.type, target.method); + } + + if (!target.rtn.clazz.equals(target.handle.type().returnType())) { + execute.checkCast(target.rtn.type); + } + } else { + execute.push((String)sourceenmd.target); + execute.loadThis(); + execute.getField(CLASS_TYPE, "definition", DEFINITION_TYPE); + + execute.push(arguments.size()); + execute.newArray(definition.defType.type); + + for (int argument = 0; argument < arguments.size(); ++argument) { + execute.dup(); + execute.push(argument); + writer.visit(arguments.get(argument)); + execute.arrayStore(definition.defType.type); + } + + execute.push(arguments.size()); + execute.newArray(definition.booleanType.type); + + for (int argument = 0; argument < arguments.size(); ++argument) { + execute.dup(); + execute.push(argument); + execute.push(metadata.getExpressionMetadata(arguments.get(argument)).typesafe); + execute.arrayStore(definition.booleanType.type); + } + + execute.invokeStatic(definition.defobjType.type, DEF_METHOD_CALL); + } + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterStatement.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterStatement.java new file mode 100644 index 000000000000..a0e70f319b51 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterStatement.java @@ -0,0 +1,391 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless; + +import org.elasticsearch.painless.Definition.Sort; +import org.elasticsearch.painless.Metadata.ExpressionMetadata; +import org.elasticsearch.painless.Metadata.StatementMetadata; +import org.elasticsearch.painless.PainlessParser.AfterthoughtContext; +import org.elasticsearch.painless.PainlessParser.BlockContext; +import org.elasticsearch.painless.PainlessParser.DeclContext; +import org.elasticsearch.painless.PainlessParser.DeclarationContext; +import org.elasticsearch.painless.PainlessParser.DecltypeContext; +import org.elasticsearch.painless.PainlessParser.DeclvarContext; +import org.elasticsearch.painless.PainlessParser.DoContext; +import org.elasticsearch.painless.PainlessParser.EmptyscopeContext; +import org.elasticsearch.painless.PainlessParser.ExprContext; +import org.elasticsearch.painless.PainlessParser.ExpressionContext; +import org.elasticsearch.painless.PainlessParser.ForContext; +import org.elasticsearch.painless.PainlessParser.IfContext; +import org.elasticsearch.painless.PainlessParser.InitializerContext; +import org.elasticsearch.painless.PainlessParser.MultipleContext; +import org.elasticsearch.painless.PainlessParser.ReturnContext; +import org.elasticsearch.painless.PainlessParser.SingleContext; +import org.elasticsearch.painless.PainlessParser.SourceContext; +import org.elasticsearch.painless.PainlessParser.StatementContext; +import org.elasticsearch.painless.PainlessParser.ThrowContext; +import org.elasticsearch.painless.PainlessParser.TrapContext; +import 
org.elasticsearch.painless.PainlessParser.TryContext; +import org.elasticsearch.painless.PainlessParser.WhileContext; +import org.elasticsearch.painless.WriterUtility.Branch; +import org.objectweb.asm.Label; +import org.objectweb.asm.Opcodes; +import org.objectweb.asm.commons.GeneratorAdapter; + +import static org.elasticsearch.painless.WriterConstants.PAINLESS_ERROR_TYPE; + +class WriterStatement { + private final Metadata metadata; + + private final GeneratorAdapter execute; + + private final Writer writer; + private final WriterUtility utility; + + WriterStatement(final Metadata metadata, final GeneratorAdapter execute, + final Writer writer, final WriterUtility utility) { + this.metadata = metadata; + + this.execute = execute; + + this.writer = writer; + this.utility = utility; + } + + void processSource(final SourceContext ctx) { + final StatementMetadata sourcesmd = metadata.getStatementMetadata(ctx); + + for (final StatementContext sctx : ctx.statement()) { + writer.visit(sctx); + } + + if (!sourcesmd.methodEscape) { + execute.visitInsn(Opcodes.ACONST_NULL); + execute.returnValue(); + } + } + + void processIf(final IfContext ctx) { + final ExpressionContext exprctx = ctx.expression(); + final boolean els = ctx.ELSE() != null; + final Branch branch = utility.markBranch(ctx, exprctx); + branch.end = new Label(); + branch.fals = els ? 
new Label() : branch.end; + + writer.visit(exprctx); + + final BlockContext blockctx0 = ctx.block(0); + final StatementMetadata blockmd0 = metadata.getStatementMetadata(blockctx0); + writer.visit(blockctx0); + + if (els) { + if (!blockmd0.allLast) { + execute.goTo(branch.end); + } + + execute.mark(branch.fals); + writer.visit(ctx.block(1)); + } + + execute.mark(branch.end); + } + + void processWhile(final WhileContext ctx) { + final ExpressionContext exprctx = ctx.expression(); + final Branch branch = utility.markBranch(ctx, exprctx); + branch.begin = new Label(); + branch.end = new Label(); + branch.fals = branch.end; + + utility.pushJump(branch); + execute.mark(branch.begin); + writer.visit(exprctx); + + final BlockContext blockctx = ctx.block(); + boolean allLast = false; + + if (blockctx != null) { + final StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); + allLast = blocksmd.allLast; + writeLoopCounter(blocksmd.count > 0 ? blocksmd.count : 1); + writer.visit(blockctx); + } else if (ctx.empty() != null) { + writeLoopCounter(1); + } else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + + if (!allLast) { + execute.goTo(branch.begin); + } + + execute.mark(branch.end); + utility.popJump(); + } + + void processDo(final DoContext ctx) { + final ExpressionContext exprctx = ctx.expression(); + final Branch branch = utility.markBranch(ctx, exprctx); + Label start = new Label(); + branch.begin = new Label(); + branch.end = new Label(); + branch.fals = branch.end; + + final BlockContext blockctx = ctx.block(); + final StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); + + utility.pushJump(branch); + execute.mark(start); + writer.visit(blockctx); + execute.mark(branch.begin); + writer.visit(exprctx); + writeLoopCounter(blocksmd.count > 0 ? 
blocksmd.count : 1); + execute.goTo(start); + execute.mark(branch.end); + utility.popJump(); + } + + void processFor(final ForContext ctx) { + final ExpressionContext exprctx = ctx.expression(); + final AfterthoughtContext atctx = ctx.afterthought(); + final Branch branch = utility.markBranch(ctx, exprctx); + final Label start = new Label(); + branch.begin = atctx == null ? start : new Label(); + branch.end = new Label(); + branch.fals = branch.end; + + utility.pushJump(branch); + + if (ctx.initializer() != null) { + writer.visit(ctx.initializer()); + } + + execute.mark(start); + + if (exprctx != null) { + writer.visit(exprctx); + } + + final BlockContext blockctx = ctx.block(); + boolean allLast = false; + + if (blockctx != null) { + StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); + allLast = blocksmd.allLast; + + int count = blocksmd.count > 0 ? blocksmd.count : 1; + + if (atctx != null) { + ++count; + } + + writeLoopCounter(count); + writer.visit(blockctx); + } else if (ctx.empty() != null) { + writeLoopCounter(1); + } else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + + if (atctx != null) { + execute.mark(branch.begin); + writer.visit(atctx); + } + + if (atctx != null || !allLast) { + execute.goTo(start); + } + + execute.mark(branch.end); + utility.popJump(); + } + + void processDecl(final DeclContext ctx) { + writer.visit(ctx.declaration()); + } + + void processContinue() { + final Branch jump = utility.peekJump(); + execute.goTo(jump.begin); + } + + void processBreak() { + final Branch jump = utility.peekJump(); + execute.goTo(jump.end); + } + + void processReturn(final ReturnContext ctx) { + writer.visit(ctx.expression()); + execute.returnValue(); + } + + void processTry(final TryContext ctx) { + final TrapContext[] trapctxs = new TrapContext[ctx.trap().size()]; + ctx.trap().toArray(trapctxs); + final Branch branch = utility.markBranch(ctx, trapctxs); + + Label end = new Label(); + 
branch.begin = new Label(); + branch.end = new Label(); + branch.tru = trapctxs.length > 1 ? end : null; + + execute.mark(branch.begin); + + final BlockContext blockctx = ctx.block(); + final StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); + writer.visit(blockctx); + + if (!blocksmd.allLast) { + execute.goTo(end); + } + + execute.mark(branch.end); + + for (final TrapContext trapctx : trapctxs) { + writer.visit(trapctx); + } + + if (!blocksmd.allLast || trapctxs.length > 1) { + execute.mark(end); + } + } + + void processThrow(final ThrowContext ctx) { + writer.visit(ctx.expression()); + execute.throwException(); + } + + void processExpr(final ExprContext ctx) { + final StatementMetadata exprsmd = metadata.getStatementMetadata(ctx); + final ExpressionContext exprctx = ctx.expression(); + final ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx); + writer.visit(exprctx); + + if (exprsmd.methodEscape) { + execute.returnValue(); + } else { + utility.writePop(expremd.to.type.getSize()); + } + } + + void processMultiple(final MultipleContext ctx) { + for (final StatementContext sctx : ctx.statement()) { + writer.visit(sctx); + } + } + + void processSingle(final SingleContext ctx) { + writer.visit(ctx.statement()); + } + + void processInitializer(InitializerContext ctx) { + final DeclarationContext declctx = ctx.declaration(); + final ExpressionContext exprctx = ctx.expression(); + + if (declctx != null) { + writer.visit(declctx); + } else if (exprctx != null) { + final ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx); + writer.visit(exprctx); + utility.writePop(expremd.to.type.getSize()); + } else { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + } + + void processAfterthought(AfterthoughtContext ctx) { + final ExpressionContext exprctx = ctx.expression(); + final ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx); + writer.visit(ctx.expression()); + 
utility.writePop(expremd.to.type.getSize()); + } + + void processDeclaration(DeclarationContext ctx) { + for (final DeclvarContext declctx : ctx.declvar()) { + writer.visit(declctx); + } + } + + void processDeclvar(final DeclvarContext ctx) { + final ExpressionMetadata declvaremd = metadata.getExpressionMetadata(ctx); + final org.objectweb.asm.Type type = declvaremd.to.type; + final Sort sort = declvaremd.to.sort; + final int slot = (int)declvaremd.postConst; + + final ExpressionContext exprctx = ctx.expression(); + final boolean initialize = exprctx == null; + + if (!initialize) { + writer.visit(exprctx); + } + + switch (sort) { + case VOID: throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + case BOOL: + case BYTE: + case SHORT: + case CHAR: + case INT: if (initialize) execute.push(0); break; + case LONG: if (initialize) execute.push(0L); break; + case FLOAT: if (initialize) execute.push(0.0F); break; + case DOUBLE: if (initialize) execute.push(0.0); break; + default: if (initialize) execute.visitInsn(Opcodes.ACONST_NULL); + } + + execute.visitVarInsn(type.getOpcode(Opcodes.ISTORE), slot); + } + + void processTrap(final TrapContext ctx) { + final StatementMetadata trapsmd = metadata.getStatementMetadata(ctx); + + final Branch branch = utility.getBranch(ctx); + final Label jump = new Label(); + + final BlockContext blockctx = ctx.block(); + final EmptyscopeContext emptyctx = ctx.emptyscope(); + + execute.mark(jump); + execute.visitVarInsn(trapsmd.exception.type.getOpcode(Opcodes.ISTORE), trapsmd.slot); + + if (blockctx != null) { + writer.visit(ctx.block()); + } else if (emptyctx == null) { + throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state."); + } + + execute.visitTryCatchBlock(branch.begin, branch.end, jump, trapsmd.exception.type.getInternalName()); + + if (branch.tru != null && !trapsmd.allLast) { + execute.goTo(branch.tru); + } + } + + private void writeLoopCounter(final int count) { + final Label end 
= new Label(); + + execute.iinc(metadata.loopCounterSlot, -count); + execute.visitVarInsn(Opcodes.ILOAD, metadata.loopCounterSlot); + execute.push(0); + execute.ifICmp(GeneratorAdapter.GT, end); + execute.throwException(PAINLESS_ERROR_TYPE, + "The maximum number of statements that can be executed in a loop has been reached."); + execute.mark(end); + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterUtility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterUtility.java new file mode 100644 index 000000000000..d7e4f4ce42ca --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterUtility.java @@ -0,0 +1,387 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.painless; + +import org.antlr.v4.runtime.ParserRuleContext; +import org.elasticsearch.painless.Definition.Sort; +import org.elasticsearch.painless.Definition.Type; +import org.objectweb.asm.Label; +import org.objectweb.asm.Opcodes; +import org.objectweb.asm.commons.GeneratorAdapter; + +import java.util.ArrayDeque; +import java.util.Deque; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.painless.PainlessParser.ADD; +import static org.elasticsearch.painless.PainlessParser.BWAND; +import static org.elasticsearch.painless.PainlessParser.BWOR; +import static org.elasticsearch.painless.PainlessParser.BWXOR; +import static org.elasticsearch.painless.PainlessParser.DIV; +import static org.elasticsearch.painless.PainlessParser.LSH; +import static org.elasticsearch.painless.PainlessParser.MUL; +import static org.elasticsearch.painless.PainlessParser.REM; +import static org.elasticsearch.painless.PainlessParser.RSH; +import static org.elasticsearch.painless.PainlessParser.SUB; +import static org.elasticsearch.painless.PainlessParser.USH; +import static org.elasticsearch.painless.WriterConstants.ADDEXACT_INT; +import static org.elasticsearch.painless.WriterConstants.ADDEXACT_LONG; +import static org.elasticsearch.painless.WriterConstants.ADDWOOVERLOW_DOUBLE; +import static org.elasticsearch.painless.WriterConstants.ADDWOOVERLOW_FLOAT; +import static org.elasticsearch.painless.WriterConstants.DEF_ADD_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_AND_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_DIV_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_LSH_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_MUL_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_OR_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_REM_CALL; +import static 
org.elasticsearch.painless.WriterConstants.DEF_RSH_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_SUB_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_USH_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_XOR_CALL; +import static org.elasticsearch.painless.WriterConstants.DIVWOOVERLOW_DOUBLE; +import static org.elasticsearch.painless.WriterConstants.DIVWOOVERLOW_FLOAT; +import static org.elasticsearch.painless.WriterConstants.DIVWOOVERLOW_INT; +import static org.elasticsearch.painless.WriterConstants.DIVWOOVERLOW_LONG; +import static org.elasticsearch.painless.WriterConstants.MULEXACT_INT; +import static org.elasticsearch.painless.WriterConstants.MULEXACT_LONG; +import static org.elasticsearch.painless.WriterConstants.MULWOOVERLOW_DOUBLE; +import static org.elasticsearch.painless.WriterConstants.MULWOOVERLOW_FLOAT; +import static org.elasticsearch.painless.WriterConstants.REMWOOVERLOW_DOUBLE; +import static org.elasticsearch.painless.WriterConstants.REMWOOVERLOW_FLOAT; +import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_BOOLEAN; +import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_CHAR; +import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_DOUBLE; +import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_FLOAT; +import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_INT; +import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_LONG; +import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_OBJECT; +import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_STRING; +import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_CONSTRUCTOR; +import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_TOSTRING; +import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_TYPE; +import static 
org.elasticsearch.painless.WriterConstants.SUBEXACT_INT; +import static org.elasticsearch.painless.WriterConstants.SUBEXACT_LONG; +import static org.elasticsearch.painless.WriterConstants.SUBWOOVERLOW_DOUBLE; +import static org.elasticsearch.painless.WriterConstants.SUBWOOVERLOW_FLOAT; + +class WriterUtility { + static class Branch { + final ParserRuleContext source; + + Label begin = null; + Label end = null; + Label tru = null; + Label fals = null; + + private Branch(final ParserRuleContext source) { + this.source = source; + } + } + + /** + * A utility method to output consistent error messages. + * @param ctx The ANTLR node the error occurred in. + * @return The error message with tacked on line number and character position. + */ + static String error(final ParserRuleContext ctx) { + return "Writer Error [" + ctx.getStart().getLine() + ":" + ctx.getStart().getCharPositionInLine() + "]: "; + } + + private final Definition definition; + private final CompilerSettings settings; + + private final GeneratorAdapter execute; + + private final Map branches = new HashMap<>(); + private final Deque jumps = new ArrayDeque<>(); + private final Set strings = new HashSet<>(); + + WriterUtility(final Metadata metadata, final GeneratorAdapter execute) { + definition = metadata.definition; + settings = metadata.settings; + + this.execute = execute; + } + + Branch markBranch(final ParserRuleContext source, final ParserRuleContext... nodes) { + final Branch branch = new Branch(source); + + for (final ParserRuleContext node : nodes) { + branches.put(node, branch); + } + + return branch; + } + + void copyBranch(final Branch branch, final ParserRuleContext... 
nodes) { + for (final ParserRuleContext node : nodes) { + branches.put(node, branch); + } + } + + Branch getBranch(final ParserRuleContext source) { + return branches.get(source); + } + + void checkWriteBranch(final ParserRuleContext source) { + final Branch branch = getBranch(source); + + if (branch != null) { + if (branch.tru != null) { + execute.visitJumpInsn(Opcodes.IFNE, branch.tru); + } else if (branch.fals != null) { + execute.visitJumpInsn(Opcodes.IFEQ, branch.fals); + } + } + } + + void pushJump(final Branch branch) { + jumps.push(branch); + } + + Branch peekJump() { + return jumps.peek(); + } + + void popJump() { + jumps.pop(); + } + + void addStrings(final ParserRuleContext source) { + strings.add(source); + } + + boolean containsStrings(final ParserRuleContext source) { + return strings.contains(source); + } + + void removeStrings(final ParserRuleContext source) { + strings.remove(source); + } + + void writeDup(final int size, final boolean x1, final boolean x2) { + if (size == 1) { + if (x2) { + execute.dupX2(); + } else if (x1) { + execute.dupX1(); + } else { + execute.dup(); + } + } else if (size == 2) { + if (x2) { + execute.dup2X2(); + } else if (x1) { + execute.dup2X1(); + } else { + execute.dup2(); + } + } + } + + void writePop(final int size) { + if (size == 1) { + execute.pop(); + } else if (size == 2) { + execute.pop2(); + } + } + + void writeConstant(final ParserRuleContext source, final Object constant) { + if (constant instanceof Number) { + writeNumeric(source, constant); + } else if (constant instanceof Character) { + writeNumeric(source, (int)(char)constant); + } else if (constant instanceof String) { + writeString(source, constant); + } else if (constant instanceof Boolean) { + writeBoolean(source, constant); + } else if (constant != null) { + throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state."); + } + } + + void writeNumeric(final ParserRuleContext source, final Object numeric) { + if (numeric instanceof 
Double) { + execute.push((double)numeric); + } else if (numeric instanceof Float) { + execute.push((float)numeric); + } else if (numeric instanceof Long) { + execute.push((long)numeric); + } else if (numeric instanceof Number) { + execute.push(((Number)numeric).intValue()); + } else { + throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state."); + } + } + + void writeString(final ParserRuleContext source, final Object string) { + if (string instanceof String) { + execute.push((String)string); + } else { + throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state."); + } + } + + void writeBoolean(final ParserRuleContext source, final Object bool) { + if (bool instanceof Boolean) { + execute.push((boolean)bool); + } else { + throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state."); + } + } + + void writeNewStrings() { + execute.newInstance(STRINGBUILDER_TYPE); + execute.dup(); + execute.invokeConstructor(STRINGBUILDER_TYPE, STRINGBUILDER_CONSTRUCTOR); + } + + void writeAppendStrings(final Sort sort) { + switch (sort) { + case BOOL: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_BOOLEAN); break; + case CHAR: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_CHAR); break; + case BYTE: + case SHORT: + case INT: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_INT); break; + case LONG: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_LONG); break; + case FLOAT: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_FLOAT); break; + case DOUBLE: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_DOUBLE); break; + case STRING: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_STRING); break; + default: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_OBJECT); + } + } + + void writeToStrings() { + execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_TOSTRING); + } + + void 
writeBinaryInstruction(final ParserRuleContext source, final Type type, final int token) { + final Sort sort = type.sort; + final boolean exact = !settings.getNumericOverflow() && + ((sort == Sort.INT || sort == Sort.LONG) && + (token == MUL || token == DIV || token == ADD || token == SUB) || + (sort == Sort.FLOAT || sort == Sort.DOUBLE) && + (token == MUL || token == DIV || token == REM || token == ADD || token == SUB)); + + // If it's a 64-bit shift, fix-up the last argument to truncate to 32-bits. + // Note that unlike java, this means we still do binary promotion of shifts, + // but it keeps things simple, and this check works because we promote shifts. + if (sort == Sort.LONG && (token == LSH || token == USH || token == RSH)) { + execute.cast(org.objectweb.asm.Type.LONG_TYPE, org.objectweb.asm.Type.INT_TYPE); + } + + if (exact) { + switch (sort) { + case INT: + switch (token) { + case MUL: execute.invokeStatic(definition.mathType.type, MULEXACT_INT); break; + case DIV: execute.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_INT); break; + case ADD: execute.invokeStatic(definition.mathType.type, ADDEXACT_INT); break; + case SUB: execute.invokeStatic(definition.mathType.type, SUBEXACT_INT); break; + default: + throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state."); + } + + break; + case LONG: + switch (token) { + case MUL: execute.invokeStatic(definition.mathType.type, MULEXACT_LONG); break; + case DIV: execute.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_LONG); break; + case ADD: execute.invokeStatic(definition.mathType.type, ADDEXACT_LONG); break; + case SUB: execute.invokeStatic(definition.mathType.type, SUBEXACT_LONG); break; + default: + throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state."); + } + + break; + case FLOAT: + switch (token) { + case MUL: execute.invokeStatic(definition.utilityType.type, MULWOOVERLOW_FLOAT); break; + case DIV: 
execute.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_FLOAT); break; + case REM: execute.invokeStatic(definition.utilityType.type, REMWOOVERLOW_FLOAT); break; + case ADD: execute.invokeStatic(definition.utilityType.type, ADDWOOVERLOW_FLOAT); break; + case SUB: execute.invokeStatic(definition.utilityType.type, SUBWOOVERLOW_FLOAT); break; + default: + throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state."); + } + + break; + case DOUBLE: + switch (token) { + case MUL: execute.invokeStatic(definition.utilityType.type, MULWOOVERLOW_DOUBLE); break; + case DIV: execute.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_DOUBLE); break; + case REM: execute.invokeStatic(definition.utilityType.type, REMWOOVERLOW_DOUBLE); break; + case ADD: execute.invokeStatic(definition.utilityType.type, ADDWOOVERLOW_DOUBLE); break; + case SUB: execute.invokeStatic(definition.utilityType.type, SUBWOOVERLOW_DOUBLE); break; + default: + throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state."); + } + + break; + default: + throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state."); + } + } else { + if ((sort == Sort.FLOAT || sort == Sort.DOUBLE) && + (token == LSH || token == USH || token == RSH || token == BWAND || token == BWXOR || token == BWOR)) { + throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state."); + } + + if (sort == Sort.DEF) { + switch (token) { + case MUL: execute.invokeStatic(definition.defobjType.type, DEF_MUL_CALL); break; + case DIV: execute.invokeStatic(definition.defobjType.type, DEF_DIV_CALL); break; + case REM: execute.invokeStatic(definition.defobjType.type, DEF_REM_CALL); break; + case ADD: execute.invokeStatic(definition.defobjType.type, DEF_ADD_CALL); break; + case SUB: execute.invokeStatic(definition.defobjType.type, DEF_SUB_CALL); break; + case LSH: execute.invokeStatic(definition.defobjType.type, DEF_LSH_CALL); break; + case USH: 
execute.invokeStatic(definition.defobjType.type, DEF_RSH_CALL); break; + case RSH: execute.invokeStatic(definition.defobjType.type, DEF_USH_CALL); break; + case BWAND: execute.invokeStatic(definition.defobjType.type, DEF_AND_CALL); break; + case BWXOR: execute.invokeStatic(definition.defobjType.type, DEF_XOR_CALL); break; + case BWOR: execute.invokeStatic(definition.defobjType.type, DEF_OR_CALL); break; + default: + throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state."); + } + } else { + switch (token) { + case MUL: execute.math(GeneratorAdapter.MUL, type.type); break; + case DIV: execute.math(GeneratorAdapter.DIV, type.type); break; + case REM: execute.math(GeneratorAdapter.REM, type.type); break; + case ADD: execute.math(GeneratorAdapter.ADD, type.type); break; + case SUB: execute.math(GeneratorAdapter.SUB, type.type); break; + case LSH: execute.math(GeneratorAdapter.SHL, type.type); break; + case USH: execute.math(GeneratorAdapter.USHR, type.type); break; + case RSH: execute.math(GeneratorAdapter.SHR, type.type); break; + case BWAND: execute.math(GeneratorAdapter.AND, type.type); break; + case BWXOR: execute.math(GeneratorAdapter.XOR, type.type); break; + case BWOR: execute.math(GeneratorAdapter.OR, type.type); break; + default: + throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state."); + } + } + } + } +} From 1010e1e5436e275cb43bee04e17d0c3d4b574175 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Thu, 17 Mar 2016 13:48:54 -0400 Subject: [PATCH 286/320] [TEST] Make test less fragile by sorting only on _uid The previous method sorted first by _score, then _uid. In certain situations, this allowed floating point errors to slightly alter the sort order, causing test failure. We only sort on _uid now, which should be deterministic and allow comparison of ten documents. Not quite as useful, but less fragile and we still check to make sure num hits and max score are identical. 
Closes #17164 --- .../java/org/elasticsearch/search/profile/QueryProfilerIT.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java b/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java index f55075045865..d98787745762 100644 --- a/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java +++ b/core/src/test/java/org/elasticsearch/search/profile/QueryProfilerIT.java @@ -131,7 +131,6 @@ public class QueryProfilerIT extends ESIntegTestCase { SearchRequestBuilder vanilla = client().prepareSearch("test") .setQuery(q) .setProfile(false) - .addSort("_score", SortOrder.DESC) .addSort("_uid", SortOrder.ASC) .setPreference("_primary") .setSearchType(SearchType.QUERY_THEN_FETCH); @@ -139,7 +138,6 @@ public class QueryProfilerIT extends ESIntegTestCase { SearchRequestBuilder profile = client().prepareSearch("test") .setQuery(q) .setProfile(true) - .addSort("_score", SortOrder.DESC) .addSort("_uid", SortOrder.ASC) .setPreference("_primary") .setSearchType(SearchType.QUERY_THEN_FETCH); From cf8c9ffabb1d3d37867a11ea0252dd7c3791f172 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 17 Mar 2016 12:21:31 -0700 Subject: [PATCH 287/320] Always set version, even for x-plugins --- build.gradle | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index 8ffc90cd31b3..7410b6cc2752 100644 --- a/build.gradle +++ b/build.gradle @@ -21,12 +21,13 @@ import org.gradle.plugins.ide.eclipse.model.SourceFolder // common maven publishing configuration subprojects { + group = 'org.elasticsearch' + version = org.elasticsearch.gradle.VersionProperties.elasticsearch + if (path.startsWith(':x-plugins')) { // don't try to configure publshing for extra plugins attached to this build return } - group = 'org.elasticsearch' - version = org.elasticsearch.gradle.VersionProperties.elasticsearch plugins.withType(MavenPublishPlugin).whenPluginAdded { 
publishing { From ace45e8bda3067d9ccb8eefa4ee456abf68f60e2 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 16 Mar 2016 19:29:16 -0400 Subject: [PATCH 288/320] Add module tests --- .../packaging/scripts/25_tar_plugins.bats | 2 +- .../scripts/50_modules_and_plugins.bats | 1 + .../packaging/scripts/50_plugins.bats | 1 - ...bash => module_and_plugin_test_cases.bash} | 45 +++++++++++-------- .../resources/packaging/scripts/modules.bash | 42 +++++++++++++++++ .../packaging/scripts/os_package.bash | 2 + .../test/resources/packaging/scripts/tar.bash | 1 + 7 files changed, 73 insertions(+), 21 deletions(-) create mode 120000 qa/vagrant/src/test/resources/packaging/scripts/50_modules_and_plugins.bats delete mode 120000 qa/vagrant/src/test/resources/packaging/scripts/50_plugins.bats rename qa/vagrant/src/test/resources/packaging/scripts/{plugin_test_cases.bash => module_and_plugin_test_cases.bash} (91%) create mode 100644 qa/vagrant/src/test/resources/packaging/scripts/modules.bash diff --git a/qa/vagrant/src/test/resources/packaging/scripts/25_tar_plugins.bats b/qa/vagrant/src/test/resources/packaging/scripts/25_tar_plugins.bats index 8f55b1eb78cb..552c404a3d6c 120000 --- a/qa/vagrant/src/test/resources/packaging/scripts/25_tar_plugins.bats +++ b/qa/vagrant/src/test/resources/packaging/scripts/25_tar_plugins.bats @@ -1 +1 @@ -plugin_test_cases.bash \ No newline at end of file +module_and_plugin_test_cases.bash \ No newline at end of file diff --git a/qa/vagrant/src/test/resources/packaging/scripts/50_modules_and_plugins.bats b/qa/vagrant/src/test/resources/packaging/scripts/50_modules_and_plugins.bats new file mode 120000 index 000000000000..552c404a3d6c --- /dev/null +++ b/qa/vagrant/src/test/resources/packaging/scripts/50_modules_and_plugins.bats @@ -0,0 +1 @@ +module_and_plugin_test_cases.bash \ No newline at end of file diff --git a/qa/vagrant/src/test/resources/packaging/scripts/50_plugins.bats b/qa/vagrant/src/test/resources/packaging/scripts/50_plugins.bats 
deleted file mode 120000 index 8f55b1eb78cb..000000000000 --- a/qa/vagrant/src/test/resources/packaging/scripts/50_plugins.bats +++ /dev/null @@ -1 +0,0 @@ -plugin_test_cases.bash \ No newline at end of file diff --git a/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash similarity index 91% rename from qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash rename to qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash index e829141def01..ed5d7d0cea18 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash @@ -33,13 +33,14 @@ ################################## # Common test cases for both tar and rpm/deb based plugin tests ################################## -# This file is symlinked to both 25_tar_plugins.bats and 50_plugins.bats so its +# This file is symlinked to both 25_tar_plugins.bats and 50_modules_and_plugins.bats so its # executed twice - once to test plugins using the tar distribution and once to # test files using the rpm distribution or the deb distribution, whichever the # system uses. 
# Load test utilities load packaging_test_utils +load modules load plugins setup() { @@ -219,16 +220,30 @@ fi install_and_check_plugin discovery ec2 aws-java-sdk-core-*.jar } -@test "[$GROUP] install lang-expression plugin" { - install_and_check_plugin lang expression +@test "[$GROUP] check ingest-grok module" { + check_module ingest-grok jcodings-*.jar joni-*.jar } -@test "[$GROUP] install lang-groovy plugin" { - install_and_check_plugin lang groovy +@test "[$GROUP] check lang-expression module" { + # we specify the version on the asm-5.0.4.jar so that the test does + # not spuriously pass if the jar is missing but the other asm jars + # are present + check_secure_module lang-expression antlr4-runtime-*.jar asm-5.0.4.jar asm-commons-*.jar asm-tree-*.jar lucene-expressions-*.jar } -@test "[$GROUP] install lang-painless plugin" { - install_and_check_plugin lang painless +@test "[$GROUP] check lang-groovy module" { + check_secure_module lang-groovy groovy-*-indy.jar +} + +@test "[$GROUP] check lang-mustache module" { + check_secure_module lang-mustache compiler-*.jar +} + +@test "[$GROUP] check lang-painless module" { + # we specify the version on the asm-5.0.4.jar so that the test does + # not spuriously pass if the jar is missing but the other asm jars + # are present + check_secure_module lang-painless antlr4-runtime-*.jar asm-5.0.4.jar asm-commons-*.jar asm-tree-*.jar } @test "[$GROUP] install javascript plugin" { @@ -247,6 +262,10 @@ fi install_and_check_plugin mapper murmur3 } +@test "[$GROUP] check reindex module" { + check_module reindex +} + @test "[$GROUP] install size mapper plugin" { install_and_check_plugin mapper size } @@ -321,18 +340,6 @@ fi remove_plugin discovery-ec2 } -@test "[$GROUP] remove lang-expression plugin" { - remove_plugin lang-expression -} - -@test "[$GROUP] remove lang-groovy plugin" { - remove_plugin lang-groovy -} - -@test "[$GROUP] remove lang-painless plugin" { - remove_plugin lang-painless -} - @test "[$GROUP] remove javascript 
plugin" { remove_plugin lang-javascript } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/modules.bash b/qa/vagrant/src/test/resources/packaging/scripts/modules.bash new file mode 100644 index 000000000000..047bd38da920 --- /dev/null +++ b/qa/vagrant/src/test/resources/packaging/scripts/modules.bash @@ -0,0 +1,42 @@ +#!/bin/sh + +# This file contains some utilities to test the elasticsearch scripts, +# the .deb/.rpm packages and the SysV/Systemd scripts. + +# WARNING: This testing file must be executed as root and can +# dramatically change your system. It removes the 'elasticsearch' +# user/group and also many directories. Do not execute this file +# unless you know exactly what you are doing. + +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +check_module() { + local name=$1 + shift + + for file in "$@"; do + assert_file_exist "$(readlink -m $ESMODULES/$name/$file)" + done + + assert_file_exist "$(readlink -m $ESMODULES/$name/$name-*.jar)" + assert_file_exist "$(readlink -m $ESMODULES/$name/plugin-descriptor.properties)" +} + +check_secure_module() { + check_module "$@" plugin-security.policy +} \ No newline at end of file diff --git a/qa/vagrant/src/test/resources/packaging/scripts/os_package.bash b/qa/vagrant/src/test/resources/packaging/scripts/os_package.bash index f48532cb3f33..f4b991e81efb 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/os_package.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/os_package.bash @@ -30,6 +30,7 @@ export_elasticsearch_paths() { export ESHOME="/usr/share/elasticsearch" export ESPLUGINS="$ESHOME/plugins" + export ESMODULES="$ESHOME/modules" export ESCONFIG="/etc/elasticsearch" export ESSCRIPTS="$ESCONFIG/scripts" export ESDATA="/var/lib/elasticsearch" @@ -82,6 +83,7 @@ verify_package_installation() { assert_file "$ESDATA" d elasticsearch elasticsearch 755 assert_file "$ESLOG" d elasticsearch elasticsearch 755 assert_file "$ESPLUGINS" d elasticsearch elasticsearch 755 + assert_file "$ESMODULES" d root root 755 assert_file "$ESPIDDIR" d elasticsearch elasticsearch 755 assert_file "$ESHOME/NOTICE.txt" f root root 644 assert_file "$ESHOME/README.textile" f root root 644 diff --git a/qa/vagrant/src/test/resources/packaging/scripts/tar.bash b/qa/vagrant/src/test/resources/packaging/scripts/tar.bash index f9bcc10525b5..56b162cdefee 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/tar.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/tar.bash @@ -68,6 +68,7 @@ move_elasticsearch() { # Export some useful paths. 
export_elasticsearch_paths() { + export ESMODULES="$ESHOME/modules" export ESPLUGINS="$ESHOME/plugins" export ESCONFIG="$ESHOME/config" export ESSCRIPTS="$ESCONFIG/scripts" From 9e739da1d9b077ecd1e2fb17fab85af5b05aa351 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 16 Mar 2016 19:58:14 -0400 Subject: [PATCH 289/320] Add new plugins --- .../scripts/module_and_plugin_test_cases.bash | 27 +++++++++++++++++++ .../resources/packaging/scripts/plugins.bash | 2 +- 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash index ed5d7d0cea18..59ba4b1d3642 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash @@ -220,6 +220,17 @@ fi install_and_check_plugin discovery ec2 aws-java-sdk-core-*.jar } +@test "[$GROUP] install ingest-attachment plugin" { + # we specify the version on the poi-3.13.jar so that the test does + # not spuriously pass if the jar is missing but the other poi jars + # are present + install_and_check_plugin ingest attachment bcprov-jdk15on-*.jar tika-core-*.jar pdfbox-*.jar poi-3.13.jar +} + +@test "[$GROUP] install ingest-geoip plugin" { + install_and_check_plugin ingest geoip geoip2-*.jar jackson-annotations-*.jar jackson-databind-*.jar maxmind-db-*.jar +} + @test "[$GROUP] check ingest-grok module" { check_module ingest-grok jcodings-*.jar joni-*.jar } @@ -266,6 +277,10 @@ fi check_module reindex } +@test "[$GROUP] install repository-hdfs plugin" { + install_and_check_plugin repository hdfs hadoop-client-*.jar hadoop-common-*.jar hadoop-annotations-*.jar hadoop-auth-*.jar hadoop-hdfs-*.jar htrace-core-*.jar guava-*.jar protobuf-java-*.jar commons-logging-*.jar commons-cli-*.jar commons-collections-*.jar commons-configuration-*.jar commons-io-*.jar 
commons-lang-*.jar servlet-api-*.jar slf4j-api-*.jar +} + @test "[$GROUP] install size mapper plugin" { install_and_check_plugin mapper size } @@ -340,6 +355,14 @@ fi remove_plugin discovery-ec2 } +@test "[$GROUP] remove ingest-attachment plugin" { + remove_plugin ingest-attachment +} + +@test "[$GROUP] remove ingest-geoip plugin" { + remove_plugin ingest-geoip +} + @test "[$GROUP] remove javascript plugin" { remove_plugin lang-javascript } @@ -364,6 +387,10 @@ fi remove_plugin repository-azure } +@test "[$GROUP] remove repository-hdfs plugin" { + remove_plugin repository-hdfs +} + @test "[$GROUP] remove repository-s3 plugin" { remove_plugin repository-s3 } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash b/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash index 8a32c982af1c..f58c6a924177 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash @@ -32,7 +32,7 @@ install_plugin() { assert_file_exist "$path" - sudo -E -u $ESPLUGIN_COMMAND_USER "$ESHOME/bin/elasticsearch-plugin" install "file://$path" + sudo -E -u $ESPLUGIN_COMMAND_USER "$ESHOME/bin/elasticsearch-plugin" install -batch "file://$path" assert_file_exist "$ESPLUGINS/$name" assert_file_exist "$ESPLUGINS/$name/plugin-descriptor.properties" From 9ed464629671eeae1868c6fbf8781f1761c30a67 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 16 Mar 2016 19:59:58 -0400 Subject: [PATCH 290/320] Plugin site-example no longer exists --- .../packaging/scripts/module_and_plugin_test_cases.bash | 4 ---- 1 file changed, 4 deletions(-) diff --git a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash index 59ba4b1d3642..9e9263a157fd 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash +++ 
b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash @@ -395,10 +395,6 @@ fi remove_plugin repository-s3 } -@test "[$GROUP] remove site example plugin" { - remove_plugin site-example -} - @test "[$GROUP] remove store-smb plugin" { remove_plugin store-smb } From ffc6ba9e202cb365b9650421a35b379d442fced6 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 16 Mar 2016 20:29:34 -0400 Subject: [PATCH 291/320] Fix installed plugins check --- .../packaging/scripts/module_and_plugin_test_cases.bash | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash index 9e9263a157fd..b654fa990ed7 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash @@ -298,7 +298,7 @@ fi } @test "[$GROUP] check the installed plugins can be listed with 'plugins list' and result matches the list of plugins in plugins pom" { - "$ESHOME/bin/elasticsearch-plugin" list | tail -n +2 | sed 's/^......//' > /tmp/installed + "$ESHOME/bin/elasticsearch-plugin" list > /tmp/installed compare_plugins_list "/tmp/installed" "'plugins list'" } From ebdacb129729d6f8f0eed2f48a1176a0490742b1 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 16 Mar 2016 20:52:10 -0400 Subject: [PATCH 292/320] Fix list of modules and official plugins --- .../org/elasticsearch/plugins/InstallPluginCommand.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index e72eb2100f61..76a05f18ffee 100644 --- a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ 
b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -97,8 +97,11 @@ class InstallPluginCommand extends Command { // TODO: make this a resource file generated by gradle static final Set MODULES = unmodifiableSet(newHashSet( + "ingest-grok", "lang-expression", - "lang-groovy")); + "lang-groovy", + "lang-painless", + "reindex")); // TODO: make this a resource file generated by gradle static final Set OFFICIAL_PLUGINS = unmodifiableSet(new LinkedHashSet<>(Arrays.asList( @@ -111,8 +114,9 @@ class InstallPluginCommand extends Command { "discovery-azure", "discovery-ec2", "discovery-gce", + "ingest-attachment", + "ingest-geoip", "lang-javascript", - "lang-painless", "lang-python", "mapper-attachments", "mapper-murmur3", From ba6820caeb88a388e4e6c17e97e67a844c97e093 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 16 Mar 2016 23:10:30 -0400 Subject: [PATCH 293/320] Fix plugin file permissions tests --- .../resources/packaging/scripts/plugins.bash | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash b/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash index f58c6a924177..1db2b7a247c1 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash @@ -91,16 +91,13 @@ install_jvm_example() { #just make sure that everything is the same as $CONFIG_DIR, which was properly set up during install config_user=$(find "$ESCONFIG" -maxdepth 0 -printf "%u") config_owner=$(find "$ESCONFIG" -maxdepth 0 -printf "%g") - config_privileges=$(find "$ESCONFIG" -maxdepth 0 -printf "%m") - assert_file "$ESCONFIG/jvm-example" d $config_user $config_owner $config_privileges - #the original file has no execute permissions and that must not change, but all other permissions - #need to be inherited from the parent config dir. We check this by applying the 111 mask to the config dir privileges. 
- for i in `seq 0 2`; do - current_perm_dir=${config_privileges:$i:1} - final_perm=$(($current_perm_dir & ~1)) - expected_file_privileges+=$final_perm - done - assert_file "$ESCONFIG/jvm-example/example.yaml" f $config_user $config_owner $expected_file_privileges + # directories should use the user file-creation mask + config_privileges=$((0777 & ~$(sudo -E -u $ESPLUGIN_COMMAND_USER sh -c umask) | 0111)) + assert_file "$ESCONFIG/jvm-example" d $config_user $config_owner $(printf "%o" $config_privileges) + # config files should not be executable and otherwise use the user + # file-creation mask + expected_file_privileges=$((0777 & ~$(sudo -E -u $ESPLUGIN_COMMAND_USER sh -c umask) & ~0111)) + assert_file "$ESCONFIG/jvm-example/example.yaml" f $config_user $config_owner $(printf "%o" $expected_file_privileges) echo "Running jvm-example's bin script...." "$ESHOME/bin/jvm-example/test" | grep test From dacb96ba61d6f6a419bfaf60c70db8bf43d5c8da Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 16 Mar 2016 23:37:38 -0400 Subject: [PATCH 294/320] Fix plugin installation logging --- .../org/elasticsearch/plugins/PluginCli.java | 18 +++++++++++++++--- .../scripts/module_and_plugin_test_cases.bash | 18 +++++++++--------- 2 files changed, 24 insertions(+), 12 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginCli.java b/core/src/main/java/org/elasticsearch/plugins/PluginCli.java index 323b872044ef..be06ea7db1c5 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginCli.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginCli.java @@ -19,10 +19,9 @@ package org.elasticsearch.plugins; -import org.apache.log4j.BasicConfigurator; -import org.apache.log4j.varia.NullAppender; import org.elasticsearch.cli.MultiCommand; import org.elasticsearch.cli.Terminal; +import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import 
org.elasticsearch.node.internal.InternalSettingsPreparer; @@ -40,7 +39,20 @@ public class PluginCli extends MultiCommand { } public static void main(String[] args) throws Exception { - BasicConfigurator.configure(new NullAppender()); + // initialize default for es.logger.level because we will not read the logging.yml + String loggerLevel = System.getProperty("es.logger.level", "INFO"); + // Set the appender for all potential log files to terminal so that other components that use the logger print out the + // same terminal. + // The reason for this is that the plugin cli cannot be configured with a file appender because when the plugin command is + // executed there is no way of knowing where the logfiles should be placed. For example, if elasticsearch + // is run as service then the logs should be at /var/log/elasticsearch but when started from the tar they should be at es.home/logs. + // Therefore we print to Terminal. + Environment loggingEnvironment = InternalSettingsPreparer.prepareEnvironment(Settings.builder() + .put("appender.terminal.type", "terminal") + .put("rootLogger", "${es.logger.level}, terminal") + .put("es.logger.level", loggerLevel) + .build(), Terminal.DEFAULT); + LogConfigurator.configure(loggingEnvironment.settings(), false); Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, Terminal.DEFAULT); exit(new PluginCli(env).main(args, Terminal.DEFAULT)); } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash index b654fa990ed7..e92c4b62b76f 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash @@ -423,14 +423,14 @@ fi local loglines=$(cat /tmp/plugin-cli-output | wc -l) if [ "$GROUP" == "TAR PLUGINS" ]; then # tar extraction does not create the plugins directory so the 
plugin tool will print an additional line that the directory will be created - [ "$loglines" -eq "7" ] || { - echo "Expected 7 lines but the output was:" + [ "$loglines" -eq "3" ] || { + echo "Expected 3 lines but the output was:" cat /tmp/plugin-cli-output false } else - [ "$loglines" -eq "6" ] || { - echo "Expected 6 lines but the output was:" + [ "$loglines" -eq "2" ] || { + echo "Expected 2 lines but the output was:" cat /tmp/plugin-cli-output false } @@ -438,17 +438,17 @@ fi remove_jvm_example local relativePath=${1:-$(readlink -m jvm-example-*.zip)} - sudo -E -u $ESPLUGIN_COMMAND_USER "$ESHOME/bin/elasticsearch-plugin" install "file://$relativePath" -Ees.logger.level=DEBUG > /tmp/plugin-cli-output + sudo -E -u $ESPLUGIN_COMMAND_USER "$ESHOME/bin/elasticsearch-plugin" install "file://$relativePath" -Des.logger.level=DEBUG > /tmp/plugin-cli-output local loglines=$(cat /tmp/plugin-cli-output | wc -l) if [ "$GROUP" == "TAR PLUGINS" ]; then - [ "$loglines" -gt "7" ] || { - echo "Expected more than 7 lines but the output was:" + [ "$loglines" -gt "3" ] || { + echo "Expected more than 3 lines but the output was:" cat /tmp/plugin-cli-output false } else - [ "$loglines" -gt "6" ] || { - echo "Expected more than 6 lines but the output was:" + [ "$loglines" -gt "2" ] || { + echo "Expected more than 2 lines but the output was:" cat /tmp/plugin-cli-output false } From 9f73152940374bb77a5b3ab7e71b605922031b58 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 17 Mar 2016 15:35:21 -0400 Subject: [PATCH 295/320] Fix plugins permissions --- .../plugins/InstallPluginCommand.java | 24 ++++++++++++++++++- distribution/build.gradle | 16 ++++++------- .../packaging/scripts/os_package.bash | 2 +- 3 files changed, 31 insertions(+), 11 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index 76a05f18ffee..b83ca5c5fc62 100644 --- 
a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -46,7 +46,9 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.nio.file.attribute.PosixFileAttributeView; +import java.nio.file.attribute.PosixFileAttributes; import java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.PosixFilePermissions; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; @@ -239,7 +241,15 @@ class InstallPluginCommand extends Command { private Path unzip(Path zip, Path pluginsDir) throws IOException, UserError { // unzip plugin to a staging temp dir - Path target = Files.createTempDirectory(pluginsDir, ".installing-"); + Set perms = new HashSet<>(); + perms.add(PosixFilePermission.OWNER_EXECUTE); + perms.add(PosixFilePermission.OWNER_READ); + perms.add(PosixFilePermission.OWNER_WRITE); + perms.add(PosixFilePermission.GROUP_READ); + perms.add(PosixFilePermission.GROUP_EXECUTE); + perms.add(PosixFilePermission.OTHERS_READ); + perms.add(PosixFilePermission.OTHERS_EXECUTE); + Path target = Files.createTempDirectory(pluginsDir, ".installing-", PosixFilePermissions.asFileAttribute(perms)); Files.createDirectories(target); boolean hasEsDir = false; @@ -428,6 +438,10 @@ class InstallPluginCommand extends Command { // create the plugin's config dir "if necessary" Files.createDirectories(destConfigDir); + final PosixFileAttributes destConfigDirAttributes = + Files.getFileAttributeView(destConfigDir.getParent(), PosixFileAttributeView.class).readAttributes(); + setOwnerGroup(destConfigDir, destConfigDirAttributes); + try (DirectoryStream stream = Files.newDirectoryStream(tmpConfigDir)) { for (Path srcFile : stream) { if (Files.isDirectory(srcFile)) { @@ -437,9 +451,17 @@ class InstallPluginCommand extends Command { Path destFile = destConfigDir.resolve(tmpConfigDir.relativize(srcFile)); if 
(Files.exists(destFile) == false) { Files.copy(srcFile, destFile); + setOwnerGroup(destFile, destConfigDirAttributes); } } } IOUtils.rm(tmpConfigDir); // clean up what we just copied } + + private static void setOwnerGroup(Path path, PosixFileAttributes attributes) throws IOException { + PosixFileAttributeView fileAttributeView = Files.getFileAttributeView(path, PosixFileAttributeView.class); + fileAttributeView.setOwner(attributes.owner()); + fileAttributeView.setGroup(attributes.group()); + } + } diff --git a/distribution/build.gradle b/distribution/build.gradle index d70f0254f3be..6936f898d957 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -337,21 +337,19 @@ configure(subprojects.findAll { ['deb', 'rpm'].contains(it.name) }) { /** * Suck up all the empty directories that we need to install into the path. */ - Closure suckUpEmptyDirectories = { path -> + Closure suckUpEmptyDirectories = { path, u, g -> into(path) { from "${packagingFiles}/${path}" includeEmptyDirs true createDirectoryEntry true - /* All of these empty directories have this ownership. We're just - lucky! 
*/ - user 'elasticsearch' - permissionGroup 'elasticsearch' + user u + permissionGroup g } } - suckUpEmptyDirectories('/var/run') - suckUpEmptyDirectories('/var/log') - suckUpEmptyDirectories('/var/lib') - suckUpEmptyDirectories('/usr/share/elasticsearch') + suckUpEmptyDirectories('/var/run', 'elasticsearch', 'elasticsearch') + suckUpEmptyDirectories('/var/log', 'elasticsearch', 'elasticsearch') + suckUpEmptyDirectories('/var/lib', 'elasticsearch', 'elasticsearch') + suckUpEmptyDirectories('/usr/share/elasticsearch', 'root', 'root') } } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/os_package.bash b/qa/vagrant/src/test/resources/packaging/scripts/os_package.bash index f4b991e81efb..bcc0fd66f2ee 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/os_package.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/os_package.bash @@ -82,7 +82,7 @@ verify_package_installation() { assert_file "$ESSCRIPTS" d root elasticsearch 750 assert_file "$ESDATA" d elasticsearch elasticsearch 755 assert_file "$ESLOG" d elasticsearch elasticsearch 755 - assert_file "$ESPLUGINS" d elasticsearch elasticsearch 755 + assert_file "$ESPLUGINS" d root root 755 assert_file "$ESMODULES" d root root 755 assert_file "$ESPIDDIR" d elasticsearch elasticsearch 755 assert_file "$ESHOME/NOTICE.txt" f root root 644 From 69ff402b62e3ddcf7acd001218a77f017a80b97e Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 17 Mar 2016 13:35:16 -0700 Subject: [PATCH 296/320] Still create the archive name for core as elasticsearch --- core/build.gradle | 1 + 1 file changed, 1 insertion(+) diff --git a/core/build.gradle b/core/build.gradle index 329f255688ad..3f66db767b59 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -28,6 +28,7 @@ apply plugin: 'nebula.maven-scm' apply plugin: 'nebula.source-jar' apply plugin: 'nebula.javadoc-jar' +archivesBaseName = 'elasticsearch' publishing { publications { nebula { From 6ecfc7adbe795b19824c769e0b9fc42b1bdcd104 Mon Sep 17 00:00:00 2001 
From: Jason Tedor Date: Thu, 17 Mar 2016 22:11:59 -0400 Subject: [PATCH 297/320] Permissions tests for modules and plugins --- .../resources/packaging/scripts/modules.bash | 16 ++++++++++--- .../scripts/packaging_test_utils.bash | 24 +++++++++++++++++++ .../resources/packaging/scripts/plugins.bash | 15 ++++++++++-- 3 files changed, 50 insertions(+), 5 deletions(-) diff --git a/qa/vagrant/src/test/resources/packaging/scripts/modules.bash b/qa/vagrant/src/test/resources/packaging/scripts/modules.bash index 047bd38da920..3c2c643cb45c 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/modules.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/modules.bash @@ -29,12 +29,22 @@ check_module() { local name=$1 shift + assert_module_or_plugin_directory "$ESMODULES/$name" + for file in "$@"; do - assert_file_exist "$(readlink -m $ESMODULES/$name/$file)" + assert_module_file "$ESMODULES/$name/$file" done - assert_file_exist "$(readlink -m $ESMODULES/$name/$name-*.jar)" - assert_file_exist "$(readlink -m $ESMODULES/$name/plugin-descriptor.properties)" + assert_module_file "$ESMODULES/$name/$name-*.jar" + assert_module_file "$ESMODULES/$name/plugin-descriptor.properties" +} + +assert_module_file() { + local file=$1 + shift + + assert_file_exist "$(readlink -m $file)" + assert_module_or_plugin_file $file } check_secure_module() { diff --git a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash index 64006483f68b..1077f8f30440 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash @@ -179,6 +179,30 @@ assert_file() { fi } +assert_module_or_plugin_directory() { + local directory=$1 + shift + + #owner group and permissions vary depending on how es was installed + #just make sure that everything is the same as $CONFIG_DIR, which was properly set up during 
install + config_user=$(find "$ESHOME" -maxdepth 0 -printf "%u") + config_owner=$(find "$ESHOME" -maxdepth 0 -printf "%g") + # directories should use the user file-creation mask + config_privileges=$((0777 & ~$(sudo -E -u $ESPLUGIN_COMMAND_USER sh -c umask) | 0111)) + + assert_file $directory d $config_user $config_owner $(printf "%o" $config_privileges) +} + +assert_module_or_plugin_file() { + local file=$1 + shift + + # config files should not be executable and otherwise use the user + # file-creation mask + expected_file_privileges=$((0777 & ~$(sudo -E -u $ESPLUGIN_COMMAND_USER sh -c umask) & ~0111)) + assert_file $file f $config_user $config_owner $(printf "%o" $expected_file_privileges) +} + assert_output() { echo "$output" | grep -E "$1" } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash b/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash index 1db2b7a247c1..2af5009e3d30 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash @@ -133,14 +133,25 @@ install_and_check_plugin() { fi install_jvm_plugin $fullName "$(readlink -m $fullName-*.zip)" + + assert_module_or_plugin_directory "$ESPLUGINS/$fullName" + if [ $prefix == 'analysis' ]; then - assert_file_exist "$(readlink -m $ESPLUGINS/$fullName/lucene-analyzers-$name-*.jar)" + assert_plugin_file "$ESPLUGINS/$fullName/lucene-analyzers-$name-*.jar" fi for file in "$@"; do - assert_file_exist "$(readlink -m $ESPLUGINS/$fullName/$file)" + assert_plugin_file "$ESPLUGINS/$fullName/$file" done } +assert_plugin_file() { + local file=$1 + shift + + assert_file_exist "$(readlink -m $file)" + assert_module_or_plugin_file $file +} + # Compare a list of plugin names to the plugins in the plugins pom and see if they are the same # $1 the file containing the list of plugins we want to compare to # $2 description of the source of the plugin list From 6bd19cff6729f2e5bfbb7eeee78f8dae2c274ada Mon Sep 17 00:00:00 
2001 From: Jason Tedor Date: Thu, 17 Mar 2016 22:34:33 -0400 Subject: [PATCH 298/320] Centralize existence check for modules and plugins --- qa/vagrant/src/test/resources/packaging/scripts/modules.bash | 1 - .../test/resources/packaging/scripts/packaging_test_utils.bash | 2 ++ qa/vagrant/src/test/resources/packaging/scripts/plugins.bash | 1 - 3 files changed, 2 insertions(+), 2 deletions(-) diff --git a/qa/vagrant/src/test/resources/packaging/scripts/modules.bash b/qa/vagrant/src/test/resources/packaging/scripts/modules.bash index 3c2c643cb45c..f6c39ca48522 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/modules.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/modules.bash @@ -43,7 +43,6 @@ assert_module_file() { local file=$1 shift - assert_file_exist "$(readlink -m $file)" assert_module_or_plugin_file $file } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash index 1077f8f30440..c2c19236f48c 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash @@ -197,6 +197,8 @@ assert_module_or_plugin_file() { local file=$1 shift + assert_file_exist "$(readlink -m $file)" + # config files should not be executable and otherwise use the user # file-creation mask expected_file_privileges=$((0777 & ~$(sudo -E -u $ESPLUGIN_COMMAND_USER sh -c umask) & ~0111)) diff --git a/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash b/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash index 2af5009e3d30..3931c91e7769 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash @@ -148,7 +148,6 @@ assert_plugin_file() { local file=$1 shift - assert_file_exist "$(readlink -m $file)" assert_module_or_plugin_file $file } From 
7898522514c1ea532da7d7d0760c586f39c482c5 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 18 Mar 2016 09:51:00 +0100 Subject: [PATCH 299/320] Provide better error message when an incompatible node connects to a node We should give a better exception message when an incompatible node connects and we receive a message. This commit adds a clear exception based on the protocol version received instead of throwing cryptic messages about not fully read buffer etc. Relates to #17090 --- .../elasticsearch/transport/netty/MessageChannelHandler.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java b/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java index 302f8296ad32..9eef44011447 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java @@ -127,6 +127,10 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler { } streamIn = compressor.streamInput(streamIn); } + if (version.onOrAfter(Version.CURRENT.minimumCompatibilityVersion()) == false || version.major != Version.CURRENT.major) { + throw new IllegalStateException("Received message from unsupported version: [" + version + + "] minimal compatible version is: [" +Version.CURRENT.minimumCompatibilityVersion() + "]"); + } streamIn.setVersion(version); if (TransportStatus.isRequest(status)) { threadContext.readHeaders(streamIn); From 1dd2be81c36eb0836d2cdb69b8156a2e8710fa12 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 17 Mar 2016 17:29:10 +0100 Subject: [PATCH 300/320] nested / parent child: Removed `total` score mode in favour of `sum` score mode. 
Closes #17083 --- .../index/query/HasChildQueryBuilder.java | 2 +- .../index/query/HasChildQueryParser.java | 12 +++++++++++- .../index/query/NestedQueryBuilder.java | 2 +- .../index/query/NestedQueryParser.java | 15 +-------------- .../index/query/HasChildQueryParserTests.java | 7 ++++++- .../migration/migrate_5_0/search.asciidoc | 4 ++-- 6 files changed, 22 insertions(+), 20 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java index 3c5ca1ce4444..cb1c59eab016 100644 --- a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java @@ -189,7 +189,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilderhas_child queries. @@ -104,12 +105,21 @@ public class HasChildQueryParser implements QueryParser { return ScoreMode.Max; } else if ("avg".equals(scoreModeString)) { return ScoreMode.Avg; - } else if ("total".equals(scoreModeString)) { + } else if ("sum".equals(scoreModeString)) { return ScoreMode.Total; } throw new IllegalArgumentException("No score mode for child query [" + scoreModeString + "] found"); } + public static String scoreModeAsString(ScoreMode scoreMode) { + if (scoreMode == ScoreMode.Total) { + // Lucene uses 'total' but 'sum' is more consistent with other elasticsearch APIs + return "sum"; + } else { + return scoreMode.name().toLowerCase(Locale.ROOT); + } + } + @Override public HasChildQueryBuilder getBuilderPrototype() { return HasChildQueryBuilder.PROTOTYPE; diff --git a/core/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index 596c2499211f..bd5f348db33d 100644 --- a/core/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -121,7 
+121,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder query.toXContent(builder, params); builder.field(NestedQueryParser.PATH_FIELD.getPreferredName(), path); if (scoreMode != null) { - builder.field(NestedQueryParser.SCORE_MODE_FIELD.getPreferredName(), scoreMode.name().toLowerCase(Locale.ROOT)); + builder.field(NestedQueryParser.SCORE_MODE_FIELD.getPreferredName(), HasChildQueryParser.scoreModeAsString(scoreMode)); } printBoostAndQueryName(builder); if (queryInnerHits != null) { diff --git a/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java index ba5d7c2447e2..218919f7ed22 100644 --- a/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java @@ -68,20 +68,7 @@ public class NestedQueryParser implements QueryParser { } else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) { boost = parser.floatValue(); } else if (parseContext.parseFieldMatcher().match(currentFieldName, SCORE_MODE_FIELD)) { - String sScoreMode = parser.text(); - if ("avg".equals(sScoreMode)) { - scoreMode = ScoreMode.Avg; - } else if ("min".equals(sScoreMode)) { - scoreMode = ScoreMode.Min; - } else if ("max".equals(sScoreMode)) { - scoreMode = ScoreMode.Max; - } else if ("total".equals(sScoreMode) || "sum".equals(sScoreMode)) { - scoreMode = ScoreMode.Total; - } else if ("none".equals(sScoreMode)) { - scoreMode = ScoreMode.None; - } else { - throw new ParsingException(parser.getTokenLocation(), "illegal score_mode for nested query [" + sScoreMode + "]"); - } + scoreMode = HasChildQueryParser.parseScoreMode(parser.text()); } else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) { queryName = parser.text(); } else { diff --git a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryParserTests.java 
b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryParserTests.java index 7be9a6c74fff..dc8ebc6c4ba1 100644 --- a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryParserTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryParserTests.java @@ -27,22 +27,27 @@ import static org.hamcrest.Matchers.is; public class HasChildQueryParserTests extends ESTestCase { public void testMinFromString() { assertThat("fromString(min) != MIN", ScoreMode.Min, equalTo(HasChildQueryParser.parseScoreMode("min"))); + assertThat("min", equalTo(HasChildQueryParser.scoreModeAsString(ScoreMode.Min))); } public void testMaxFromString() { assertThat("fromString(max) != MAX", ScoreMode.Max, equalTo(HasChildQueryParser.parseScoreMode("max"))); + assertThat("max", equalTo(HasChildQueryParser.scoreModeAsString(ScoreMode.Max))); } public void testAvgFromString() { assertThat("fromString(avg) != AVG", ScoreMode.Avg, equalTo(HasChildQueryParser.parseScoreMode("avg"))); + assertThat("avg", equalTo(HasChildQueryParser.scoreModeAsString(ScoreMode.Avg))); } public void testSumFromString() { - assertThat("fromString(total) != SUM", ScoreMode.Total, equalTo(HasChildQueryParser.parseScoreMode("total"))); + assertThat("fromString(total) != SUM", ScoreMode.Total, equalTo(HasChildQueryParser.parseScoreMode("sum"))); + assertThat("sum", equalTo(HasChildQueryParser.scoreModeAsString(ScoreMode.Total))); } public void testNoneFromString() { assertThat("fromString(none) != NONE", ScoreMode.None, equalTo(HasChildQueryParser.parseScoreMode("none"))); + assertThat("none", equalTo(HasChildQueryParser.scoreModeAsString(ScoreMode.None))); } /** diff --git a/docs/reference/migration/migrate_5_0/search.asciidoc b/docs/reference/migration/migrate_5_0/search.asciidoc index 48807bf187ac..5f1b483cd245 100644 --- a/docs/reference/migration/migrate_5_0/search.asciidoc +++ b/docs/reference/migration/migrate_5_0/search.asciidoc @@ -113,8 +113,8 @@ in favour of `query` and 
`no_match_query`. * The `collect_payloads` parameter of the `span_near` query has been deprecated. Payloads will be loaded when needed. -* The `score_type` parameter to the `has_child` and `has_parent` queries has been removed in favour of `score_mode`. - Also, the `sum` score mode has been removed in favour of the `total` mode. +* The `score_type` parameter to the `nested`, has_child` and `has_parent` queries has been removed in favour of `score_mode`. + Also, the `total` score mode has been removed in favour of the `sum` mode. * When the `max_children` parameter was set to `0` on the `has_child` query then there was no upper limit on how many child documents were allowed to From 3b17ddcd46fdb86f3da7fa4666bb4cb4f0be8366 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 17 Mar 2016 17:31:39 +0100 Subject: [PATCH 301/320] Removed old 1.x parent/child logic that should have been removed. `0` really means, don't match any child docs. --- .../index/query/HasChildQueryBuilder.java | 7 +--- .../search/child/ChildQuerySearchIT.java | 40 +++++++++---------- .../migration/migrate_5_0/search.asciidoc | 10 ++--- 3 files changed, 26 insertions(+), 31 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java index cb1c59eab016..324552d7b54e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java @@ -257,12 +257,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder Date: Fri, 18 Mar 2016 10:37:44 +0100 Subject: [PATCH 302/320] Revert "Merge pull request #17182 from s1monw/issues/17090" This reverts commit 0fe47f813610e9f13ba45a6539d9ac8dbf8490cc, reversing changes made to 3b17ddcd46fdb86f3da7fa4666bb4cb4f0be8366. 
--- .../elasticsearch/transport/netty/MessageChannelHandler.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java b/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java index 9eef44011447..302f8296ad32 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java @@ -127,10 +127,6 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler { } streamIn = compressor.streamInput(streamIn); } - if (version.onOrAfter(Version.CURRENT.minimumCompatibilityVersion()) == false || version.major != Version.CURRENT.major) { - throw new IllegalStateException("Received message from unsupported version: [" + version - + "] minimal compatible version is: [" +Version.CURRENT.minimumCompatibilityVersion() + "]"); - } streamIn.setVersion(version); if (TransportStatus.isRequest(status)) { threadContext.readHeaders(streamIn); From 2dffad9ec34713a7b75c065d748951f75276607f Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Fri, 18 Mar 2016 10:44:16 +0100 Subject: [PATCH 303/320] Docs: Display reindex/update by query API and fix build doc issue The documentation existed, but was not linked anywhere. Also fixed the docs to make sure they build with this enabled. 
--- docs/reference/docs.asciidoc | 4 ++++ docs/reference/docs/reindex.asciidoc | 6 ++---- docs/reference/docs/update-by-query.asciidoc | 11 ++++------- 3 files changed, 10 insertions(+), 11 deletions(-) diff --git a/docs/reference/docs.asciidoc b/docs/reference/docs.asciidoc index a35fa4c4a89d..465d2e60c772 100644 --- a/docs/reference/docs.asciidoc +++ b/docs/reference/docs.asciidoc @@ -29,10 +29,14 @@ include::docs/delete.asciidoc[] include::docs/update.asciidoc[] +include::docs/update-by-query.asciidoc[] + include::docs/multi-get.asciidoc[] include::docs/bulk.asciidoc[] +include::docs/reindex.asciidoc[] + include::docs/termvectors.asciidoc[] include::docs/multi-termvectors.asciidoc[] diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index 1b5483d5ee12..8173503054ff 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -1,5 +1,5 @@ [[docs-reindex]] -==== Reindex API +== Reindex API `_reindex`'s most basic form just copies documents from one index to another. This will copy documents from `twitter` into `new_twitter`: @@ -420,9 +420,7 @@ will finish when their sum is equal to the `total` field. [float] -=== Examples - -==== Change the name of a field +=== Reindex to change the name of a field `_reindex` can be used to build a copy of an index with renamed fields. Say you create an index containing documents that look like this: diff --git a/docs/reference/docs/update-by-query.asciidoc b/docs/reference/docs/update-by-query.asciidoc index c3f57deeaae7..93ad2f698e1f 100644 --- a/docs/reference/docs/update-by-query.asciidoc +++ b/docs/reference/docs/update-by-query.asciidoc @@ -1,5 +1,5 @@ [[docs-update-by-query]] -==== Update By Query API +== Update By Query API The simplest usage of `_update_by_query` just performs an update on every document in the index without changing the source. 
This is useful to @@ -101,8 +101,8 @@ Just as in {ref}/docs-update.html[Update API] you can set `ctx.op = "noop"` if your script decides that it doesn't have to make any changes. That will cause `_update_by_query` to omit that document from its updates. Setting `ctx.op` to anything else is an error. If you want to delete by a query you can use the -<> instead. Setting any other -field in `ctx` is an error. +{plugins}/delete-by-query.html[Delete by Query plugin] instead. Setting any +other field in `ctx` is an error. Note that we stopped specifying `conflicts=proceed`. In this case we want a version conflict to abort the process so we can handle the failure. @@ -267,11 +267,8 @@ progress by adding the `updated`, `created`, and `deleted` fields. The request will finish when their sum is equal to the `total` field. -[float] -=== Examples - [[picking-up-a-new-property]] -==== Pick up a new property +=== Pick up a new property Say you created an index without dynamic mapping, filled it with data, and then added a mapping value to pick up more fields from the data: From dc21ab75768ac9259ba8bf72d2d878e4e476de5a Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Fri, 18 Mar 2016 10:57:16 +0100 Subject: [PATCH 304/320] Docs: Corrected behaviour of max_token_length in standard tokenizer --- docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc index c8b405bf8207..42dbe5a864ac 100644 --- a/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc @@ -13,6 +13,6 @@ type: |======================================================================= |Setting |Description |`max_token_length` |The maximum token length. If a token is seen that -exceeds this length then it is discarded. Defaults to `255`. 
+exceeds this length then it is split at `max_token_length` intervals. Defaults to `255`. |======================================================================= From 99321f068f96930de6b3535ce3979efeade452db Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 18 Mar 2016 10:38:16 +0100 Subject: [PATCH 305/320] Revert "Revert "Merge pull request #17182 from s1monw/issues/17090"" This reverts commit b693a520ee3e4622059bf450bf0cad5c2f8d54aa. --- .../transport/netty/MessageChannelHandler.java | 4 ++++ .../transport/AbstractSimpleTransportTestCase.java | 8 +++++--- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java b/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java index 302f8296ad32..9eef44011447 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java @@ -127,6 +127,10 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler { } streamIn = compressor.streamInput(streamIn); } + if (version.onOrAfter(Version.CURRENT.minimumCompatibilityVersion()) == false || version.major != Version.CURRENT.major) { + throw new IllegalStateException("Received message from unsupported version: [" + version + + "] minimal compatible version is: [" +Version.CURRENT.minimumCompatibilityVersion() + "]"); + } streamIn.setVersion(version); if (TransportStatus.isRequest(status)) { threadContext.readHeaders(streamIn); diff --git a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 454fa836b8e9..2a25b86bc83a 100644 --- a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -56,11 +56,11 
@@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { protected ThreadPool threadPool; - protected static final Version version0 = Version.fromId(/*0*/99); + protected static final Version version0 = Version.CURRENT.minimumCompatibilityVersion(); protected DiscoveryNode nodeA; protected MockTransportService serviceA; - protected static final Version version1 = Version.fromId(199); + protected static final Version version1 = Version.fromId(Version.CURRENT.id+1); protected DiscoveryNode nodeB; protected MockTransportService serviceB; @@ -542,12 +542,13 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } public void testTimeoutSendExceptionWithDelayedResponse() throws Exception { + CountDownLatch doneLatch = new CountDownLatch(1); serviceA.registerRequestHandler("sayHelloTimeoutDelayedResponse", StringMessageRequest::new, ThreadPool.Names.GENERIC, new TransportRequestHandler() { @Override public void messageReceived(StringMessageRequest request, TransportChannel channel) { TimeValue sleep = TimeValue.parseTimeValue(request.message, null, "sleep"); try { - Thread.sleep(sleep.millis()); + doneLatch.await(sleep.millis(), TimeUnit.MILLISECONDS); } catch (InterruptedException e) { // ignore } @@ -625,6 +626,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } serviceA.removeHandler("sayHelloTimeoutDelayedResponse"); + doneLatch.countDown(); } @TestLogging(value = "test. 
transport.tracer:TRACE") From d4abfb2a878e28670f5a202078a3fb8139a67e69 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 18 Mar 2016 08:25:21 -0400 Subject: [PATCH 306/320] Centralize umask utilities in bats tests --- .../scripts/packaging_test_utils.bash | 20 +++++++++++++++++-- .../resources/packaging/scripts/plugins.bash | 4 ++-- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash index c2c19236f48c..cb18363d60aa 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash @@ -188,7 +188,7 @@ assert_module_or_plugin_directory() { config_user=$(find "$ESHOME" -maxdepth 0 -printf "%u") config_owner=$(find "$ESHOME" -maxdepth 0 -printf "%g") # directories should use the user file-creation mask - config_privileges=$((0777 & ~$(sudo -E -u $ESPLUGIN_COMMAND_USER sh -c umask) | 0111)) + config_privileges=$(executable_privileges_for_user_from_umask $ESPLUGIN_COMMAND_USER) assert_file $directory d $config_user $config_owner $(printf "%o" $config_privileges) } @@ -201,7 +201,7 @@ assert_module_or_plugin_file() { # config files should not be executable and otherwise use the user # file-creation mask - expected_file_privileges=$((0777 & ~$(sudo -E -u $ESPLUGIN_COMMAND_USER sh -c umask) & ~0111)) + expected_file_privileges=$(file_privileges_for_user_from_umask $ESPLUGIN_COMMAND_USER) assert_file $file f $config_user $config_owner $(printf "%o" $expected_file_privileges) } @@ -473,3 +473,19 @@ install_script() { echo "Installing $script to $ESSCRIPTS" cp $script $ESSCRIPTS } + +# permissions from the user umask with the executable bit set +executable_privileges_for_user_from_umask() { + local user=$1 + shift + + echo $((0777 & ~$(sudo -E -u $user sh -c umask) | 0111)) +} + +# permissions from the user umask 
without the executable bit set +file_privileges_for_user_from_umask() { + local user=$1 + shift + + echo $((0777 & ~$(sudo -E -u $user sh -c umask) & ~0111)) +} diff --git a/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash b/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash index 3931c91e7769..da6b9a444272 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash @@ -92,11 +92,11 @@ install_jvm_example() { config_user=$(find "$ESCONFIG" -maxdepth 0 -printf "%u") config_owner=$(find "$ESCONFIG" -maxdepth 0 -printf "%g") # directories should user the user file-creation mask - config_privileges=$((0777 & ~$(sudo -E -u $ESPLUGIN_COMMAND_USER sh -c umask) | 0111)) + config_privileges=$(executable_privileges_for_user_from_umask $ESPLUGIN_COMMAND_USER) assert_file "$ESCONFIG/jvm-example" d $config_user $config_owner $(printf "%o" $config_privileges) # config files should not be executable and otherwise use the user # file-creation mask - expected_file_privileges=$((0777 & ~$(sudo -E -u $ESPLUGIN_COMMAND_USER sh -c umask) & ~0111)) + expected_file_privileges=$(file_privileges_for_user_from_umask $ESPLUGIN_COMMAND_USER) assert_file "$ESCONFIG/jvm-example/example.yaml" f $config_user $config_owner $(printf "%o" $expected_file_privileges) echo "Running jvm-example's bin script...." 
From 4506b7ad822318fa8f83ddcba18dc419cffb2c49 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Fri, 18 Mar 2016 13:26:01 +0100 Subject: [PATCH 307/320] Docs: Fixed bad asciidoc link --- docs/reference/docs/update-by-query.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/docs/update-by-query.asciidoc b/docs/reference/docs/update-by-query.asciidoc index 93ad2f698e1f..13b5f6fc0ebb 100644 --- a/docs/reference/docs/update-by-query.asciidoc +++ b/docs/reference/docs/update-by-query.asciidoc @@ -101,7 +101,7 @@ Just as in {ref}/docs-update.html[Update API] you can set `ctx.op = "noop"` if your script decides that it doesn't have to make any changes. That will cause `_update_by_query` to omit that document from its updates. Setting `ctx.op` to anything else is an error. If you want to delete by a query you can use the -{plugins}/delete-by-query.html[Delete by Query plugin] instead. Setting any +{plugins}/plugins-delete-by-query.html[Delete by Query plugin] instead. Setting any other field in `ctx` is an error. Note that we stopped specifying `conflicts=proceed`. 
In this case we want a From e07b6a2641fe26b0e843dafbcd833adb973aab0c Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Fri, 18 Mar 2016 14:51:49 +0100 Subject: [PATCH 308/320] Docs: Added 5.0.0-alpha1 release notes --- docs/reference/release-notes.asciidoc | 8 +- .../release-notes/5.0.0-alpha1-2x.asciidoc | 600 +++++++++++++++ .../release-notes/5.0.0-alpha1.asciidoc | 688 ++++++++++++++++++ 3 files changed, 1295 insertions(+), 1 deletion(-) create mode 100644 docs/reference/release-notes/5.0.0-alpha1-2x.asciidoc create mode 100644 docs/reference/release-notes/5.0.0-alpha1.asciidoc diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index 267525b1b3cb..f9391cece063 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -3,5 +3,11 @@ [partintro] -- -This section will summarize the changes in released versions. +This section summarizes the changes in each release. + +* <> +* <> + -- +include::release-notes/5.0.0-alpha1.asciidoc[] +include::release-notes/5.0.0-alpha1-2x.asciidoc[] diff --git a/docs/reference/release-notes/5.0.0-alpha1-2x.asciidoc b/docs/reference/release-notes/5.0.0-alpha1-2x.asciidoc new file mode 100644 index 000000000000..061f3ae4839c --- /dev/null +++ b/docs/reference/release-notes/5.0.0-alpha1-2x.asciidoc @@ -0,0 +1,600 @@ +[[release-notes-5.0.0-alpha1-2x]] +== 5.0.0-alpha1 Release Notes (Changes previously released in 2.x) + +The changes listed below were first released in the 2.x series. Changes +released for the first time in Elasticsearch 5.0.0-alpha1 are listed in +<>. 
+ + + +[[breaking-5.0.0-alpha1-2x]] +[float] +=== Breaking changes + +Allocation:: +* Speed up shard balancer by reusing shard model while moving shards that can no longer be allocated to a node {pull}16926[#16926] + +Index APIs:: +* Change Field stats API response to include both number and string based min and max values {pull}14674[#14674] (issue: {issue}14404[#14404]) +* Add Force Merge API, deprecate Optimize API {pull}13778[#13778] + +Internal:: +* Forbid changing thread pool types {pull}14367[#14367] (issues: {issue}14294[#14294], {issue}2509[#2509], {issue}2858[#2858], {issue}5152[#5152]) + +Logging:: +* Log cluster health status changes {pull}14557[#14557] (issue: {issue}11657[#11657]) + +Mapping:: +* Add per-index setting to limit number of nested fields {pull}15989[#15989] (issue: {issue}14983[#14983]) + +Nested Docs:: +* If sorting by nested field then the `nested_path` should always be specified {pull}13429[#13429] (issue: {issue}13420[#13420]) + +Scripting:: +* Filter classes loaded by scripts {pull}15262[#15262] +* Lock down javascript and python script engines better {pull}13924[#13924] + +Search:: +* Limit the size of the result window to a dynamic property {pull}13188[#13188] (issue: {issue}9311[#9311]) + + + +[[feature-5.0.0-alpha1-2x]] +[float] +=== New features + +Aggregations:: +* Adds geo_centroid metric aggregator {pull}13846[#13846] (issue: {issue}13621[#13621]) +* Add `percentiles_bucket` pipeline aggregation {pull}13186[#13186] +* Add `stats_bucket` / `extended_stats_bucket` pipeline aggs {pull}13128[#13128] + +Geo:: +* Add CONTAINS relation to geo_shape query {pull}14810[#14810] (issue: {issue}14713[#14713]) +* Add support for Lucene 5.4 GeoPoint queries {pull}14537[#14537] +* Add GeoPointV2 Field Mapping {pull}14536[#14536] + +Network:: +* Allow binding to multiple addresses. 
{pull}13954[#13954] (issue: {issue}13592[#13592]) + +Plugin Analysis Phonetic:: +* Add support for `daitch_mokotoff` {pull}14834[#14834] + +Plugin Repository S3:: +* Add support for S3 storage class {pull}13656[#13656] (issue: {issue}13655[#13655]) + +Plugins:: +* Decentralize plugin security {pull}14108[#14108] + +Search:: +* Add query profiler {pull}14889[#14889] (issues: {issue}12974[#12974], {issue}6699[#6699]) + + + +[[enhancement-5.0.0-alpha1-2x]] +[float] +=== Enhancements + +Aliases:: +* Add support to _aliases endpoint to specify multiple indices and aliases in one action {pull}15305[#15305] (issue: {issue}15186[#15186]) + +Allocation:: +* Skip capturing least/most FS info for an FS with no total {pull}16001[#16001] (issue: {issue}15919[#15919]) +* Speed improvements for BalancedShardsAllocator {pull}15678[#15678] (issue: {issue}6372[#6372]) +* Simplify delayed shard allocation {pull}14808[#14808] +* Add cluster-wide setting for total shard limit {pull}14563[#14563] (issue: {issue}14456[#14456]) +* Early terminate high disk watermark checks on single data node cluster {pull}13882[#13882] (issue: {issue}9391[#9391]) +* Also use PriorityComparator in shard balancer {pull}13256[#13256] (issue: {issue}13249[#13249]) +* Add support for filtering by publish IP address {pull}8801[#8801] + +Analysis:: +* Add detail response support for _analyze API {pull}11660[#11660] (issue: {issue}11076[#11076]) + +CAT API:: +* Add sync_id to cat shards API {pull}14712[#14712] (issue: {issue}14705[#14705]) +* Add duration field to /_cat/snapshots {pull}14385[#14385] +* Add cat API for repositories and snapshots {pull}14247[#14247] (issue: {issue}13919[#13919]) +* Adds disk used by indices to _cat/allocation {pull}13783[#13783] (issue: {issue}13529[#13529]) + +CRUD:: + +Cluster:: +* Shard state action request logging {pull}16396[#16396] +* Safe cluster state task notifications {pull}15777[#15777] +* Reroute once per batch of shard failures {pull}15510[#15510] +* Add callback for 
publication of new cluster state {pull}15494[#15494] (issue: {issue}15482[#15482]) +* Use general cluster state batching mechanism for shard started {pull}15023[#15023] (issues: {issue}14725[#14725], {issue}14899[#14899]) +* Use general cluster state batching mechanism for shard failures {pull}15016[#15016] (issues: {issue}14725[#14725], {issue}14899[#14899]) +* Set an newly created IndexShard's ShardRouting before exposing it to operations {pull}14918[#14918] (issue: {issue}10708[#10708]) +* Uniform exceptions for TransportMasterNodeAction {pull}14737[#14737] + +Core:: +* Remove log4j exception hiding {pull}16834[#16834] +* Avoid cloning MessageDigest instances {pull}16479[#16479] +* Add a hard check to ensure we are running with the expected lucene version {pull}16305[#16305] (issue: {issue}16301[#16301]) +* If we can't get a MAC address for the node, use a dummy one {pull}15266[#15266] (issue: {issue}10099[#10099]) +* Simplify shard inactive logging {pull}15259[#15259] (issue: {issue}15252[#15252]) +* Simplify IndexingMemoryController#checkIdle {pull}15252[#15252] (issue: {issue}15251[#15251]) +* IndexingMemoryController should not track shard index states {pull}15251[#15251] (issues: {issue}13918[#13918], {issue}15225[#15225]) +* Verify Checksum once it has been fully written to fail as soon as possible {pull}13896[#13896] + +Discovery:: +* Don't allow nodes with missing custom meta data to join cluster {pull}15401[#15401] (issue: {issue}13445[#13445]) + +Exceptions:: +* Added file name to exceptions when failing to read index state {pull}16850[#16850] (issue: {issue}16713[#16713]) +* Add Exception class name to message in `NotSerializableExceptionWrapper` {pull}16325[#16325] +* Deduplicate cause if already contained in shard failures {pull}14432[#14432] +* Give a better exception when running from freebsd jail without enforce_statfs=1 {pull}14135[#14135] (issue: {issue}12018[#12018]) +* Make root_cause of field conflicts more obvious {pull}13976[#13976] 
(issue: {issue}12839[#12839]) +* Use a dedicated id to serialize EsExceptions instead of it's class name. {pull}13629[#13629] + +Fielddata:: +* Update GeoPoint FieldData for GeoPointV2 {pull}14345[#14345] + +Geo:: +* Upgrade GeoPointField to use Lucene 5.5 PrefixEncoding {pull}16482[#16482] +* Geo: Fix toString() in GeoDistanceRangeQuery and GeoPolygonQuery {pull}15026[#15026] +* Enable GeoPointV2 with backward compatibility testing {pull}14667[#14667] (issues: {issue}10761[#10761], {issue}11159[#11159], {issue}9859[#9859]) +* Refactor Geo utilities to Lucene 5.4 {pull}14339[#14339] + +Index APIs:: +* Add option to disable closing indices {pull}14169[#14169] (issue: {issue}14168[#14168]) + +Index Templates:: +* Disallow index template pattern to be the same as an alias name {pull}15184[#15184] (issue: {issue}14842[#14842]) + +Internal:: +* Cleanup search sub transport actions and collapse o.e.action.search.type package into o.e.action.search {pull}16758[#16758] (issue: {issue}11710[#11710]) +* Simplify the Text API. 
{pull}15511[#15511] +* Simpler using compressed oops flag representation {pull}15509[#15509] (issue: {issue}15489[#15489]) +* Info on compressed ordinary object pointers {pull}15489[#15489] (issues: {issue}13187[#13187], {issue}455[#455]) +* Explicitly log cluster state update failures {pull}15428[#15428] (issues: {issue}14899[#14899], {issue}15016[#15016], {issue}15023[#15023]) +* Use transport service to handle RetryOnReplicaException to execute replica action on the current node {pull}15363[#15363] +* Make IndexShard operation be more explicit about whether they are expected to run on a primary or replica {pull}15282[#15282] +* Avoid trace logging allocations in TransportBroadcastByNodeAction {pull}15221[#15221] +* Only trace log shard not available exceptions {pull}14950[#14950] (issue: {issue}14927[#14927]) +* Transport options should be immutable {pull}14760[#14760] +* Fix dangling comma in ClusterBlock#toString {pull}14483[#14483] +* Improve some logging around master election and cluster state {pull}14481[#14481] +* Add System#exit(), Runtime#exit() and Runtime#halt() to forbidden APIs {pull}14473[#14473] (issue: {issue}12596[#12596]) +* Simplify XContent detection. {pull}14472[#14472] +* Add threadgroup isolation. {pull}14353[#14353] +* Cleanup plugin security {pull}14311[#14311] +* Add workaround for JDK-8014008 {pull}14274[#14274] +* Refactor retry logic for TransportMasterNodeAction {pull}14222[#14222] +* Remove MetaDataSerivce and it's semaphores {pull}14159[#14159] (issue: {issue}1296[#1296]) +* Cleanup IndexMetaData {pull}14119[#14119] +* Add SpecialPermission to guard exceptions to security policy. {pull}13854[#13854] +* Clean up scripting permissions. 
{pull}13844[#13844] +* Factor groovy out of core into lang-groovy {pull}13834[#13834] (issue: {issue}13725[#13725]) +* More helpful error message on parameter order {pull}13737[#13737] +* Factor expressions scripts out to lang-expression plugin {pull}13726[#13726] (issue: {issue}13725[#13725]) +* Cleanup InternalClusterInfoService {pull}13543[#13543] +* Remove and forbid use of com.google.common.base.Throwables {pull}13409[#13409] (issue: {issue}13224[#13224]) +* Remove cyclic dependencies between IndexService and FieldData / BitSet caches {pull}13381[#13381] +* Remove and forbid use of com.google.common.base.Objects {pull}13355[#13355] (issue: {issue}13224[#13224]) +* Enable indy (invokedynamic) compile flag for Groovy scripts by default {pull}8201[#8201] (issue: {issue}8184[#8184]) + +Java API:: +* Align handling of interrupts in BulkProcessor {pull}15527[#15527] (issue: {issue}14833[#14833]) +* BulkProcessor backs off exponentially by default {pull}15513[#15513] (issue: {issue}14829[#14829]) +* Reject refresh usage in bulk items when using and fix NPE when no source {pull}15082[#15082] (issue: {issue}7361[#7361]) +* BulkProcessor retries after request handling has been rejected due to a full thread pool {pull}14829[#14829] (issue: {issue}14620[#14620]) + +Logging:: +* Log suppressed stack traces under DEBUG {pull}16627[#16627] (issues: {issue}12991[#12991], {issue}15329[#15329], {issue}16622[#16622]) +* Add circuit breaker name to logging package {pull}14661[#14661] +* Move logging for the amount of free disk to TRACE {pull}14403[#14403] (issue: {issue}12843[#12843]) +* Map log-level 'trace' to JDK-Level 'FINEST' {pull}14234[#14234] + +Mapping:: +* Expose the reason why a mapping merge is issued. {pull}16059[#16059] (issue: {issue}15989[#15989]) +* Add sub-fields support to `bool` fields. {pull}15636[#15636] (issue: {issue}6587[#6587]) +* Improve cross-type dynamic mapping updates. 
{pull}15633[#15633] (issue: {issue}15568[#15568]) +* Make mapping updates more robust. {pull}15539[#15539] +* Make mapping serialization more robust. {pull}15480[#15480] +* Make mappings immutable. {pull}15313[#15313] (issue: {issue}9365[#9365]) +* Make MappedFieldType.checkTypeName part of MappedFieldType.checkCompatibility. {pull}15245[#15245] +* Register field mappers at the node level. {pull}14896[#14896] (issue: {issue}14828[#14828]) + +Network:: +* Provide better error message when an incompatible node connects to a node {pull}17182[#17182] (issue: {issue}17090[#17090]) +* Add additional fallback to http.publish_port and restrict fallback to transport.publish_port {pull}16626[#16626] (issue: {issue}14535[#14535]) +* only allow code to bind to the user's configured port numbers/ranges {pull}14549[#14549] +* Port of publishAddress should match port of corresponding boundAddress {pull}14535[#14535] (issues: {issue}14503[#14503], {issue}14513[#14513], {issue}14514[#14514]) + +Packaging:: +* Windows service: Use JAVA_HOME environment variable in registry {pull}16552[#16552] (issue: {issue}13521[#13521]) +* Default standard output to the journal in systemd {pull}16159[#16159] (issues: {issue}15315[#15315], {issue}16134[#16134]) +* Use egrep instead of grep -E for Solaris {pull}15755[#15755] (issue: {issue}15628[#15628]) +* punch thru symlinks when loading plugins/modules {pull}15311[#15311] +* set ActiveProcessLimit=1 on windows {pull}15055[#15055] +* set RLIMIT_NPROC = 0 on bsd/os X systems. 
{pull}15039[#15039] +* Drop ability to execute on Solaris {pull}14200[#14200] +* Nuke ES_CLASSPATH appending, JarHell fail on empty classpath elements {pull}13880[#13880] (issues: {issue}13812[#13812], {issue}13864[#13864]) +* improve seccomp syscall filtering {pull}13829[#13829] +* Block process execution with seccomp on linux/amd64 {pull}13753[#13753] +* Get lang-javascript, lang-python, securemock ready for script refactoring {pull}13695[#13695] +* Remove some bogus permissions only needed for tests. {pull}13620[#13620] +* Remove java.lang.reflect.ReflectPermission "suppressAccessChecks" {pull}13603[#13603] +* Remove JAVA_HOME detection from the debian init script {pull}13514[#13514] (issues: {issue}13403[#13403], {issue}9774[#9774]) + +Plugin Cloud GCE:: +* cloud-gce plugin should check `discovery.type` {pull}13809[#13809] (issue: {issue}13614[#13614]) +* Adding backoff from retries on GCE errors {pull}13671[#13671] (issue: {issue}13460[#13460]) + +Plugin Discovery EC2:: +* Add ap-northeast-2 (seoul) endpoints for EC2 discovery and S3 snapshots {pull}16167[#16167] (issue: {issue}16166[#16166]) +* Adding US-Gov-West {pull}14358[#14358] +* Improved building of disco nodes {pull}14155[#14155] + +Plugin Ingest Attachment:: +* Fix attachments plugins with docx {pull}17059[#17059] (issue: {issue}16864[#16864]) + +Plugin Repository Azure:: +* Add support for secondary azure storage account {pull}13779[#13779] (issue: {issue}13228[#13228]) + +Plugin Repository S3:: +* Add aws canned acl {pull}14297[#14297] (issue: {issue}14103[#14103]) +* Enable S3SignerType {pull}13360[#13360] (issue: {issue}13332[#13332]) + +Plugins:: +* Expose http.type setting, and collapse al(most all) modules relating to transport/http {pull}15434[#15434] (issue: {issue}14148[#14148]) +* Ban RuntimePermission("getClassLoader") {pull}15253[#15253] +* Add nicer error message when a plugin descriptor is missing {pull}15200[#15200] (issue: {issue}15197[#15197]) +* Don't be lenient in 
PluginService#processModule(Module) {pull}14306[#14306] +* Adds a validation for plugins script to check if java is set {pull}13633[#13633] (issue: {issue}13613[#13613]) +* Output plugin info only in verbose mode {pull}12908[#12908] (issue: {issue}12907[#12907]) + +Query DSL:: +* Allow CIDR notation in query string query {pull}14773[#14773] (issue: {issue}7464[#7464]) +* Internal: simplify filtered query conversion to lucene query {pull}13312[#13312] (issue: {issue}13272[#13272]) + +REST:: +* Make XContentGenerator.writeRaw* safer. {pull}15358[#15358] +* Filter path refactoring {pull}14390[#14390] (issues: {issue}10980[#10980], {issue}11560[#11560], {issue}13344[#13344]) + +Recovery:: +* Handle cancel exceptions on recovery target if the cancel comes from the source {pull}15309[#15309] +* Decouple routing and primary operation logic in TransportReplicationAction {pull}14852[#14852] + +Reindex API:: +* Implement helpful interfaces in reindex requests {pull}17032[#17032] +* Reindex should timeout if sub-requests timeout {pull}16962[#16962] +* Teach reindex to retry on rejection {pull}16556[#16556] (issue: {issue}16093[#16093]) + +Scripting:: +* Remove suppressAccessChecks permission for Groovy script plugin {pull}16839[#16839] (issue: {issue}16527[#16527]) +* Class permission for Groovy references {pull}16660[#16660] (issue: {issue}16657[#16657]) +* Scripting: Allow to get size of array in mustache {pull}16193[#16193] +* Enhancements to the mustache script engine {pull}15661[#15661] +* Add property permissions so groovy scripts can serialize json {pull}14500[#14500] (issue: {issue}14488[#14488]) +* Remove ScriptEngineService.unwrap. {pull}13958[#13958] +* Remove ScriptEngineService.execute. {pull}13956[#13956] + +Search:: +* Caching Weight wrappers should propagate the BulkScorer. {pull}14317[#14317] +* fix numerical issue in function score query {pull}14085[#14085] +* Optimize scrolls for constant-score queries. 
{pull}13311[#13311] + +Settings:: +* Log warning if max file descriptors too low {pull}16506[#16506] + +Snapshot/Restore:: +* Support wildcards for getting repositories and snapshots {pull}15151[#15151] (issue: {issue}4758[#4758]) +* Add ignore_unavailable parameter to skip unavailable snapshot {pull}14471[#14471] (issue: {issue}13887[#13887]) +* Simplify the BlobContainer blob writing interface {pull}13434[#13434] + +Stats:: +* Pull Fields instance once from LeafReader in completion stats {pull}15090[#15090] (issue: {issue}6593[#6593]) +* Add os.allocated_processors stats {pull}14409[#14409] (issue: {issue}13917[#13917]) +* Adds stats counter for failed indexing requests {pull}13130[#13130] (issue: {issue}8938[#8938]) + +Top Hits:: +* Put method addField on TopHitsBuilder {pull}14597[#14597] (issue: {issue}12962[#12962]) + +Translog:: +* Check for tragic event on all kinds of exceptions not only ACE and IOException {pull}15535[#15535] + +Tribe Node:: +* Tribe nodes should apply cluster state updates in batches {pull}14993[#14993] (issues: {issue}14725[#14725], {issue}14899[#14899]) + + + +[[bug-5.0.0-alpha1-2x]] +[float] +=== Bug fixes + +Aggregations:: +* Build empty extended stats aggregation if no docs collected for bucket {pull}16972[#16972] (issues: {issue}16812[#16812], {issue}9544[#9544]) +* Set meta data for pipeline aggregations {pull}16516[#16516] (issue: {issue}16484[#16484]) +* Filter(s) aggregation should create weights only once. {pull}15998[#15998] +* Make `missing` on terms aggs work with all execution modes. 
{pull}15746[#15746] (issue: {issue}14882[#14882]) +* Run pipeline aggregations for empty buckets added in the Range Aggregation {pull}15519[#15519] (issue: {issue}15471[#15471]) +* [Children agg] fix bug that prevented all child docs from being evaluated {pull}15457[#15457] +* Correct typo in class name of StatsAggregator {pull}15321[#15321] (issue: {issue}14730[#14730]) +* Fix significant terms reduce for long terms {pull}14948[#14948] (issue: {issue}13522[#13522]) +* Fix NPE in Derivative Pipeline when current bucket value is null {pull}14745[#14745] +* Pass extended bounds into HistogramAggregator when creating an unmapped aggregator {pull}14742[#14742] (issue: {issue}14735[#14735]) +* Added correct generic type parameter on ScriptedMetricBuilder {pull}14018[#14018] (issue: {issue}13986[#13986]) +* Pipeline Aggregations at the root of the agg tree are now validated {pull}13475[#13475] (issue: {issue}13179[#13179]) + +Aliases:: +* Fix _aliases filter and null parameters {pull}16553[#16553] (issues: {issue}16547[#16547], {issue}16549[#16549]) + +Allocation:: +* IndicesStore checks for `allocated elsewhere` for every shard not allocated on the local node {pull}17106[#17106] +* Prevent peer recovery from node with older version {pull}15775[#15775] +* Fix calculation of next delay for delayed shard allocation {pull}14765[#14765] +* Take ignored unallocated shards into account when making allocation decision {pull}14678[#14678] (issue: {issue}14670[#14670]) +* Only allow rebalance operations to run if all shard store data is available {pull}14591[#14591] (issue: {issue}14387[#14387]) +* Delayed allocation can miss a reroute {pull}14494[#14494] (issues: {issue}14010[#14010], {issue}14011[#1414011], {issue}14445[#14445]) +* Check rebalancing constraints when shards are moved from a node they can no longer remain on {pull}14259[#14259] (issue: {issue}14057[#14057]) + +Analysis:: +* Analysis : Allow string explain param in JSON {pull}16977[#16977] (issue: 
{issue}16925[#16925]) +* Analysis : Fix no response from Analyze API without specified index {pull}15447[#15447] (issue: {issue}15148[#15148]) + +Bulk:: +* Bulk api: fail deletes when routing is required but not specified {pull}16675[#16675] (issues: {issue}10136[#10136], {issue}16645[#16645]) +* Do not release unacquired semaphore {pull}14909[#14909] (issue: {issue}14908[#14908]) + +CAT API:: +* Properly set indices and indicesOptions on subrequest made by /_cat/indices {pull}14360[#14360] + +CRUD:: +* Throw exception if content type could not be determined in Update API {pull}15904[#15904] (issue: {issue}15822[#15822]) +* Index name expressions should not be broken up {pull}13691[#13691] (issue: {issue}13665[#13665]) + +Cache:: +* Handle closed readers in ShardCoreKeyMap {pull}16027[#16027] + +Cluster:: +* Index deletes not applied when cluster UUID has changed {pull}16825[#16825] (issue: {issue}11665[#11665]) +* Only fail the relocation target when a replication request on it fails {pull}15791[#15791] (issue: {issue}15790[#15790]) +* Handle shards assigned to nodes that are not in the cluster state {pull}14586[#14586] (issue: {issue}14584[#14584]) +* Bulk cluster state updates on index deletion {pull}11258[#11258] (issue: {issue}7295[#7295]) + +Core:: +* BitSetFilterCache duplicates its content. 
{pull}15836[#15836] (issue: {issue}15820[#15820]) +* Limit the max size of bulk and index thread pools to bounded number of processors {pull}15585[#15585] (issue: {issue}15582[#15582]) +* AllTermQuery's scorer should skip segments that never saw the requested term {pull}15506[#15506] +* Include root-cause exception when we fail to change shard's index buffer {pull}14867[#14867] +* Restore thread interrupt flag after an InterruptedException {pull}14799[#14799] (issue: {issue}14798[#14798]) +* Record all bytes of the checksum in VerifyingIndexOutput {pull}13923[#13923] (issues: {issue}13848[#13848], {issue}13896[#13896]) +* When shard becomes active again, immediately increase its indexing buffer {pull}13918[#13918] (issue: {issue}13802[#13802]) +* Close TokenStream in finally clause {pull}13870[#13870] (issue: {issue}11947[#11947]) +* LoggingRunnable.run should catch and log all errors, not just Exception? {pull}13718[#13718] (issue: {issue}13487[#13487]) + +Exceptions:: +* Fix ensureNodesAreAvailable's error message {pull}14007[#14007] (issue: {issue}13957[#13957]) + +Expressions:: +* Check that _value is used in aggregations script before setting value to specialValue {pull}17091[#17091] (issue: {issue}14262[#14262]) + +Fielddata:: +* Don't cache top level field data for fields that don't exist {pull}14693[#14693] + +Geo:: +* Remove .geohash suffix from GeoDistanceQuery and GeoDistanceRangeQuery {pull}15871[#15871] (issue: {issue}15179[#15179]) +* Geo: Allow numeric parameters enclosed in quotes for 'geohash_grid' aggregation {pull}14440[#14440] (issue: {issue}13132[#13132]) +* Resync Geopoint hashCode/equals method {pull}14124[#14124] (issue: {issue}14083[#14083]) +* Fix GeoPointFieldMapper to index geohash at correct precision. {pull}13649[#13649] (issue: {issue}12467[#12467]) + +Highlighting:: +* Don't override originalQuery with request filters {pull}15793[#15793] (issue: {issue}15689[#15689]) +* Fix spans extraction to not also include individual terms. 
{pull}15516[#15516] (issues: {issue}13239[#13239], {issue}15291[#15291]) + +Index APIs:: +* Field stats: Index constraints should remove indices in the response if the field to evaluate is empty {pull}14868[#14868] +* Field stats: Fix NPE for index constraint on empty index {pull}14841[#14841] +* Field stats: Added `format` option for index constraints {pull}14823[#14823] (issue: {issue}14804[#14804]) +* Forbid index name `.` and `..` {pull}13862[#13862] (issue: {issue}13858[#13858]) + +Inner Hits:: +* Query and top level inner hit definitions shouldn't overwrite each other {pull}16222[#16222] (issue: {issue}16218[#16218]) + +Internal:: +* Log uncaught exceptions from scheduled once tasks {pull}15824[#15824] (issue: {issue}15814[#15814]) +* FunctionScoreQuery should implement two-phase iteration. {pull}15602[#15602] +* Make sure the remaining delay of unassigned shard is updated with every reroute {pull}14890[#14890] (issue: {issue}14808[#14808]) +* Throw a meaningful error when loading metadata and an alias and index have the same name {pull}14842[#14842] (issue: {issue}14706[#14706]) +* fixup issues with 32-bit jvm {pull}14609[#14609] +* Failure to update the cluster state with the recovered state should make sure it will be recovered later {pull}14485[#14485] +* Gateway: a race condition can prevent the initial cluster state from being recovered {pull}13997[#13997] +* Verify actually written checksum in VerifyingIndexOutput {pull}13848[#13848] +* An inactive shard is activated by triggered synced flush {pull}13802[#13802] +* Remove all setAccessible in tests and forbid {pull}13539[#13539] +* Remove easy uses of setAccessible in tests. {pull}13537[#13537] +* Ban setAccessible from core code, restore monitoring stats under java 9 {pull}13531[#13531] (issue: {issue}13527[#13527]) + +Logging:: +* Add missing index name to indexing slow log {pull}17026[#17026] (issue: {issue}17025[#17025]) +* ParseFieldMatcher should log when using deprecated settings. 
{pull}16988[#16988] +* Don't log multi-megabyte guice exceptions. {pull}13782[#13782] +* Moving system property setting to before it can be used {pull}13660[#13660] (issue: {issue}13658[#13658]) + +Mapping:: +* Put mapping operations must update metadata of all types. {pull}16264[#16264] (issue: {issue}16239[#16239]) +* Fix serialization of `search_analyzer`. {pull}16255[#16255] +* Reuse metadata mappers for dynamic updates. {pull}16023[#16023] (issue: {issue}15997[#15997]) +* Fix MapperService#searchFilter(...) {pull}15923[#15923] (issue: {issue}15757[#15757]) +* Fix initial sizing of BytesStreamOutput. {pull}15864[#15864] (issue: {issue}15789[#15789]) +* MetaDataMappingService should call MapperService.merge with the original mapping update. {pull}15508[#15508] +* MapperService: check index.mapper.dynamic during index creation {pull}15424[#15424] (issue: {issue}15381[#15381]) +* Only text fields should accept analyzer and term vector settings. {pull}15308[#15308] +* Mapper parsers should not check for a `tokenized` property. {pull}15289[#15289] +* Validate that fields are defined only once. {pull}15243[#15243] (issue: {issue}15057[#15057]) +* Check mapping compatibility up-front. {pull}15175[#15175] (issue: {issue}15049[#15049]) +* Don't treat _default_ as a regular type. {pull}15156[#15156] (issue: {issue}15049[#15049]) +* Don't ignore mapping merge failures. {pull}15144[#15144] (issue: {issue}15049[#15049]) +* Treat mappings at an index-level feature. 
{pull}15142[#15142] +* Make _type use doc values {pull}14783[#14783] (issue: {issue}14781[#14781]) + +Network:: +* Only accept transport requests after node is fully initialized {pull}16746[#16746] (issue: {issue}16723[#16723]) + +Packaging:: +* Fix waiting for pidfile {pull}16718[#16718] (issue: {issue}16717[#16717]) +* Fix Windows service installation failure {pull}15549[#15549] (issue: {issue}15349[#15349]) +* Enable es_include at init {pull}15173[#15173] +* Handle system policy correctly {pull}14704[#14704] (issue: {issue}14690[#14690]) +* Startup script exit status should catch daemonized startup failures {pull}14170[#14170] (issue: {issue}14163[#14163]) +* Don't let ubuntu try to install its crazy jayatana agent. {pull}13813[#13813] (issue: {issue}13785[#13785]) + +Parent/Child:: +* Check that parent_type in Has Parent Query has child types {pull}16923[#16923] (issue: {issue}16692[#16692]) +* Has child query forces default similarity {pull}16611[#16611] (issues: {issue}16550[#16550], {issue}4977[#4977]) + +Percolator:: +* Don't replace found fields if map unmapped fields as string is enabled {pull}16043[#16043] (issue: {issue}10500[#10500]) +* mpercolate api should serialise start time {pull}15938[#15938] (issue: {issue}15908[#15908]) + +Plugin Delete By Query:: +* Fix Delete-by-Query with Shield {pull}14658[#14658] (issue: {issue}14527[#14527]) + +Plugin Discovery GCE:: +* Add setFactory permission to GceDiscoveryPlugin {pull}16860[#16860] (issue: {issue}16485[#16485]) + +Plugin Mapper Attachment:: +* Fix toXContent() for mapper attachments field {pull}15110[#15110] + +Plugin Repository Azure:: + +Plugin Repository S3:: +* Hack around aws security hole of accessing sun.security.ssl, s3 repository works on java 9 again {pull}13538[#13538] (issue: {issue}432[#432]) + +Plugins:: +* Fix plugin list command error message {pull}14288[#14288] (issue: {issue}14287[#14287]) +* Fix HTML response during redirection {pull}11374[#11374] (issue: {issue}11370[#11370]) + 
+Query DSL:: +* Fix FunctionScore equals/hashCode to include minScore and friends {pull}15676[#15676] +* Min should match greater than the number of optional clauses should return no result {pull}15571[#15571] (issue: {issue}15521[#15521]) +* Return a better exception message when `regexp` query is used on a numeric field {pull}14910[#14910] (issue: {issue}14782[#14782]) + +REST:: +* Remove detect_noop from REST spec {pull}16386[#16386] +* Make text parsing less lenient. {pull}15679[#15679] +* Throw exception when trying to write map with null keys {pull}15479[#15479] (issue: {issue}14346[#14346]) +* Fix OOM in AbstractXContentParser {pull}15350[#15350] (issue: {issue}15338[#15338]) +* XContentFactory.xContentType: allow for possible UTF-8 BOM for JSON XContentType {pull}14611[#14611] (issue: {issue}14442[#14442]) +* RestUtils.decodeQueryString ignores the URI fragment when parsing a query string {pull}13365[#13365] (issue: {issue}13320[#13320]) + +Recovery:: +* Try to renew sync ID if `flush=true` on forceMerge {pull}17108[#17108] (issue: {issue}17019[#17019]) +* CancellableThreads should also treat ThreadInterruptedException as InterruptedException {pull}15318[#15318] + +Reindex API:: +* Properly register reindex status {pull}17125[#17125] +* Make search failure cause rest failure {pull}16889[#16889] (issue: {issue}16037[#16037]) + +Scripting:: +* Add permission to access sun.reflect.MethodAccessorImpl from Groovy scripts {pull}16540[#16540] (issue: {issue}16536[#16536]) +* Security permissions for Groovy closures {pull}16196[#16196] (issues: {issue}16194[#16194], {issue}248[#248]) + +Search:: +* Do not apply minimum_should_match on auto generated boolean query if the coordination factor is disabled. 
{pull}16155[#16155] +* Do not apply minimum-should-match on a boolean query if the coords are disabled {pull}16078[#16078] (issue: {issue}15858[#15858]) +* Fix blended terms take 2 {pull}15894[#15894] (issue: {issue}15860[#15860]) +* Fix NPE when a segment with an empty cache gets closed. {pull}15202[#15202] (issue: {issue}15043[#15043]) +* Fix the quotes in the explain message for a script score function without parameters {pull}11398[#11398] + +Settings:: +* TransportClient should use updated setting for initialization of modules and service {pull}16095[#16095] +* ByteSizeValue.equals should normalize units {pull}13784[#13784] + +Snapshot/Restore:: +* Prevent closing index during snapshot restore {pull}16933[#16933] (issue: {issue}16321[#16321]) +* Add node version check to shard allocation during restore {pull}16520[#16520] (issue: {issue}16519[#16519]) +* Snapshot restore and index creates should keep index settings and cluster blocks in sync {pull}13931[#13931] (issue: {issue}13213[#13213]) +* Fix blob size in writeBlob() method {pull}13574[#13574] (issue: {issue}13434[#13434]) + +Stats:: +* Fix recovery translog stats totals when recovering from store {pull}16493[#16493] (issue: {issue}15974[#15974]) +* Fix calculation of age of pending tasks {pull}15995[#15995] (issue: {issue}15988[#15988]) +* Add extra validation into `cluster/stats` {pull}14699[#14699] (issue: {issue}7390[#7390]) +* Omit current* stats for OldShardStats {pull}13801[#13801] (issue: {issue}13386[#13386]) + +Translog:: +* Never delete translog-N.tlog file when creation fails {pull}15788[#15788] +* Close recovered translog readers if createWriter fails {pull}15762[#15762] (issue: {issue}15754[#15754]) +* Fail and close translog hard if writing to disk fails {pull}15420[#15420] (issue: {issue}15333[#15333]) +* Prevent writing to closed channel if translog is already closed {pull}15012[#15012] (issue: {issue}14866[#14866]) +* Don't delete temp recovered checkpoint file if it was renamed 
{pull}14872[#14872] (issue: {issue}14695[#14695]) +* Translog recovery can repeatedly fail if we run out of disk {pull}14695[#14695] +* Pending operations in the translog prevent shard from being marked as inactive {pull}13759[#13759] (issue: {issue}13707[#13707]) + +Tribe Node:: +* Passthrough environment and network settings to tribe client nodes {pull}16893[#16893] +* Tribe node: pass path.conf to inner tribe clients {pull}16258[#16258] (issue: {issue}16253[#16253]) +* Fix tribe node to load config file for internal client nodes {pull}15300[#15300] (issues: {issue}13383[#13383], {issue}14573[#14573]) + + + +[[regression-5.0.0-alpha1-2x]] +[float] +=== Regressions + +Analysis:: +* Add PathHierarchy type back to path_hierarchy tokenizer for backward compatibility with 1.x {pull}15785[#15785] (issue: {issue}15756[#15756]) + +Internal:: +* Deduplicate concrete indices after indices resolution {pull}14316[#14316] (issues: {issue}11258[#11258], {issue}12058[#12058]) + +Plugin Cloud Azure:: +* Filter cloud azure credentials {pull}14863[#14863] (issues: {issue}13779[#13779], {issue}14843[#14843]) + +REST:: +* Don't return all indices immediately if count of expressions >1 and first expression is * {pull}17033[#17033] (issue: {issue}17027[#17027]) + + + +[[upgrade-5.0.0-alpha1-2x]] +[float] +=== Upgrades + +Core:: +* Upgrade to Lucene 5.5.0 official release {pull}16742[#16742] +* Upgrade to lucene 5.5.0-snapshot-850c6c2 {pull}16615[#16615] +* Upgrade to lucene 5.5.0-snapshot-4de5f1d {pull}16400[#16400] (issues: {issue}16373[#16373], {issue}16399[#16399]) +* Update lucene to r1725675 {pull}16114[#16114] +* Upgrade to lucene-5.5.0-snapshot-1721183. {pull}15575[#15575] +* Upgrade Lucene to 5.4.0-snapshot-1715952 {pull}14951[#14951] +* Upgrade Lucene to 5.4.0-snapshot-1714615 {pull}14784[#14784] +* Upgrade to lucene-5.4.0-snapshot-1712973. {pull}14619[#14619] +* update to lucene-5.4.x-snapshot-1711508 {pull}14398[#14398] +* Upgrade to lucene-5.4-snapshot-1710880. 
{pull}14320[#14320] +* Upgrade to lucene-5.4-snapshot-1708254. {pull}14074[#14074] +* upgrade lucene to r1702265 {pull}13439[#13439] +* Upgrade master to lucene 5.4-snapshot r1701068 {pull}13324[#13324] + +Geo:: +* Update to spatial4j 0.5 for correct Multi-Geometry {pull}14269[#14269] (issue: {issue}9904[#9904]) + +Internal:: +* Update to Jackson 2.6.2 {pull}13344[#13344] (issues: {issue}10980[#10980], {issue}207[#207], {issue}213[#213]) + +Plugin Cloud AWS:: +* Update AWS SDK version to 1.10.19 {pull}13655[#13655] (issue: {issue}13656[#13656]) + +Plugin Cloud Azure:: +* Update Azure Service Management API to 0.9.0 {pull}15232[#15232] (issue: {issue}15209[#15209]) + +Plugin Discovery Azure:: +* Upgrade azure SDK to 0.9.3 {pull}17102[#17102] (issues: {issue}17042[#17042], {issue}557[#557]) + +Plugin Lang JS:: +* upgrade rhino for plugins/lang-javascript {pull}14466[#14466] + +Plugin Repository Azure:: +* Upgrade Azure Storage client to 4.0.0 {pull}16084[#16084] (issues: {issue}12567[#12567], {issue}15080[#15080], {issue}15976[#15976]) + +Plugin Repository S3:: +* Upgrade to aws 1.10.33 {pull}14672[#14672] + +Scripting:: +* Upgrade groovy dependency in lang-groovy module to version 2.4.6 {pull}16830[#16830] (issue: {issue}16527[#16527]) + + + diff --git a/docs/reference/release-notes/5.0.0-alpha1.asciidoc b/docs/reference/release-notes/5.0.0-alpha1.asciidoc new file mode 100644 index 000000000000..eac01a915e5c --- /dev/null +++ b/docs/reference/release-notes/5.0.0-alpha1.asciidoc @@ -0,0 +1,688 @@ +[[release-notes-5.0.0-alpha1]] +== 5.0.0-alpha1 Release Notes + +The changes listed below have been released for the first time in +Elasticsearch 5.0.0-alpha1. Changes in this release which were first released +in the 2.x series are listed in <>. 
+ +[[breaking-5.0.0-alpha1]] +[float] +=== Breaking changes + +Aggregations:: +* getKeyAsString and key_as_string should be the same for terms aggregation on boolean field {pull}15393[#15393] + +Aliases:: +* make get alias expand to open and closed indices by default {pull}15954[#15954] (issue: {issue}14982[#14982]) + +Allocation:: +* Simplify shard balancer interface {pull}17028[#17028] (issue: {issue}8954[#8954]) +* Remove DisableAllocationDecider {pull}13313[#13313] + +CAT API:: +* Add raw recovery progress to cat recovery API {pull}17064[#17064] (issue: {issue}17022[#17022]) +* Remove host from cat nodes API {pull}16656[#16656] (issues: {issue}12959[#12959], {issue}16575[#16575]) +* Using the accept header in the request instead of content-type in _cat API. {pull}14421[#14421] (issue: {issue}14195[#14195]) + +CRUD:: +* Remove object notation for core types. {pull}15684[#15684] (issue: {issue}15388[#15388]) + +Cache:: +* Refactor IndicesRequestCache to make it testable. {pull}16610[#16610] +* Remove deprecated query cache settings {pull}15592[#15592] + +Core:: +* Bootstrap does not set system properties {pull}17088[#17088] (issues: {issue}16579[#16579], {issue}16791[#16791]) +* Add max number of processes check {pull}16919[#16919] +* Add mlockall bootstrap check {pull}16909[#16909] +* Remove es.useLinkedTransferQueue {pull}16786[#16786] +* One log {pull}16703[#16703] (issue: {issue}16585[#16585]) + +Engine:: +* Remove `index.compound_on_flush` setting and default to `true` {pull}15594[#15594] (issue: {issue}10778[#10778]) + +Fielddata:: +* Remove "uninverted" and "binary" fielddata support for numeric and boolean fields. 
{pull}14082[#14082] + +Index APIs:: +* Remove `GET` option for /_forcemerge {pull}15223[#15223] (issue: {issue}15165[#15165]) +* Remove /_optimize REST API endpoint {pull}14226[#14226] (issue: {issue}13778[#13778]) + +Internal:: +* Cli: Switch to jopt-simple {pull}17024[#17024] (issue: {issue}11564[#11564]) +* Replace ContextAndHeaders with a ThreadPool based ThreadLocal implementation {pull}15776[#15776] +* Remove NodeBuilder {pull}15354[#15354] +* Fix IndexSearcherWrapper interface to not depend on the EngineConfig {pull}14654[#14654] +* Cleanup query parsing and remove IndexQueryParserService {pull}14452[#14452] +* Fold IndexCacheModule into IndexModule {pull}14293[#14293] +* Remove circular dependency between IndicesService and IndicesStore {pull}14285[#14285] +* Remove guice injection from IndexStore and friends {pull}14279[#14279] +* Simplify similarity module and friends {pull}13942[#13942] +* Remove shard-level injector {pull}13881[#13881] +* Refactor SearchRequest to be parsed on the coordinating node {pull}13859[#13859] +* Remove support for pre 2.0 indices {pull}13799[#13799] + +Java API:: +* Remove the count api {pull}14166[#14166] (issue: {issue}13928[#13928]) +* IdsQueryBuilder to accept only non null ids and types {pull}13937[#13937] + +Mapping:: +* Change the field mapping index time boost into a query time boost. {pull}16900[#16900] +* Deprecate string in favor of text/keyword. {pull}16877[#16877] +* [Mapping] Several MappingService cleanups {pull}16133[#16133] (issue: {issue}15924[#15924]) +* [Mapping] Cleanup ParentFieldMapper: {pull}16045[#16045] +* Remove the `format` option of the `_source` field. {pull}15398[#15398] +* Remove transform {pull}13657[#13657] (issue: {issue}12674[#12674]) + +Network:: +* Remove ability to disable Netty gathering writes {pull}16774[#16774] (issue: {issue}7811[#7811]) + +Parent/Child:: +* Removed `total` score mode in favour for `sum` score mode. 
{pull}17174[#17174] (issues: {issue}13470[#13470], {issue}17083[#17083]) +* Several other parent/child cleanups {pull}13470[#13470] +* Removed pre 2.x parent child implementation {pull}13376[#13376] + +Percolator:: +* Change the percolate api to not dynamically add fields to mapping {pull}16077[#16077] (issue: {issue}15751[#15751]) + +Plugins:: +* Rename bin/plugin in bin/elasticsearch-plugin {pull}16454[#16454] +* Change the inner structure of the plugins zip {pull}16453[#16453] +* Remove multicast plugin {pull}16326[#16326] (issue: {issue}16310[#16310]) +* Plugins: Remove site plugins {pull}16038[#16038] +* Don't use guice for QueryParsers {pull}15761[#15761] +* Remove guice from the index level {pull}14518[#14518] +* Simplify Analysis registration and configuration {pull}14355[#14355] +* Replace IndicesLifecycle with a per-index IndexEventListener {pull}14217[#14217] (issue: {issue}13259[#13259]) + +Query DSL:: +* Remove the MissingQueryBuilder which was deprecated in 2.2.0. {pull}15364[#15364] (issue: {issue}14112[#14112]) +* Remove NotQueryBuilder {pull}14204[#14204] (issue: {issue}13761[#13761]) +* Function score query: remove deprecated support for boost_factor {pull}13510[#13510] +* Remove support for deprecated queries. {pull}13418[#13418] (issue: {issue}13326[#13326]) + +REST:: +* Limit the accepted length of the _id {pull}16036[#16036] (issue: {issue}16034[#16034]) + +Scripting:: +* Script settings {pull}16197[#16197] + +Search:: +* Remove some deprecations {pull}14331[#14331] +* Remove search exists api {pull}13911[#13911] (issues: {issue}13682[#13682], {issue}13910[#13910]) +* Query refactoring: split parse phase into fromXContent and toQuery for all queries {pull}13788[#13788] (issue: {issue}10217[#10217]) +* Remove the scan and count search types. {pull}13310[#13310] + +Search Refactoring:: +* Remove deprecated parameter from field sort builder. 
{pull}16573[#16573] (issue: {issue}16127[#16127]) +* Remove support for query_binary and filter_binary {pull}14433[#14433] (issue: {issue}14308[#14308]) +* Validate query api: move query parsing to the coordinating node {pull}14384[#14384] +* Remove "query" query and fix related parsing bugs {pull}14304[#14304] (issue: {issue}13326[#13326]) + +Settings:: +* Prevent index level setting from being configured on a node level {pull}17144[#17144] (issue: {issue}16799[#16799]) +* Remove es.max-open-files flag {pull}16757[#16757] (issues: {issue}16506[#16506], {issue}483[#483]) +* Enforce node level limits if node is started in production env {pull}16733[#16733] (issue: {issue}16727[#16727]) +* Move remaining settings in NettyHttpServerTransport to the new infra {pull}16531[#16531] +* Make settings validation strict {pull}16365[#16365] +* Remove the ability to fsync on every operation and only schedule fsync task if really needed {pull}16257[#16257] (issue: {issue}16152[#16152]) +* Remove index.flush_on_close entirely {pull}15977[#15977] +* Restore chunksize of 512kb on recovery and remove configurability {pull}15235[#15235] (issue: {issue}15161[#15161]) +* Remove ancient deprecated and alternative recovery settings {pull}15234[#15234] +* Replace IndexSettings annotation with a full-fledged class {pull}14251[#14251] +* Fix ping timeout settings inconsistencies {pull}13701[#13701] (issue: {issue}6579[#6579]) + +Similarities:: +* Renames `default` similarity into `classic` {pull}15446[#15446] (issue: {issue}15102[#15102]) + +Snapshot/Restore:: +* Fail closing or deleting indices during a full snapshot {pull}17021[#17021] (issue: {issue}16321[#16321]) + +Stats:: +* Modify load average format {pull}15932[#15932] (issue: {issue}15907[#15907]) +* Reintroduce five-minute and fifteen-minute load averages on Linux {pull}15907[#15907] (issues: {issue}12049[#12049], {issue}14741[#14741]) +* Add system CPU percent to OS stats {pull}14741[#14741] + +Store:: +* Standardize state format 
type for global and index level metadata {pull}17123[#17123] + +Term Vectors:: +* Remove DFS support from TermVector API {pull}16452[#16452] +* Term vector APIs should no longer update mappings {pull}16285[#16285] + +Translog:: +* Drop support for simple translog and hard-wire buffer to 8kb {pull}15574[#15574] +* Simplify translog-based flush settings {pull}15573[#15573] + +Warmers:: +* Remove query warmers and the warmer API. {pull}15614[#15614] (issue: {issue}15607[#15607]) + + + +[[deprecation-5.0.0-alpha1]] +[float] +=== Deprecations + +Plugin Mapper Attachment:: +* Deprecate mapper-attachments plugin {pull}16948[#16948] (issue: {issue}16910[#16910]) + +Search:: +* Deprecate fuzzy query {pull}16211[#16211] (issues: {issue}15760[#15760], {issue}16121[#16121]) + + + +[[feature-5.0.0-alpha1]] +[float] +=== New features + +Discovery:: +* Add two phased commit to Cluster State publishing {pull}13062[#13062] + +Ingest:: +* Merge feature/ingest branch into master branch {pull}16049[#16049] (issue: {issue}14049[#14049]) + +Mapping:: +* Add a text field. {pull}16637[#16637] +* Add a new `keyword` field. 
{pull}16589[#16589] + +Percolator:: +* index the query terms from the percolator query {pull}13646[#13646] (issue: {issue}12664[#12664]) + +Plugin Ingest Attachment:: +* Ingest: Add attachment processor {pull}16490[#16490] (issue: {issue}16303[#16303]) + +Plugin Mapper Attachment:: +* Migrate mapper attachments plugin to main repository {pull}14605[#14605] + +Plugin Repository HDFS:: +* HDFS Snapshot/Restore plugin {pull}15192[#15192] (issue: {issue}15191[#15191]) + +Query DSL:: +* Adds a rewrite phase to queries on the shard level {pull}16870[#16870] (issue: {issue}9526[#9526]) + +Reindex API:: +* Merge reindex to master {pull}16861[#16861] + +Scripting:: +* Exceptions and Infinite Loop Checking {pull}15936[#15936] +* Added a new scripting language (PlanA) {pull}15136[#15136] (issue: {issue}13084[#13084]) + +Search:: +* Add `search_after` parameter in the SearchAPI {pull}16125[#16125] (issue: {issue}8192[#8192]) + +Settings:: +* Add infrastructure to transactionally apply and reset dynamic settings {pull}15278[#15278] + +Stats:: +* API for listing index file sizes {pull}16661[#16661] (issue: {issue}16131[#16131]) + +Suggesters:: +* Add document-oriented completion suggester {pull}14410[#14410] (issue: {issue}10746[#10746]) + +Task Manager:: +* Add task cancellation mechanism {pull}16320[#16320] +* Make the Task object available to the action caller {pull}16033[#16033] +* Task Management: Add framework for registering and communicating with tasks {pull}15347[#15347] (issue: {issue}15117[#15117]) + + + +[[enhancement-5.0.0-alpha1]] +[float] +=== Enhancements + +Aggregations:: +* Add tests and documentation for using `time_zone` in date range aggregation {pull}16955[#16955] (issue: {issue}10130[#10130]) +* Refactoring of Aggregations {pull}14136[#14136] + +Allocation:: +* Write shard state metadata as soon as shard is created / initializing {pull}16625[#16625] (issue: {issue}14739[#14739]) +* Reuse existing allocation id for primary shard allocation 
{pull}16530[#16530] (issue: {issue}14739[#14739]) +* Remove version in ShardRouting (now obsolete) {pull}16243[#16243] (issue: {issue}14739[#14739]) +* Prefer nodes that previously held primary shard for primary shard allocation {pull}16096[#16096] (issue: {issue}14739[#14739]) +* Extend reroute with an option to force assign stale primary shard copies {pull}15708[#15708] (issue: {issue}14739[#14739]) +* Allocate primary shards based on allocation IDs {pull}15281[#15281] (issue: {issue}14739[#14739]) +* Persist currently started allocation IDs to index metadata {pull}14964[#14964] (issue: {issue}14739[#14739]) +* Use ObjectParser to parse AllocationID {pull}14962[#14962] (issue: {issue}14831[#14831]) +* Persist allocation ID with shard state metadata on nodes {pull}14831[#14831] (issue: {issue}14739[#14739]) + +CAT API:: +* Expose http address in cat/nodes {pull}16770[#16770] +* [cat/recovery] Make recovery time a TimeValue() {pull}16743[#16743] (issue: {issue}9209[#9209]) +* :CAT API: remove space at the end of a line {pull}15250[#15250] (issue: {issue}9464[#9464]) + +CRUD:: +* CRUD: Allow to get and set ttl as a time value/string {pull}15047[#15047] + +Cache:: +* Enable the indices request cache by default {pull}17162[#17162] (issues: {issue}16870[#16870], {issue}17134[#17134]) + +Cluster:: +* Resolve index names to Index instances early {pull}17048[#17048] +* Remove DiscoveryNode#shouldConnectTo method {pull}16898[#16898] (issue: {issue}16815[#16815]) +* Fail demoted primary shards and retry request {pull}16415[#16415] (issue: {issue}14252[#14252]) +* Illegal shard failure requests {pull}16275[#16275] +* Shard failure requests for non-existent shards {pull}16089[#16089] (issue: {issue}14252[#14252]) +* Add handling of channel failures when starting a shard {pull}16041[#16041] (issue: {issue}15895[#15895]) +* Wait for new master when failing shard {pull}15748[#15748] (issue: {issue}14252[#14252]) +* Master should wait on cluster state publication when failing a 
shard {pull}15468[#15468] (issue: {issue}14252[#14252]) +* Split cluster state update tasks into roles {pull}14899[#14899] (issue: {issue}13627[#13627]) +* Add timeout mechanism for sending shard failures {pull}14707[#14707] (issue: {issue}14252[#14252]) +* Add listener mechanism for failures to send shard failed {pull}14295[#14295] (issue: {issue}14252[#14252]) + +Core:: +* Use index UUID to lookup indices on IndicesService {pull}17001[#17001] +* Add -XX+AlwaysPreTouch JVM flag {pull}16937[#16937] +* Use and test relative time in TransportBulkAction {pull}16916[#16916] +* Bump Elasticsearch version to 5.0.0-alpha1-SNAPSHOT {pull}16862[#16862] +* Assert that we can write in all data-path on startup {pull}16745[#16745] +* Add G1GC check on startup {pull}16737[#16737] (issue: {issue}10740[#10740]) +* Shards with heavy indexing should get more of the indexing buffer {pull}14121[#14121] +* Remove and ban ImmutableMap {pull}13939[#13939] (issue: {issue}13224[#13224]) +* Finish banning ImmutableSet {pull}13820[#13820] (issue: {issue}13224[#13224]) +* Removes and bans ImmutableSet {pull}13754[#13754] (issue: {issue}13224[#13224]) +* Remove and ban ImmutableMap#entrySet {pull}13724[#13724] +* Forbid ForwardingSet {pull}13720[#13720] (issue: {issue}13224[#13224]) + +Discovery:: +* Add a dedicated queue for incoming ClusterStates {pull}13303[#13303] (issue: {issue}13062[#13062]) + +Engine:: +* Remove writeLockTimeout from InternalEngine {pull}16930[#16930] +* Don't guard IndexShard#refresh calls by a check to isRefreshNeeded {pull}16118[#16118] +* Never call a listener under lock in InternalEngine {pull}15786[#15786] +* Use System.nanoTime() to initialize Engine.lastWriteNanos {pull}14321[#14321] +* Flush big merges automatically if shard is inactive {pull}14275[#14275] +* Remove Engine.Create {pull}13955[#13955] +* Remove the disabled autogenerated id optimization from InternalEngine {pull}13857[#13857] + +Exceptions:: +* Fix typos in exception/assert/log messages in core 
module. {pull}16649[#16649] +* Add field names to several mapping errors {pull}16508[#16508] (issue: {issue}16378[#16378]) +* Add serialization support for more important IOExceptions {pull}15766[#15766] +* Adds exception objects to log messages. {pull}14827[#14827] (issue: {issue}10021[#10021]) +* Remove reflection hacks from ElasticsearchException {pull}13796[#13796] +* Rename QueryParsingException to a more generic ParsingException {pull}13631[#13631] +* Add *Exception(Throwable cause) constructors/ call where appropriate {pull}13544[#13544] (issue: {issue}10021[#10021]) + +Geo:: +* Fix a potential parsing problem in GeoDistanceSortParser {pull}17111[#17111] +* Geo: Add validation of shapes to ShapeBuilders {pull}15551[#15551] (issue: {issue}14416[#14416]) +* Make remaining ShapeBuilders implement Writeable {pull}15010[#15010] (issue: {issue}14416[#14416]) +* Geo: Remove internal `translated` flag from LineStringBuilder {pull}14969[#14969] +* Make PointBuilder, CircleBuilder & EnvelopeBuilder implement Writable {pull}14933[#14933] (issue: {issue}14416[#14416]) +* Merging BaseLineString and BasePolygonBuilder with subclass {pull}14887[#14887] (issue: {issue}14482[#14482]) +* Moving static factory methods to ShapeBuilders {pull}14529[#14529] +* Remove InternalLineStringBuilder and InternalPolygonBuilder {pull}14482[#14482] (issue: {issue}14416[#14416]) + +Highlighting:: +* Joint parsing of common global Highlighter and subfield parameters {pull}15368[#15368] (issue: {issue}15285[#15285]) +* Add fromXContent method to HighlightBuilder {pull}15157[#15157] + +Ingest:: +* Added ingest statistics to node stats API {pull}16915[#16915] +* Add `ingest_took` to bulk response {pull}16876[#16876] +* Add ingest info to node info API, which contains a list of available processors {pull}16865[#16865] +* Use diffs for ingest metadata in cluster state {pull}16847[#16847] +* hide null-valued metadata fields from WriteableIngestDocument#toXContent {pull}16557[#16557] +* Ingest: 
use bulk thread pool for bulk request processing (was index before) {pull}16539[#16539] (issue: {issue}16503[#16503]) +* Add foreach processor {pull}16432[#16432] +* revert PipelineFactoryError handling with throwing ElasticsearchParseException in ingest pipeline creation {pull}16355[#16355] +* Add processor tags to on_failure metadata in ingest pipeline {pull}16324[#16324] (issue: {issue}16202[#16202]) +* catch processor/pipeline factory exceptions and return structured error responses {pull}16276[#16276] (issue: {issue}16010[#16010]) +* Ingest: move get/put/delete pipeline methods to ClusterAdminClient {pull}16242[#16242] +* Geoip processor: remove redundant latitude and longitude fields and make location an object with lat and lon subfields {pull}16173[#16173] + +Internal:: +* Support scheduled commands in current context {pull}17077[#17077] +* Thread limits {pull}17003[#17003] +* Remove leniency from segments info integrity checks {pull}16985[#16985] (issue: {issue}16973[#16973]) +* Rename SearchServiceTransportAction to SearchTransportService {pull}16880[#16880] +* Decouple the TransportService and ClusterService {pull}16872[#16872] (issue: {issue}16788[#16788]) +* Refactor bootstrap checks {pull}16844[#16844] (issues: {issue}16733[#16733], {issue}16835[#16835]) +* Add LifecycleRunnable {pull}16752[#16752] +* Hot inlined methods in your area {pull}16725[#16725] +* Move IndicesQueryCache and IndicesRequestCache into IndicesService {pull}16603[#16603] +* Forbid use of java.security.MessageDigest#clone() {pull}16543[#16543] (issue: {issue}16479[#16479]) +* Make IndicesWarmer a private class of IndexService {pull}16470[#16470] +* Simplify IndicesFieldDataCache and detach from guice {pull}16469[#16469] +* Uppercase ells ('L') in long literals {pull}16329[#16329] (issue: {issue}16279[#16279]) +* ShardId equality and hash code inconsistency {pull}16319[#16319] (issue: {issue}16217[#16217]) +* Ensure all resources are closed on Node#close() {pull}16316[#16316] (issue: 
{issue}13685[#13685]) +* Make index uuid available in Index, ShardRouting & ShardId {pull}16217[#16217] +* Move RefreshTask into IndexService and use since task per index {pull}15933[#15933] +* Make IndexingMemoryController private to IndicesService {pull}15877[#15877] +* Cleanup IndexingOperationListeners infrastructure {pull}15875[#15875] +* Remove and forbid use of j.u.c.ThreadLocalRandom {pull}15862[#15862] (issue: {issue}15294[#15294]) +* Fix IntelliJ query builder type inference issues {pull}15429[#15429] +* Remove and forbid use of Collections#shuffle(List) and Random#() {pull}15299[#15299] (issue: {issue}15287[#15287]) +* Remove and forbid use of the type-unsafe empty Collections fields {pull}15187[#15187] +* Move IndicesService.canDeleteShardContent to use IndexSettings {pull}15150[#15150] (issue: {issue}15059[#15059]) +* Simplify MonitorService construction and detach from guice {pull}15035[#15035] +* Use Supplier for StreamInput#readOptionalStreamable {pull}14806[#14806] +* Add variable-length long encoding {pull}14780[#14780] +* Extend usage of IndexSetting class {pull}14731[#14731] (issue: {issue}14251[#14251]) +* Fold SimilarityModule into IndexModule {pull}14284[#14284] +* Move to lucene BoostQuery {pull}14264[#14264] +* Use built-in method for computing hash code of longs {pull}14213[#14213] +* Refactor ShardFailure listener infrastructure {pull}14206[#14206] +* Add methods for variable-length encoding integral arrays {pull}14087[#14087] +* Fold IndexAliasesService into IndexService {pull}14044[#14044] +* Remove unneeded Module abstractions {pull}13944[#13944] +* Query refactoring: simplify IndexQueryParserService parse methods {pull}13938[#13938] (issue: {issue}13859[#13859]) +* Remove and forbid use of com.google.common.collect.Iterators {pull}13916[#13916] (issue: {issue}13224[#13224]) +* Remove and forbid use of com.google.common.collect.ImmutableCollection {pull}13909[#13909] (issue: {issue}13224[#13224]) +* Remove and forbid use of 
com.google.common.io.Resources {pull}13908[#13908] (issue: {issue}13224[#13224]) +* Remove and forbid use of com.google.common.hash.* {pull}13907[#13907] (issue: {issue}13224[#13224]) +* Remove and forbid use of com.google.common.net.InetAddresses {pull}13905[#13905] (issue: {issue}13224[#13224]) +* Remove and forbid use of com.google.common.collect.EvictingQueue {pull}13903[#13903] (issue: {issue}13224[#13224]) +* Replace Guava cache with simple concurrent LRU cache {pull}13879[#13879] +* Remove ClusterSerivce and IndexSettingsService dependency from IndexShard {pull}13853[#13853] +* Start making RecoverySourceHandler unittestable {pull}13840[#13840] +* Remove IndexService dep. from IndexShard {pull}13797[#13797] +* Remove ES internal deletion policies in favour of Lucenes implementations {pull}13794[#13794] +* Move ShardTermVectorService to be on indices level as TermVectorService {pull}13786[#13786] +* Move ShardPercolateService creation into IndexShard {pull}13777[#13777] +* Remove `ExpressionScriptCompilationException` and `ExpressionScriptExecutionException` {pull}13742[#13742] +* Reduced the number of ClusterStateUpdateTask variants {pull}13735[#13735] +* Add a BaseParser helper for stream parsing {pull}13615[#13615] +* Remove and forbid use of com.google.common.primitives.Ints {pull}13596[#13596] (issue: {issue}13224[#13224]) +* Remove and forbid use of com.google.common.math.LongMath {pull}13575[#13575] (issue: {issue}13224[#13224]) +* Remove and forbid use of com.google.common.base.Joiner {pull}13572[#13572] (issue: {issue}13224[#13224]) +* Replace and ban next batch of Guava classes {pull}13562[#13562] (issue: {issue}13224[#13224]) +* Remove and forbid use of com.google.common.collect.Iterables {pull}13559[#13559] (issue: {issue}13224[#13224]) +* Replace LoadingCache usage with a simple ConcurrentHashMap {pull}13552[#13552] (issue: {issue}13224[#13224]) +* Use Supplier instead of Reflection {pull}13545[#13545] +* Remove and forbid use of 
com.google.common.base.Preconditions {pull}13540[#13540] (issue: {issue}13224[#13224]) +* Remove and forbid use of guava Function, Charsets, Collections2 {pull}13533[#13533] (issue: {issue}13224[#13224]) +* Remove and forbid use of com.google.common.collect.ImmutableSortedMap {pull}13525[#13525] (issue: {issue}13224[#13224]) +* Remove and forbid use of several com.google.common.util. classes {pull}13524[#13524] (issue: {issue}13224[#13224]) +* Cleanup SearchRequest & SearchRequestBuilder {pull}13518[#13518] +* Remove and forbid use of com.google.common.collect.Queues {pull}13498[#13498] (issue: {issue}13224[#13224]) +* Remove and forbid use of com.google.common.base.Preconditions#checkNotNull {pull}13493[#13493] (issue: {issue}13224[#13224]) +* Remove and forbid use of com.google.common.collect.Sets {pull}13463[#13463] (issue: {issue}13224[#13224]) +* Remove and forbid use of com.google.common.collect.Maps {pull}13438[#13438] (issue: {issue}13224[#13224]) +* Remove use of underscore as an identifier {pull}13353[#13353] +* Remove and forbid the use of com.google.common.base.Predicate(s)? {pull}13349[#13349] (issues: {issue}13224[#13224], {issue}13314[#13314]) +* This commit removes com.google.common.io {pull}13302[#13302] (issue: {issue}13224[#13224]) + +Java API:: +* Remove copy constructors from request classes and TransportMessage type {pull}16640[#16640] (issue: {issue}15776[#15776]) + +Mapping:: +* Remove friction from the mapping changes in 5.0. {pull}16991[#16991] +* Rework norms parameters for 5.0. {pull}16987[#16987] +* Moved dynamic field handling in doc parsing to end of parsing {pull}16798[#16798] +* Remove the MapperBuilders utility class. {pull}16609[#16609] +* Make the `index` property a boolean. {pull}16161[#16161] +* Remove the ability to enable doc values with the `fielddata.format` setting. {pull}16147[#16147] +* Be stricter about parsing boolean values in mappings. 
{pull}16146[#16146] +* Fix default doc values to be enabled when a field is not indexed. {pull}16141[#16141] +* Dynamically map floating-point numbers as floats instead of doubles. {pull}15319[#15319] (issue: {issue}13851[#13851]) +* Simplify MetaDataMappingService. {pull}15217[#15217] +* Remove MergeMappingException. {pull}15177[#15177] + +NOT CLASSIFIED:: +* Use a seed node to form multi-node cluster in integ tests {pull}17078[#17078] + +Packaging:: +* Fail early on JDK with compiler bug {pull}16418[#16418] (issues: {issue}16097[#16097], {issue}16362[#16362]) +* Make security non-optional {pull}16176[#16176] +* Remove RuntimePermission("accessDeclaredMembers") {pull}15378[#15378] +* Remove Guava as a dependency {pull}14055[#14055] (issue: {issue}13224[#13224]) +* Remove Guava as a dependency {pull}14054[#14054] (issue: {issue}13224[#13224]) + +Plugin Ingest Attachment:: +* Minor attachment processor improvements {pull}16574[#16574] + +Plugin Lang Painless:: +* Make Painless a Module {pull}16755[#16755] +* Minor Clean up {pull}16457[#16457] + +Plugin Mapper Attachment:: +* minor attachments cleanups: IDE test support and EPUB format {pull}14626[#14626] + +Plugin Repository Azure:: +* Support global `repositories.azure.` settings {pull}15141[#15141] (issue: {issue}13776[#13776]) +* Add timeout settings (default to 5 minutes) {pull}15080[#15080] (issue: {issue}14277[#14277]) + +Plugin Repository HDFS:: +* merge current hdfs improvements to master {pull}15588[#15588] + +Plugin Repository S3:: +* Add support for proxy authentication for s3 and ec2 {pull}15293[#15293] (issue: {issue}15268[#15268]) + +Plugins:: +* CliTool: Cleanup and document Terminal {pull}16443[#16443] +* Plugin cli: Improve maven coordinates detection {pull}16384[#16384] (issue: {issue}16376[#16376]) +* Enforce plugin zip does not contain zip entries outside of the plugin dir {pull}16361[#16361] +* CliTool: Allow unexpected exceptions to propagate {pull}16359[#16359] +* Reduce complexity of plugin 
cli {pull}16336[#16336] +* Remove Plugin.onIndexService. {pull}15029[#15029] (issue: {issue}14896[#14896]) +* Open up QueryCache and SearcherWrapper extension points {pull}14303[#14303] + +Query DSL:: +* Function Score Query: make parsing stricter {pull}16617[#16617] (issue: {issue}16583[#16583]) +* Parsers should throw exception on unknown objects {pull}14255[#14255] (issue: {issue}10974[#10974]) +* UNICODE_CHARACTER_CLASS fix {pull}11598[#11598] (issue: {issue}10146[#10146]) + +Query Refactoring:: +* Add infrastructure to rewrite query builders {pull}16599[#16599] +* Switch geo validation to enum {pull}13672[#13672] (issue: {issue}13608[#13608]) + +REST:: +* More robust handling of CORS HTTP Access Control {pull}16092[#16092] +* Add option to exclude based on paths in XContent {pull}16017[#16017] + +Recovery:: +* Relocation source should be marked as relocating before starting recovery to primary relocation target {pull}16500[#16500] +* Operation counter for IndexShard {pull}15956[#15956] (issue: {issue}15900[#15900]) +* Primary relocation handoff {pull}15900[#15900] (issue: {issue}15532[#15532]) +* Remove recovery threadpools and throttle outgoing recoveries on the master {pull}15372[#15372] +* Refactor StoreRecoveryService to be a simple package private util class {pull}13766[#13766] + +Reindex API:: +* Add ingest pipeline support to reindex {pull}16932[#16932] + +Scripting:: +* Remove Extra String Concat Token {pull}16382[#16382] +* Skipping hidden files compilation for script service {pull}16286[#16286] (issue: {issue}15269[#15269]) +* Rename Plan A to Painless {pull}16245[#16245] +* Add plumbing for script compile-time parameters {pull}15464[#15464] +* Factor mustache -> modules/lang-mustache {pull}15328[#15328] + +Search:: +* Store _all payloads on 1 byte instead of 4. {pull}16899[#16899] +* Refuse to load fields from _source when using the `fields` option and support wildcards. 
{pull}15017[#15017] (issues: {issue}10783[#10783], {issue}14489[#14489]) +* Add response into ClearScrollResponse {pull}13835[#13835] (issue: {issue}13817[#13817]) +* Shuffle shards for _only_nodes + support multiple specifications like cluster API {pull}12575[#12575] (issues: {issue}12546[#12546], {issue}12700[#12700]) + +Search Refactoring:: +* Move sort `order` field up into SortBuilder {pull}17035[#17035] +* Moves SortParser:parse(...) to only require QueryShardContext {pull}16999[#16999] (issue: {issue}15178[#15178]) +* Change internal representation of suggesters {pull}16873[#16873] +* Fixes serialisation of Ranges {pull}16674[#16674] +* Make GeoDistanceSortBuilder serializable, 2nd try {pull}16572[#16572] (issues: {issue}15178[#15178], {issue}16151[#16151]) +* Move missing() from SortBuilder interface to class {pull}16225[#16225] (issues: {issue}15178[#15178], {issue}16151[#16151]) +* Remove deprecated parameters from ScriptSortBuilder {pull}16153[#16153] (issue: {issue}15178[#15178]) +* Refactor GeoSortBuilder {pull}16151[#16151] (issue: {issue}15178[#15178]) +* Refactor FieldSortBuilder {pull}16127[#16127] (issue: {issue}15178[#15178]) +* Make sort order enum writable. {pull}16124[#16124] (issue: {issue}15178[#15178]) +* Make DistanceUnit writable. 
{pull}16122[#16122] (issue: {issue}15178[#15178]) +* RescoreBuilder: Add parsing and creating of RescoreSearchContext {pull}16014[#16014] (issue: {issue}15559[#15559]) +* Make RescoreBuilder and nested QueryRescorer Writable {pull}15953[#15953] (issue: {issue}15559[#15559]) +* Use HighlightBuilder in SearchSourceBuilder {pull}15376[#15376] (issue: {issue}15044[#15044]) +* Enable HighlightBuilder to create SearchContextHighlight {pull}15324[#15324] +* Explain api: move query parsing to the coordinating node {pull}14270[#14270] +* Switch query parsers to use ParseField {pull}14249[#14249] (issue: {issue}8964[#8964]) + +Settings:: +* Settings with complex matchers should not overlap {pull}16754[#16754] +* Moves GCE settings to the new infra {pull}16722[#16722] (issue: {issue}16720[#16720]) +* Add filtering support within Setting class {pull}16629[#16629] (issue: {issue}16598[#16598]) +* Migrate AWS settings to new settings infrastructure {pull}16602[#16602] (issue: {issue}16293[#16293]) +* Remove `gateway.initial_meta` and always rely on min master nodes {pull}16446[#16446] +* Rewrite SettingsFilter to be immutable {pull}16425[#16425] +* Simplify azure settings {pull}16363[#16363] +* Convert PageCacheRecycler settings {pull}16341[#16341] +* Monitor settings {pull}16313[#16313] +* Cut over tribe node settings to new settings infra {pull}16311[#16311] +* Convert multcast plugin settings to the new infra {pull}16295[#16295] +* Convert `request.headers.*` to the new settings infra {pull}16292[#16292] +* Migrate Azure settings to new settings infrastructure {pull}16291[#16291] +* Validate logger settings and allow them to be reset via API {pull}16289[#16289] +* Switch NodeEnvironment's settings to new settings {pull}16273[#16273] +* Simplify AutoCreateIndex and add more tests {pull}16270[#16270] +* Convert several pending settings {pull}16269[#16269] +* Migrate query caching settings to the new settings infra. 
{pull}16267[#16267] +* Convert `action.auto_create_index` and `action.master.force_local` to the new settings infra {pull}16263[#16263] +* Convert `cluster.routing.allocation.type` and `processors` to the new settings infra. {pull}16238[#16238] +* Validate tribe node settings on startup {pull}16237[#16237] +* Move node.client, node.data, node.master, node.local and node.mode to new settings infra {pull}16230[#16230] +* Moved http settings to the new settings infrastructure {pull}16188[#16188] +* Migrate network service to the new infra {pull}16187[#16187] +* Convert client.transport settings to new infra {pull}16183[#16183] +* Move discovery.* settings to new Setting infrastructure {pull}16182[#16182] +* Change over to o.e.common.settings.Setting for http settings {pull}16181[#16181] +* Convert "path.*" and "pidfile" to new settings infra {pull}16180[#16180] +* Migrate repository settings to the new settings API {pull}16178[#16178] +* Convert "indices.*" settings to new infra. {pull}16177[#16177] +* Migrate gateway settings to the new settings API. {pull}16175[#16175] +* Convert several node and test level settings {pull}16172[#16172] +* Run Metadata upgrade tool on every version {pull}16168[#16168] +* Check for invalid index settings on metadata upgrade {pull}16156[#16156] +* Validate the settings key if it's simple chars separated by `.` {pull}16120[#16120] +* Validate known global settings on startup {pull}16091[#16091] +* Cut over all index scope settings to the new setting infrastrucuture {pull}16054[#16054] (issues: {issue}12790[#12790], {issue}12854[#12854], {issue}16032[#16032], {issue}6732[#6732]) +* Remove updatability of `index.flush_on_close` {pull}15964[#15964] (issue: {issue}15955[#15955]) +* Move all dynamic settings and their config classes to the index level {pull}15955[#15955] (issue: {issue}6732[#6732]) +* Always require units for bytes and time settings {pull}15948[#15948] (issue: {issue}11437[#11437]) +* Make MetaData parsing less lenient. 
{pull}15828[#15828] +* Move async translog sync logic into IndexService {pull}15584[#15584] +* Remove `index.merge.scheduler.notify_on_failure` and default to `true` {pull}15572[#15572] (issue: {issue}15570[#15570]) +* Remove cache concurrency level settings that no longer apply {pull}14210[#14210] (issues: {issue}13224[#13224], {issue}13717[#13717], {issue}7836[#7836]) + +Similarities:: +* Defining a global default similarity {pull}16682[#16682] (issue: {issue}16594[#16594]) + +Snapshot/Restore:: +* Remove AbstractLegacyBlobContainer {pull}14650[#14650] (issue: {issue}13434[#13434]) + +Stats:: +* Normalize unavailable load average {pull}16061[#16061] (issues: {issue}12049[#12049], {issue}14741[#14741], {issue}15907[#15907], {issue}15932[#15932], {issue}15934[#15934]) +* Add load averages to OS stats on FreeBSD {pull}15934[#15934] (issue: {issue}15917[#15917]) +* Expose pending cluster state queue size in node stats {pull}14040[#14040] (issue: {issue}13610[#13610]) + +Store:: +* Remove support for legacy checksums {pull}16931[#16931] +* Rename index folder to index_uuid {pull}16442[#16442] (issues: {issue}13264[#13264], {issue}13265[#13265], {issue}14512[#14512], {issue}14932[#14932], {issue}15853[#15853]) + +Suggesters:: +* Refactoring of Suggestions {pull}17096[#17096] (issue: {issue}10217[#10217]) + +Task Manager:: +* Add start time and duration to tasks {pull}16829[#16829] +* Combine node name and task id into single string task id {pull}16744[#16744] +* Add task status {pull}16356[#16356] (issue: {issue}16344[#16344]) +* Extend tracking of parent tasks to master node, replication and broadcast actions {pull}15931[#15931] + +Translog:: +* Remove ChannelReference and simplify Views {pull}15898[#15898] +* Simplify TranslogWriter to always write to a stream {pull}15771[#15771] +* Remove TranslogService and fold it into synchronous IndexShard API {pull}13707[#13707] + + + +[[bug-5.0.0-alpha1]] +[float] +=== Bug fixes + +Aggregations:: +* Correct typo in class name 
of StatsAggregator {pull}15264[#15264] (issue: {issue}14730[#14730]) + +Allocation:: +* Replica shards must be failed before primary shards {pull}15686[#15686] + +CRUD:: +* Prevent TransportReplicationAction to route request based on stale local routing table {pull}16274[#16274] (issues: {issue}12573[#12573], {issue}12574[#12574]) +* Resolves the conflict between alias routing and parent routing by applying the alias routing and ignoring the parent routing. {pull}15371[#15371] (issue: {issue}3068[#3068]) + +Cluster:: +* Shard state action channel exceptions {pull}16057[#16057] (issue: {issue}15748[#15748]) + +Geo:: +* Fix multi-field support for GeoPoint types {pull}15702[#15702] (issue: {issue}15701[#15701]) +* Enforce distance in distance query is > 0 {pull}15135[#15135] + +Ingest:: +* The IngestDocument copy constructor should make a deep copy {pull}16248[#16248] (issue: {issue}16246[#16246]) + +Internal:: +* Enable unmap hack for java 9 {pull}16986[#16986] (issue: {issue}1[#1]) +* Fix issues with failed cache loads {pull}14315[#14315] +* Allow parser to move on the START_OBJECT token when parsing search source {pull}14145[#14145] +* Ensure searcher is release if wrapping fails {pull}14107[#14107] +* Avoid deadlocks in Cache#computeIfAbsent {pull}14091[#14091] (issue: {issue}14090[#14090]) + +Java API:: +* Fix potential NPE in SearchSourceBuilder {pull}16905[#16905] (issue: {issue}16902[#16902]) + +Mapping:: +* Fix dynamic mapper when its parent already has an update {pull}17065[#17065] +* Fix copy_to when the target is a dynamic object field. 
{pull}15216[#15216] (issues: {issue}111237[#111237], {issue}11237[#11237]) +* Preserve existing mappings on batch mapping updates {pull}15130[#15130] (issues: {issue}14899[#14899], {issue}15129[#15129]) + +Packaging:: +* Do not pass double-dash arguments on startup {pull}17087[#17087] (issue: {issue}17084[#17084]) + +Plugin Store SMB:: +* Fix calling ensureOpen() on the wrong directory (master forwardport) {pull}16395[#16395] (issue: {issue}16383[#16383]) + +Plugins:: +* CliTool: Messages printed in Terminal should have percent char escaped {pull}16367[#16367] + +Query DSL:: +* `constant_score` query should throw error on more than one filter {pull}17135[#17135] (issue: {issue}17126[#17126]) +* Single IPv4 addresses in IP field term queries {pull}16068[#16068] (issue: {issue}16058[#16058]) +* Make strategy optional in GeoShapeQueryBuilder readFrom and writeTo {pull}13963[#13963] + +Query Refactoring:: +* Query refactoring: set has_parent & has_child types context properly {pull}13863[#13863] +* Make sure equivalent geohashCellQueries are equal after toQuery called {pull}13792[#13792] + +Recovery:: +* Prevent interruption while store checks lucene files for consistency {pull}16308[#16308] +* Mark shard as recovering on the cluster state thread {pull}14276[#14276] (issues: {issue}13766[#13766], {issue}14115[#14115]) + +Search:: +* Fix for search after {pull}16271[#16271] +* Do not be lenient when parsing CIDRs {pull}14874[#14874] (issue: {issue}14862[#14862]) + +Settings:: +* Register bootstrap settings {pull}16513[#16513] +* Add settings filtering to node info requests {pull}16445[#16445] +* Ban write access to system properties {pull}14914[#14914] + +Translog:: +* Mark shard active during recovery; push settings after engine finally inits {pull}16250[#16250] (issues: {issue}14121[#14121], {issue}16209[#16209]) + + + +[[upgrade-5.0.0-alpha1]] +[float] +=== Upgrades + +Core:: +* Upgrade to lucene-6.0.0-f0aa4fc. 
{pull}17075[#17075] +* upgrade to lucene 6.0.0-snapshot-bea235f {pull}16964[#16964] +* Upgrade to Jackson 2.7.1 {pull}16801[#16801] (issue: {issue}16294[#16294]) + +Ingest:: +* Update MaxMind geoip2 version to 2.6 {pull}16837[#16837] (issue: {issue}16801[#16801]) + +Internal:: +* Bump master (3.0-snapshot) to java 8 {pull}13314[#13314] + +Search Templates:: +* Update mustache.java to version 0.9.1 {pull}14053[#14053] (issue: {issue}13224[#13224]) + + From 830e2e049a88d7f57a4bbf3e2cf886ccd73a51ce Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Fri, 18 Mar 2016 15:04:49 +0100 Subject: [PATCH 309/320] Docs: Build release notes --- docs/reference/index.asciidoc | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index 87c400e018cd..abfcb18de2e8 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -47,9 +47,7 @@ include::testing.asciidoc[] include::glossary.asciidoc[] -////////////////////////////////////////// - include::release-notes.asciidoc[] -////////////////////////////////////////// +include::release-notes.asciidoc[] include::redirects.asciidoc[] From c3f6ad89ef215e75e5531eebe6f56648731efd7b Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 18 Mar 2016 11:50:49 -0400 Subject: [PATCH 310/320] Simplify module or plugin file checks --- .../test/resources/packaging/scripts/modules.bash | 13 +++---------- .../test/resources/packaging/scripts/plugins.bash | 11 ++--------- 2 files changed, 5 insertions(+), 19 deletions(-) diff --git a/qa/vagrant/src/test/resources/packaging/scripts/modules.bash b/qa/vagrant/src/test/resources/packaging/scripts/modules.bash index f6c39ca48522..bd6da680da9e 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/modules.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/modules.bash @@ -32,18 +32,11 @@ check_module() { assert_module_or_plugin_directory "$ESMODULES/$name" for file in "$@"; do - 
assert_module_file "$ESMODULES/$name/$file" + assert_module_or_plugin_file "$ESMODULES/$name/$file" done - assert_module_file "$ESMODULES/$name/$name-*.jar" - assert_module_file "$ESMODULES/$name/plugin-descriptor.properties" -} - -assert_module_file() { - local file=$1 - shift - - assert_module_or_plugin_file $file + assert_module_or_plugin_file "$ESMODULES/$name/$name-*.jar" + assert_module_or_plugin_file "$ESMODULES/$name/plugin-descriptor.properties" } check_secure_module() { diff --git a/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash b/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash index da6b9a444272..925beaade098 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash @@ -137,20 +137,13 @@ install_and_check_plugin() { assert_module_or_plugin_directory "$ESPLUGINS/$fullName" if [ $prefix == 'analysis' ]; then - assert_plugin_file "$ESPLUGINS/$fullName/lucene-analyzers-$name-*.jar" + assert_module_or_plugin_file "$ESPLUGINS/$fullName/lucene-analyzers-$name-*.jar" fi for file in "$@"; do - assert_plugin_file "$ESPLUGINS/$fullName/$file" + assert_module_or_plugin_file "$ESPLUGINS/$fullName/$file" done } -assert_plugin_file() { - local file=$1 - shift - - assert_module_or_plugin_file $file -} - # Compare a list of plugin names to the plugins in the plugins pom and see if they are the same # $1 the file containing the list of plugins we want to compare to # $2 description of the source of the plugin list From f71f0d601097f6de098baf2a93ed5e64678e1f68 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 18 Mar 2016 17:22:25 -0700 Subject: [PATCH 311/320] Revert "Build: Switch to maven-publish plugin" This reverts commit a90a2b34fc6fd135b23f6518156078f03804b7ae. 
--- build.gradle | 79 ++++----- buildSrc/.gitignore | 1 - buildSrc/build.gradle | 163 +++++++----------- .../elasticsearch/gradle/BuildPlugin.groovy | 100 +++++------ .../gradle/plugin/PluginBuildPlugin.groovy | 37 ---- .../plugin/PluginPropertiesExtension.groovy | 4 - core/build.gradle | 12 +- distribution/build.gradle | 13 -- distribution/deb/build.gradle | 9 +- distribution/integ-test-zip/build.gradle | 9 +- distribution/rpm/build.gradle | 9 +- distribution/tar/build.gradle | 9 +- distribution/zip/build.gradle | 9 +- modules/build.gradle | 4 + plugins/build.gradle | 2 - settings.gradle | 3 - test/build.gradle | 5 - 17 files changed, 156 insertions(+), 312 deletions(-) delete mode 100644 buildSrc/.gitignore diff --git a/build.gradle b/build.gradle index 7410b6cc2752..6ab00d73881e 100644 --- a/build.gradle +++ b/build.gradle @@ -17,6 +17,7 @@ * under the License. */ +import com.bmuschko.gradle.nexus.NexusPlugin import org.gradle.plugins.ide.eclipse.model.SourceFolder // common maven publishing configuration @@ -24,55 +25,46 @@ subprojects { group = 'org.elasticsearch' version = org.elasticsearch.gradle.VersionProperties.elasticsearch - if (path.startsWith(':x-plugins')) { - // don't try to configure publshing for extra plugins attached to this build - return - } + plugins.withType(NexusPlugin).whenPluginAdded { + modifyPom { + project { + url 'https://github.com/elastic/elasticsearch' + inceptionYear '2009' - plugins.withType(MavenPublishPlugin).whenPluginAdded { - publishing { - publications { - // add license information to generated poms - all { - pom.withXml { XmlProvider xml -> - Node node = xml.asNode() - node.appendNode('inceptionYear', '2009') + scm { + url 'https://github.com/elastic/elasticsearch' + connection 'scm:https://elastic@github.com/elastic/elasticsearch' + developerConnection 'scm:git://github.com/elastic/elasticsearch.git' + } - Node license = node.appendNode('licenses').appendNode('license') - license.appendNode('name', 'The Apache Software 
License, Version 2.0') - license.appendNode('url', 'http://www.apache.org/licenses/LICENSE-2.0.txt') - license.appendNode('distribution', 'repo') + licenses { + license { + name 'The Apache Software License, Version 2.0' + url 'http://www.apache.org/licenses/LICENSE-2.0.txt' + distribution 'repo' } } } - repositories.maven { - name 'sonatype' - if (version.endsWith('-SNAPSHOT')) { - url 'https://oss.sonatype.org/content/repositories/snapshots/' - } else { - url 'https://oss.sonatype.org/service/local/staging/deploy/maven2/' + } + extraArchive { + javadoc = true + tests = false + } + // we have our own username/password prompts so that they only happen once + // TODO: add gpg signing prompts + project.gradle.taskGraph.whenReady { taskGraph -> + if (taskGraph.allTasks.any { it.name == 'uploadArchives' }) { + Console console = System.console() + if (project.hasProperty('nexusUsername') == false) { + String nexusUsername = console.readLine('\nNexus username: ') + project.rootProject.allprojects.each { + it.ext.nexusUsername = nexusUsername + } } - - // It would be nice to pass a custom impl of PasswordCredentials - // that could lazily read username/password from the console if not - // present as properties. However, gradle's credential handling is - // completely broken for custom impls. It checks that the class - // passed in is exactly PasswordCredentials or AwsCredentials. 
- // So instead, we must rely on heuristics of "are we publishing" - // by inspecting the command line, stash the credentials - // once read in the root project, and set them on each project - if (gradle.startParameter.taskNames.contains('publish')) { - Console console = System.console() - if (project.rootProject.hasProperty('nexusUsername') == false) { - project.rootProject.ext.nexusUsername = console.readLine('\nNexus username: ') - } - if (project.rootProject.hasProperty('nexusPassword') == false) { - project.rootProject.ext.nexusPassword = new String(console.readPassword("\nNexus password: ")) - } - - credentials { - username = project.rootProject.nexusUsername - password = project.rootProject.nexusPassword + if (project.hasProperty('nexusPassword') == false) { + String nexusPassword = new String(console.readPassword('\nNexus password: ')) + project.rootProject.allprojects.each { + it.ext.nexusPassword = nexusPassword } } } @@ -80,7 +72,6 @@ subprojects { } } - allprojects { // injecting groovy property variables into all projects project.ext { diff --git a/buildSrc/.gitignore b/buildSrc/.gitignore deleted file mode 100644 index bfdaf60b97ed..000000000000 --- a/buildSrc/.gitignore +++ /dev/null @@ -1 +0,0 @@ -build-bootstrap/ diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 30053db683e5..f8d806795049 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -1,3 +1,5 @@ +import java.nio.file.Files + /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -17,21 +19,25 @@ * under the License. 
*/ -import java.nio.file.Files - +// we must use buildscript + apply so that an external plugin +// can apply this file, since the plugins directive is not +// supported through file includes +buildscript { + repositories { + jcenter() + } + dependencies { + classpath 'com.bmuschko:gradle-nexus-plugin:2.3.1' + } +} apply plugin: 'groovy' +apply plugin: 'com.bmuschko.nexus' +// TODO: move common IDE configuration to a common file to include +apply plugin: 'idea' +apply plugin: 'eclipse' group = 'org.elasticsearch.gradle' - -if (project == rootProject) { - // change the build dir used during build init, so that doing a clean - // won't wipe out the buildscript jar - buildDir = 'build-bootstrap' -} - -/***************************************************************************** - * Propagating version.properties to the rest of the build * - *****************************************************************************/ +archivesBaseName = 'build-tools' Properties props = new Properties() props.load(project.file('version.properties').newDataInputStream()) @@ -45,6 +51,32 @@ if (snapshot) { props.put("elasticsearch", version); } + +repositories { + mavenCentral() + maven { + name 'sonatype-snapshots' + url "https://oss.sonatype.org/content/repositories/snapshots/" + } + jcenter() +} + +dependencies { + compile gradleApi() + compile localGroovy() + compile "com.carrotsearch.randomizedtesting:junit4-ant:${props.getProperty('randomizedrunner')}" + compile("junit:junit:${props.getProperty('junit')}") { + transitive = false + } + compile 'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3' + compile 'com.netflix.nebula:gradle-info-plugin:3.0.3' + compile 'org.eclipse.jgit:org.eclipse.jgit:3.2.0.201312181205-r' + compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE.... 
+ compile 'de.thetaphi:forbiddenapis:2.0' + compile 'com.bmuschko:gradle-nexus-plugin:2.3.1' + compile 'org.apache.rat:apache-rat:0.11' +} + File tempPropertiesFile = new File(project.buildDir, "version.properties") task writeVersionProperties { inputs.properties(props) @@ -63,92 +95,31 @@ processResources { from tempPropertiesFile } -/***************************************************************************** - * Dependencies used by the entire build * - *****************************************************************************/ - -repositories { - jcenter() +extraArchive { + javadoc = false + tests = false } -dependencies { - compile gradleApi() - compile localGroovy() - compile "com.carrotsearch.randomizedtesting:junit4-ant:${props.getProperty('randomizedrunner')}" - compile("junit:junit:${props.getProperty('junit')}") { - transitive = false - } - compile 'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3' - compile 'com.netflix.nebula:nebula-publishing-plugin:4.4.4' - compile 'com.netflix.nebula:gradle-info-plugin:3.0.3' - compile 'org.eclipse.jgit:org.eclipse.jgit:3.2.0.201312181205-r' - compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE.... 
- compile 'de.thetaphi:forbiddenapis:2.0' - compile 'com.bmuschko:gradle-nexus-plugin:2.3.1' - compile 'org.apache.rat:apache-rat:0.11' -} - - -/***************************************************************************** - * Bootstrap repositories and IDE setup * - *****************************************************************************/ -// this will only happen when buildSrc is built on its own during build init -if (project == rootProject) { - - repositories { - mavenCentral() - maven { - name 'sonatype-snapshots' - url "https://oss.sonatype.org/content/repositories/snapshots/" - } - } - - apply plugin: 'idea' - apply plugin: 'eclipse' - - idea { - module { - inheritOutputDirs = false - outputDir = file('build-idea/classes/main') - testOutputDir = file('build-idea/classes/test') - } - } - - eclipse { - classpath { - defaultOutputDir = file('build-eclipse') - } - } - - task copyEclipseSettings(type: Copy) { - from project.file('src/main/resources/eclipse.settings') - into '.settings' - } - // otherwise .settings is not nuked entirely - tasks.cleanEclipse { - delete '.settings' - } - tasks.eclipse.dependsOn(cleanEclipse, copyEclipseSettings) -} - -/***************************************************************************** - * Normal project checks * - *****************************************************************************/ - -// this happens when included as a normal project in the build, which we do -// to enforce precommit checks like forbidden apis, as well as setup publishing -if (project != rootProject) { - apply plugin: 'nebula.maven-base-publish' - apply plugin: 'nebula.maven-scm' - apply plugin: 'nebula.source-jar' - apply plugin: 'nebula.javadoc-jar' - - publishing { - publications { - nebula { - artifactId 'build-tools' - } - } +idea { + module { + inheritOutputDirs = false + outputDir = file('build-idea/classes/main') + testOutputDir = file('build-idea/classes/test') } } +eclipse { + classpath { + defaultOutputDir = file('build-eclipse') 
+ } +} + +task copyEclipseSettings(type: Copy) { + from project.file('src/main/resources/eclipse.settings') + into '.settings' +} +// otherwise .settings is not nuked entirely +tasks.cleanEclipse { + delete '.settings' +} +tasks.eclipse.dependsOn(cleanEclipse, copyEclipseSettings) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index a1b6e1972c2f..598be546f26e 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -32,8 +32,7 @@ import org.gradle.api.artifacts.ModuleVersionIdentifier import org.gradle.api.artifacts.ProjectDependency import org.gradle.api.artifacts.ResolvedArtifact import org.gradle.api.artifacts.dsl.RepositoryHandler -import org.gradle.api.publish.maven.MavenPublication -import org.gradle.api.publish.maven.plugins.MavenPublishPlugin +import org.gradle.api.artifacts.maven.MavenPom import org.gradle.api.tasks.bundling.Jar import org.gradle.api.tasks.compile.JavaCompile import org.gradle.internal.jvm.Jvm @@ -61,6 +60,7 @@ class BuildPlugin implements Plugin { project.pluginManager.apply('nebula.info-java') project.pluginManager.apply('nebula.info-scm') project.pluginManager.apply('nebula.info-jar') + project.pluginManager.apply('com.bmuschko.nexus') project.pluginManager.apply(ProvidedBasePlugin) globalBuildInfo(project) @@ -68,7 +68,6 @@ class BuildPlugin implements Plugin { configureConfigurations(project) project.ext.versions = VersionProperties.versions configureCompile(project) - configurePublishing(project) configureTest(project) configurePrecommit(project) @@ -261,6 +260,48 @@ class BuildPlugin implements Plugin { project.configurations.compile.dependencies.all(disableTransitiveDeps) project.configurations.testCompile.dependencies.all(disableTransitiveDeps) project.configurations.provided.dependencies.all(disableTransitiveDeps) + + // add 
exclusions to the pom directly, for each of the transitive deps of this project's deps + project.modifyPom { MavenPom pom -> + pom.withXml { XmlProvider xml -> + // first find if we have dependencies at all, and grab the node + NodeList depsNodes = xml.asNode().get('dependencies') + if (depsNodes.isEmpty()) { + return + } + + // check each dependency for any transitive deps + for (Node depNode : depsNodes.get(0).children()) { + String groupId = depNode.get('groupId').get(0).text() + String artifactId = depNode.get('artifactId').get(0).text() + String version = depNode.get('version').get(0).text() + + // collect the transitive deps now that we know what this dependency is + String depConfig = transitiveDepConfigName(groupId, artifactId, version) + Configuration configuration = project.configurations.findByName(depConfig) + if (configuration == null) { + continue // we did not make this dep non-transitive + } + Set artifacts = configuration.resolvedConfiguration.resolvedArtifacts + if (artifacts.size() <= 1) { + // this dep has no transitive deps (or the only artifact is itself) + continue + } + + // we now know we have something to exclude, so add the exclusion elements + Node exclusions = depNode.appendNode('exclusions') + for (ResolvedArtifact transitiveArtifact : artifacts) { + ModuleVersionIdentifier transitiveDep = transitiveArtifact.moduleVersion.id + if (transitiveDep.group == groupId && transitiveDep.name == artifactId) { + continue; // don't exclude the dependency itself! + } + Node exclusion = exclusions.appendNode('exclusion') + exclusion.appendNode('groupId', transitiveDep.group) + exclusion.appendNode('artifactId', transitiveDep.name) + } + } + } + } } /** Adds repositores used by ES dependencies */ @@ -340,59 +381,6 @@ class BuildPlugin implements Plugin { } } - /** - * Adds a hook to all publications that will effectively make the maven pom transitive dependency free. 
- */ - private static void configurePublishing(Project project) { - project.plugins.withType(MavenPublishPlugin.class).whenPluginAdded { - project.publishing { - publications { - all { MavenPublication publication -> // we only deal with maven - // add exclusions to the pom directly, for each of the transitive deps of this project's deps - publication.pom.withXml { XmlProvider xml -> - // first find if we have dependencies at all, and grab the node - NodeList depsNodes = xml.asNode().get('dependencies') - if (depsNodes.isEmpty()) { - return - } - - // check each dependency for any transitive deps - for (Node depNode : depsNodes.get(0).children()) { - String groupId = depNode.get('groupId').get(0).text() - String artifactId = depNode.get('artifactId').get(0).text() - String version = depNode.get('version').get(0).text() - - // collect the transitive deps now that we know what this dependency is - String depConfig = transitiveDepConfigName(groupId, artifactId, version) - Configuration configuration = project.configurations.findByName(depConfig) - if (configuration == null) { - continue // we did not make this dep non-transitive - } - Set artifacts = configuration.resolvedConfiguration.resolvedArtifacts - if (artifacts.size() <= 1) { - // this dep has no transitive deps (or the only artifact is itself) - continue - } - - // we now know we have something to exclude, so add the exclusion elements - Node exclusions = depNode.appendNode('exclusions') - for (ResolvedArtifact transitiveArtifact : artifacts) { - ModuleVersionIdentifier transitiveDep = transitiveArtifact.moduleVersion.id - if (transitiveDep.group == groupId && transitiveDep.name == artifactId) { - continue; // don't exclude the dependency itself! 
- } - Node exclusion = exclusions.appendNode('exclusion') - exclusion.appendNode('groupId', transitiveDep.group) - exclusion.appendNode('artifactId', transitiveDep.name) - } - } - } - } - } - } - } - } - /** Returns a closure of common configuration shared by unit and integration tests. */ static Closure commonTestConfig(Project project) { return { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index f1123678eae3..b04f959e0681 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -18,14 +18,11 @@ */ package org.elasticsearch.gradle.plugin -import nebula.plugin.publishing.maven.MavenManifestPlugin -import nebula.plugin.publishing.maven.MavenScmPlugin import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.test.RunTask import org.gradle.api.Project import org.gradle.api.artifacts.Dependency -import org.gradle.api.publish.maven.MavenPublication import org.gradle.api.tasks.SourceSet import org.gradle.api.tasks.bundling.Zip @@ -37,7 +34,6 @@ public class PluginBuildPlugin extends BuildPlugin { @Override public void apply(Project project) { super.apply(project) - configureDependencies(project) // this afterEvaluate must happen before the afterEvaluate added by integTest creation, // so that the file name resolution for installing the plugin will be setup @@ -54,10 +50,6 @@ public class PluginBuildPlugin extends BuildPlugin { } else { project.integTest.clusterConfig.plugin(name, project.bundlePlugin.outputs.files) project.tasks.run.clusterConfig.plugin(name, project.bundlePlugin.outputs.files) - - if (project.pluginProperties.extension.publish) { - configurePublishing(project) - } } project.namingConventions { @@ -67,7 +59,6 @@ public 
class PluginBuildPlugin extends BuildPlugin { } createIntegTestTask(project) createBundleTask(project) - configurePublishing(project) project.tasks.create('run', RunTask) // allow running ES with this plugin in the foreground of a build } @@ -134,32 +125,4 @@ public class PluginBuildPlugin extends BuildPlugin { project.configurations.getByName('default').extendsFrom = [] project.artifacts.add('default', bundle) } - - /** - * Adds the plugin jar and zip as publications. - */ - private static void configurePublishing(Project project) { - project.plugins.apply(MavenScmPlugin.class) - project.plugins.apply(MavenManifestPlugin.class) - - project.publishing { - publications { - nebula { - artifact project.bundlePlugin - pom.withXml { - // overwrite the name/description in the pom nebula set up - Node root = asNode() - for (Node node : root.children()) { - if (node.name() == 'name') { - node.setValue(name) - } else if (node.name() == 'description') { - node.setValue(project.pluginProperties.extension.description) - } - } - } - } - } - } - - } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy index 9429b3a4d00b..7b949b3e1da1 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy @@ -42,10 +42,6 @@ class PluginPropertiesExtension { @Input boolean isolated = true - /** Whether the plugin should be published to maven. 
*/ - @Input - boolean publish = false - PluginPropertiesExtension(Project project) { name = project.name version = project.version diff --git a/core/build.gradle b/core/build.gradle index 3f66db767b59..ab3754e72ff6 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -22,20 +22,10 @@ import com.carrotsearch.gradle.junit4.RandomizedTestingTask import org.elasticsearch.gradle.BuildPlugin apply plugin: 'elasticsearch.build' +apply plugin: 'com.bmuschko.nexus' apply plugin: 'nebula.optional-base' -apply plugin: 'nebula.maven-base-publish' -apply plugin: 'nebula.maven-scm' -apply plugin: 'nebula.source-jar' -apply plugin: 'nebula.javadoc-jar' archivesBaseName = 'elasticsearch' -publishing { - publications { - nebula { - artifactId 'elasticsearch' - } - } -} dependencies { diff --git a/distribution/build.gradle b/distribution/build.gradle index eb12100fa625..6936f898d957 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -157,19 +157,6 @@ subprojects { MavenFilteringHack.filter(it, expansions) } } - - /***************************************************************************** - * Publishing setup * - *****************************************************************************/ - apply plugin: 'nebula.maven-base-publish' - apply plugin: 'nebula.maven-scm' - publishing { - publications { - nebula { - artifactId 'elasticsearch' - } - } - } } /***************************************************************************** diff --git a/distribution/deb/build.gradle b/distribution/deb/build.gradle index 073039c50174..d9bd8447ab90 100644 --- a/distribution/deb/build.gradle +++ b/distribution/deb/build.gradle @@ -36,14 +36,7 @@ task buildDeb(type: Deb) { artifacts { 'default' buildDeb -} - -publishing { - publications { - nebula { - artifact buildDeb - } - } + archives buildDeb } integTest { diff --git a/distribution/integ-test-zip/build.gradle b/distribution/integ-test-zip/build.gradle index 8507be0e18e9..23191ff03a4b 100644 --- 
a/distribution/integ-test-zip/build.gradle +++ b/distribution/integ-test-zip/build.gradle @@ -24,14 +24,7 @@ task buildZip(type: Zip) { artifacts { 'default' buildZip -} - -publishing { - publications { - nebula { - artifact buildZip - } - } + archives buildZip } integTest.dependsOn buildZip diff --git a/distribution/rpm/build.gradle b/distribution/rpm/build.gradle index 599c1d95ecef..2ab78fe7e41b 100644 --- a/distribution/rpm/build.gradle +++ b/distribution/rpm/build.gradle @@ -33,14 +33,7 @@ task buildRpm(type: Rpm) { artifacts { 'default' buildRpm -} - -publishing { - publications { - nebula { - artifact buildRpm - } - } + archives buildRpm } integTest { diff --git a/distribution/tar/build.gradle b/distribution/tar/build.gradle index 8b3823b35137..7230ab50799e 100644 --- a/distribution/tar/build.gradle +++ b/distribution/tar/build.gradle @@ -26,12 +26,5 @@ task buildTar(type: Tar) { artifacts { 'default' buildTar -} - -publishing { - publications { - nebula { - artifact buildTar - } - } + archives buildTar } diff --git a/distribution/zip/build.gradle b/distribution/zip/build.gradle index 8507be0e18e9..23191ff03a4b 100644 --- a/distribution/zip/build.gradle +++ b/distribution/zip/build.gradle @@ -24,14 +24,7 @@ task buildZip(type: Zip) { artifacts { 'default' buildZip -} - -publishing { - publications { - nebula { - artifact buildZip - } - } + archives buildZip } integTest.dependsOn buildZip diff --git a/modules/build.gradle b/modules/build.gradle index 4b88dfd703f6..3cafe7d903f7 100644 --- a/modules/build.gradle +++ b/modules/build.gradle @@ -40,4 +40,8 @@ subprojects { throw new InvalidModelException("Modules cannot disable isolation") } } + + // these are implementation details of our build, no need to publish them! 
+ install.enabled = false + uploadArchives.enabled = false } diff --git a/plugins/build.gradle b/plugins/build.gradle index 4c33260eba59..e49b08c6015c 100644 --- a/plugins/build.gradle +++ b/plugins/build.gradle @@ -27,7 +27,5 @@ configure(subprojects.findAll { it.parent.path == project.path }) { esplugin { // for local ES plugins, the name of the plugin is the same as the directory name project.name - // only publish non examples - publish project.name.contains('example') == false } } diff --git a/settings.gradle b/settings.gradle index e0b9df865d64..d03cac653eee 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1,7 +1,6 @@ rootProject.name = 'elasticsearch' List projects = [ - 'build-tools', 'rest-api-spec', 'core', 'distribution:integ-test-zip', @@ -60,8 +59,6 @@ if (isEclipse) { include projects.toArray(new String[0]) -project(':build-tools').projectDir = new File(rootProject.projectDir, 'buildSrc') - if (isEclipse) { project(":core").projectDir = new File(rootProject.projectDir, 'core/src/main') project(":core").buildFileName = 'eclipse-build.gradle' diff --git a/test/build.gradle b/test/build.gradle index d80125338f78..7feb332b717f 100644 --- a/test/build.gradle +++ b/test/build.gradle @@ -40,9 +40,4 @@ subprojects { // TODO: why is the test framework pulled in... 
forbiddenApisMain.enabled = false jarHell.enabled = false - - apply plugin: 'nebula.maven-base-publish' - apply plugin: 'nebula.maven-scm' - apply plugin: 'nebula.source-jar' - apply plugin: 'nebula.javadoc-jar' } From ee95c0a3843434b6fa43c8e448a192118368e6f4 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Sat, 19 Mar 2016 11:35:05 +0100 Subject: [PATCH 312/320] Don't use PosixPermission on Windows, when creating temp plugin folders --- .../plugins/InstallPluginCommand.java | 100 ++++++++++-------- 1 file changed, 53 insertions(+), 47 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index b83ca5c5fc62..7a3b254c42db 100644 --- a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -21,6 +21,7 @@ package org.elasticsearch.plugins; import joptsimple.OptionSet; import joptsimple.OptionSpec; +import org.apache.lucene.util.Constants; import org.apache.lucene.util.IOUtils; import org.elasticsearch.Build; import org.elasticsearch.Version; @@ -68,9 +69,9 @@ import static org.elasticsearch.common.util.set.Sets.newHashSet; * * The install command takes a plugin id, which may be any of the following: *
          - *
        • An official elasticsearch plugin name
        • - *
        • Maven coordinates to a plugin zip
        • - *
        • A URL to a plugin zip
        • + *
        • An official elasticsearch plugin name
        • + *
        • Maven coordinates to a plugin zip
        • + *
        • A URL to a plugin zip
        • *
        * * Plugins are packaged as zip files. Each packaged plugin must contain a @@ -79,9 +80,9 @@ import static org.elasticsearch.common.util.set.Sets.newHashSet; * The installation process first extracts the plugin files into a temporary * directory in order to verify the plugin satisfies the following requirements: *
          - *
        • Jar hell does not exist, either between the plugin's own jars, or with elasticsearch
        • - *
        • The plugin is not a module already provided with elasticsearch
        • - *
        • If the plugin contains extra security permissions, the policy file is validated
        • + *
        • Jar hell does not exist, either between the plugin's own jars, or with elasticsearch
        • + *
        • The plugin is not a module already provided with elasticsearch
        • + *
        • If the plugin contains extra security permissions, the policy file is validated
        • *
        *

        * A plugin may also contain an optional {@code bin} directory which contains scripts. The @@ -99,34 +100,34 @@ class InstallPluginCommand extends Command { // TODO: make this a resource file generated by gradle static final Set MODULES = unmodifiableSet(newHashSet( - "ingest-grok", - "lang-expression", - "lang-groovy", - "lang-painless", - "reindex")); + "ingest-grok", + "lang-expression", + "lang-groovy", + "lang-painless", + "reindex")); // TODO: make this a resource file generated by gradle static final Set OFFICIAL_PLUGINS = unmodifiableSet(new LinkedHashSet<>(Arrays.asList( - "analysis-icu", - "analysis-kuromoji", - "analysis-phonetic", - "analysis-smartcn", - "analysis-stempel", - "delete-by-query", - "discovery-azure", - "discovery-ec2", - "discovery-gce", - "ingest-attachment", - "ingest-geoip", - "lang-javascript", - "lang-python", - "mapper-attachments", - "mapper-murmur3", - "mapper-size", - "repository-azure", - "repository-hdfs", - "repository-s3", - "store-smb"))); + "analysis-icu", + "analysis-kuromoji", + "analysis-phonetic", + "analysis-smartcn", + "analysis-stempel", + "delete-by-query", + "discovery-azure", + "discovery-ec2", + "discovery-gce", + "ingest-attachment", + "ingest-geoip", + "lang-javascript", + "lang-python", + "mapper-attachments", + "mapper-murmur3", + "mapper-size", + "repository-azure", + "repository-hdfs", + "repository-s3", + "store-smb"))); private final Environment env; private final OptionSpec batchOption; @@ -136,7 +137,7 @@ class InstallPluginCommand extends Command { super("Install a plugin"); this.env = env; this.batchOption = parser.acceptsAll(Arrays.asList("b", "batch"), - "Enable batch mode explicitly, automatic confirmation of security permission"); + "Enable batch mode explicitly, automatic confirmation of security permission"); this.arguments = parser.nonOptions("plugin id"); } @@ -182,10 +183,10 @@ class InstallPluginCommand extends Command { final String url; if 
(System.getProperty(PROPERTY_SUPPORT_STAGING_URLS, "false").equals("true")) { url = String.format(Locale.ROOT, "https://download.elastic.co/elasticsearch/staging/%1$s-%2$s/org/elasticsearch/plugin/%3$s/%1$s/%3$s-%1$s.zip", - version, Build.CURRENT.shortHash(), pluginId); + version, Build.CURRENT.shortHash(), pluginId); } else { url = String.format(Locale.ROOT, "https://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%1$s/%2$s/%1$s-%2$s.zip", - pluginId, version); + pluginId, version); } terminal.println("-> Downloading " + pluginId + " from elastic"); return downloadZipAndChecksum(url, tmpDir); @@ -195,7 +196,7 @@ class InstallPluginCommand extends Command { String[] coordinates = pluginId.split(":"); if (coordinates.length == 3 && pluginId.contains("/") == false) { String mavenUrl = String.format(Locale.ROOT, "https://repo1.maven.org/maven2/%1$s/%2$s/%3$s/%2$s-%3$s.zip", - coordinates[0].replace(".", "/") /* groupId */, coordinates[1] /* artifactId */, coordinates[2] /* version */); + coordinates[0].replace(".", "/") /* groupId */, coordinates[1] /* artifactId */, coordinates[2] /* version */); terminal.println("-> Downloading " + pluginId + " from maven central"); return downloadZipAndChecksum(mavenUrl, tmpDir); } @@ -241,15 +242,20 @@ class InstallPluginCommand extends Command { private Path unzip(Path zip, Path pluginsDir) throws IOException, UserError { // unzip plugin to a staging temp dir - Set perms = new HashSet<>(); - perms.add(PosixFilePermission.OWNER_EXECUTE); - perms.add(PosixFilePermission.OWNER_READ); - perms.add(PosixFilePermission.OWNER_WRITE); - perms.add(PosixFilePermission.GROUP_READ); - perms.add(PosixFilePermission.GROUP_EXECUTE); - perms.add(PosixFilePermission.OTHERS_READ); - perms.add(PosixFilePermission.OTHERS_EXECUTE); - Path target = Files.createTempDirectory(pluginsDir, ".installing-", PosixFilePermissions.asFileAttribute(perms)); + final Path target; + if (Constants.WINDOWS) { + target = 
Files.createTempDirectory(pluginsDir, ".installing-"); + } else { + Set perms = new HashSet<>(); + perms.add(PosixFilePermission.OWNER_EXECUTE); + perms.add(PosixFilePermission.OWNER_READ); + perms.add(PosixFilePermission.OWNER_WRITE); + perms.add(PosixFilePermission.GROUP_READ); + perms.add(PosixFilePermission.GROUP_EXECUTE); + perms.add(PosixFilePermission.OTHERS_READ); + perms.add(PosixFilePermission.OTHERS_EXECUTE); + target = Files.createTempDirectory(pluginsDir, ".installing-", PosixFilePermissions.asFileAttribute(perms)); + } Files.createDirectories(target); boolean hasEsDir = false; @@ -279,7 +285,7 @@ class InstallPluginCommand extends Command { if (entry.isDirectory() == false) { try (OutputStream out = Files.newOutputStream(targetFile)) { int len; - while((len = zipInput.read(buffer)) >= 0) { + while ((len = zipInput.read(buffer)) >= 0) { out.write(buffer, 0, len); } } @@ -408,7 +414,7 @@ class InstallPluginCommand extends Command { perms.add(PosixFilePermission.OTHERS_EXECUTE); } - try (DirectoryStream stream = Files.newDirectoryStream(tmpBinDir)) { + try (DirectoryStream stream = Files.newDirectoryStream(tmpBinDir)) { for (Path srcFile : stream) { if (Files.isDirectory(srcFile)) { throw new UserError(ExitCodes.DATA_ERROR, "Directories not allowed in bin dir for plugin " + info.getName() + ", found " + srcFile.getFileName()); @@ -442,7 +448,7 @@ class InstallPluginCommand extends Command { Files.getFileAttributeView(destConfigDir.getParent(), PosixFileAttributeView.class).readAttributes(); setOwnerGroup(destConfigDir, destConfigDirAttributes); - try (DirectoryStream stream = Files.newDirectoryStream(tmpConfigDir)) { + try (DirectoryStream stream = Files.newDirectoryStream(tmpConfigDir)) { for (Path srcFile : stream) { if (Files.isDirectory(srcFile)) { throw new UserError(ExitCodes.DATA_ERROR, "Directories not allowed in config dir for plugin " + info.getName()); From ef4293a993014379c54e2399e692c6387d80f0ca Mon Sep 17 00:00:00 2001 From: Boaz Leskes 
Date: Sat, 19 Mar 2016 12:02:42 +0100 Subject: [PATCH 313/320] Disable more usage of PosixPermission on Windows in InstallPluginCommand Releates to #17201 --- .../plugins/InstallPluginCommand.java | 34 ++++++++++++------- 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index 7a3b254c42db..18e996f6f37d 100644 --- a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -403,15 +403,17 @@ class InstallPluginCommand extends Command { } Files.createDirectory(destBinDir); - // setup file attributes for the installed files to those of the parent dir Set perms = new HashSet<>(); - PosixFileAttributeView binAttrs = Files.getFileAttributeView(destBinDir.getParent(), PosixFileAttributeView.class); - if (binAttrs != null) { - perms = new HashSet<>(binAttrs.readAttributes().permissions()); - // setting execute bits, since this just means "the file is executable", and actual execution requires read - perms.add(PosixFilePermission.OWNER_EXECUTE); - perms.add(PosixFilePermission.GROUP_EXECUTE); - perms.add(PosixFilePermission.OTHERS_EXECUTE); + if (Constants.WINDOWS == false) { + // setup file attributes for the installed files to those of the parent dir + PosixFileAttributeView binAttrs = Files.getFileAttributeView(destBinDir.getParent(), PosixFileAttributeView.class); + if (binAttrs != null) { + perms = new HashSet<>(binAttrs.readAttributes().permissions()); + // setting execute bits, since this just means "the file is executable", and actual execution requires read + perms.add(PosixFilePermission.OWNER_EXECUTE); + perms.add(PosixFilePermission.GROUP_EXECUTE); + perms.add(PosixFilePermission.OTHERS_EXECUTE); + } } try (DirectoryStream stream = Files.newDirectoryStream(tmpBinDir)) { @@ -444,9 +446,15 @@ class 
InstallPluginCommand extends Command { // create the plugin's config dir "if necessary" Files.createDirectories(destConfigDir); - final PosixFileAttributes destConfigDirAttributes = - Files.getFileAttributeView(destConfigDir.getParent(), PosixFileAttributeView.class).readAttributes(); - setOwnerGroup(destConfigDir, destConfigDirAttributes); + final PosixFileAttributes destConfigDirAttributes; + if (Constants.WINDOWS) { + destConfigDirAttributes = null; + } else { + destConfigDirAttributes = + Files.getFileAttributeView(destConfigDir.getParent(), PosixFileAttributeView.class).readAttributes(); + setOwnerGroup(destConfigDir, destConfigDirAttributes); + + } try (DirectoryStream stream = Files.newDirectoryStream(tmpConfigDir)) { for (Path srcFile : stream) { @@ -457,7 +465,9 @@ class InstallPluginCommand extends Command { Path destFile = destConfigDir.resolve(tmpConfigDir.relativize(srcFile)); if (Files.exists(destFile) == false) { Files.copy(srcFile, destFile); - setOwnerGroup(destFile, destConfigDirAttributes); + if (Constants.WINDOWS == false) { + setOwnerGroup(destFile, destConfigDirAttributes); + } } } } From 032678f0c38766b47351e12502a497ebfa5cb54c Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Sun, 20 Mar 2016 21:22:56 +0100 Subject: [PATCH 314/320] Remove unused refreshScheduledFuture in IndexShard It has been replaced by the logic in IndexService --- .../java/org/elasticsearch/index/shard/IndexShard.java | 8 -------- 1 file changed, 8 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 5e2df0300012..59b3f367c65a 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -48,9 +48,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.Callback; import 
org.elasticsearch.common.util.concurrent.AbstractRunnable; -import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.util.concurrent.SuspendableRefContainer; -import org.elasticsearch.gateway.MetaDataStateFormat; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.NodeServicesProvider; @@ -122,7 +120,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; @@ -159,7 +156,6 @@ public class IndexShard extends AbstractIndexShardComponent { * being indexed/deleted. */ private final AtomicLong writingBytes = new AtomicLong(); - private volatile ScheduledFuture refreshScheduledFuture; protected volatile ShardRouting shardRouting; protected volatile IndexShardState state; protected final AtomicReference currentEngineReference = new AtomicReference<>(); @@ -784,10 +780,6 @@ public class IndexShard extends AbstractIndexShardComponent { public void close(String reason, boolean flushEngine) throws IOException { synchronized (mutex) { try { - if (state != IndexShardState.CLOSED) { - FutureUtils.cancel(refreshScheduledFuture); - refreshScheduledFuture = null; - } changeState(IndexShardState.CLOSED, reason); } finally { final Engine engine = this.currentEngineReference.getAndSet(null); From a7793f7271baf3707f249eb9747dac71170ba800 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 18 Mar 2016 13:33:58 -0400 Subject: [PATCH 315/320] Cutover to elastic Vagrant boxes This commit cuts the Vagrant tests over to the elastic Vagrant boxes. 
--- Vagrantfile | 39 +++++++++++++++------------------------ qa/vagrant/build.gradle | 13 +++++++++++-- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/Vagrantfile b/Vagrantfile index 454d114f1a25..4f8ee7164f65 100644 --- a/Vagrantfile +++ b/Vagrantfile @@ -23,15 +23,15 @@ Vagrant.configure(2) do |config| config.vm.define "ubuntu-1204" do |config| - config.vm.box = "ubuntu/precise64" + config.vm.box = "elastic/ubuntu-12.04-x86_64" ubuntu_common config end config.vm.define "ubuntu-1404" do |config| - config.vm.box = "ubuntu/trusty64" + config.vm.box = "elastic/ubuntu-14.04-x86_64" ubuntu_common config end config.vm.define "ubuntu-1504" do |config| - config.vm.box = "ubuntu/vivid64" + config.vm.box = "elastic/ubuntu-15.04-x86_64" ubuntu_common config, extra: <<-SHELL # Install Jayatana so we can work around it being present. [ -f /usr/share/java/jayatanaag.jar ] || install jayatana @@ -41,44 +41,35 @@ Vagrant.configure(2) do |config| # get the sun jdk on there just aren't worth it. We have jessie for testing # debian and it works fine. config.vm.define "debian-8" do |config| - config.vm.box = "debian/jessie64" - deb_common config, - 'echo deb http://http.debian.net/debian jessie-backports main > /etc/apt/sources.list.d/backports.list', 'backports' + config.vm.box = "elastic/debian-8-x86_64" + deb_common config, 'echo deb http://http.debian.net/debian jessie-backports main > /etc/apt/sources.list.d/backports.list', 'backports' end config.vm.define "centos-6" do |config| - config.vm.box = "boxcutter/centos67" + config.vm.box = "elastic/centos-6-x86_64" rpm_common config end config.vm.define "centos-7" do |config| - # There is a centos/7 box but it doesn't have rsync or virtualbox guest - # stuff on there so its slow to use. So chef it is.... 
- config.vm.box = "boxcutter/centos71" + config.vm.box = "elastic/centos-7-x86_64" + rpm_common config + end + config.vm.define "oel-6" do |config| + config.vm.box = "elastic/oraclelinux-6-x86_64" rpm_common config end - # This box hangs _forever_ on ```yum check-update```. I have no idea why. - # config.vm.define "oel-6", autostart: false do |config| - # config.vm.box = "boxcutter/oel66" - # rpm_common(config) - # end config.vm.define "oel-7" do |config| - config.vm.box = "boxcutter/oel70" + config.vm.box = "elastic/oraclelinux-7-x86_64" rpm_common config end config.vm.define "fedora-22" do |config| - # Fedora hosts their own 'cloud' images that aren't in Vagrant's Atlas but - # and are missing required stuff like rsync. It'd be nice if we could use - # them but they much slower to get up and running then the boxcutter image. - config.vm.box = "boxcutter/fedora22" + config.vm.box = "elastic/fedora-22-x86_64" dnf_common config end config.vm.define "opensuse-13" do |config| - config.vm.box = "chef/opensuse-13" - config.vm.box_url = "http://opscode-vm-bento.s3.amazonaws.com/vagrant/virtualbox/opscode_opensuse-13.2-x86_64_chef-provisionerless.box" + config.vm.box = "elastic/opensuse-13-x86_64" opensuse_common config end - # The SLES boxes are not considered to be highest quality, but seem to be sufficient for a test run config.vm.define "sles-12" do |config| - config.vm.box = "idar/sles12" + config.vm.box = "elastic/sles-12-x86_64" sles_common config end # Switch the default share for the project root from /vagrant to diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index d0be6f139465..1df5b7dc402a 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -26,8 +26,17 @@ String testScripts = '*.bats' String testCommand = "cd \$TESTROOT && sudo bats --tap \$BATS/$testScripts" String smokeTestCommand = 'echo I work' List representativeBoxes = ['ubuntu-1404', 'centos-7'] -List boxes = representativeBoxes + ['ubuntu-1204', 'ubuntu-1504', - 
'debian-8', 'centos-6', 'oel-7', 'fedora-22', 'opensuse-13', 'sles-12'] +List boxes = representativeBoxes + [ + 'ubuntu-1204', + 'ubuntu-1504', + 'debian-8', + 'centos-6', + 'oel-6', + 'oel-7', + 'fedora-22', + 'opensuse-13', + 'sles-12' +] /* The version of elasticsearch that we upgrade *from* as part of testing * upgrades. */ From e3b7e5d75a876cd21846248b64532f49bce8203e Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 15 Mar 2016 15:07:53 +0100 Subject: [PATCH 316/320] percolator: Replace percolate api with the new percolator query Also replaced the PercolatorQueryRegistry with the new PercolatorQueryCache. The PercolatorFieldMapper stores the rewritten form of each percolator query's xcontext in a binary doc values field. This make sure that the query rewrite happens only during indexing (some queries for example fetch shapes, terms in remote indices) and the speed up the loading of the queries in the percolator query cache. Because the percolator now works inside the search infrastructure a number of features (sorting fields, pagination, fetch features) are available out of the box. 
The following feature requests are automatically implemented via this refactoring: Closes #10741 Closes #7297 Closes #13176 Closes #13978 Closes #11264 Closes #10741 Closes #4317 --- .../resources/checkstyle_suppressions.xml | 8 - .../index/memory/ExtendedMemoryIndex.java | 31 - .../elasticsearch/ElasticsearchException.java | 2 - .../elasticsearch/action/ActionModule.java | 3 +- .../cluster/stats/ClusterStatsIndices.java | 18 +- .../stats/TransportClusterStatsAction.java | 4 +- .../admin/indices/stats/CommonStats.java | 45 +- .../admin/indices/stats/CommonStatsFlags.java | 2 +- .../indices/stats/IndicesStatsRequest.java | 4 +- .../stats/TransportIndicesStatsAction.java | 4 +- .../percolate/MultiPercolateRequest.java | 6 +- .../action/percolate/PercolateRequest.java | 62 +- .../percolate/PercolateRequestBuilder.java | 14 +- .../action/percolate/PercolateResponse.java | 16 +- .../percolate/PercolateShardRequest.java | 130 ---- .../percolate/PercolateShardResponse.java | 180 ----- .../TransportMultiPercolateAction.java | 354 +++------ .../percolate/TransportPercolateAction.java | 319 ++++---- .../TransportShardMultiPercolateAction.java | 281 ------- .../metadata/MetaDataMappingService.java | 4 +- .../common/settings/IndexScopedSettings.java | 4 +- .../org/elasticsearch/index/IndexService.java | 14 +- .../elasticsearch/index/cache/IndexCache.java | 12 +- .../index/mapper/MapperService.java | 6 +- .../index/percolator/PercolateStats.java | 164 ----- .../percolator/PercolatorFieldMapper.java | 97 ++- .../PercolatorHighlightSubFetchPhase.java | 141 ++++ .../percolator/PercolatorQueriesRegistry.java | 236 ------ .../percolator/PercolatorQueryCache.java | 266 +++++++ .../percolator/PercolatorQueryCacheStats.java | 90 +++ .../percolator/QueriesLoaderCollector.java | 94 --- .../query}/PercolatorQuery.java | 144 ++-- .../index/query/PercolatorQueryBuilder.java | 375 ++++++++++ .../index/query/PercolatorQueryParser.java | 134 ++++ .../index/query/QueryBuilders.java | 12 + 
.../index/query/QueryShardContext.java | 17 +- .../elasticsearch/index/shard/IndexShard.java | 34 +- .../elasticsearch/indices/IndicesService.java | 2 +- .../indices/NodeIndicesStats.java | 6 +- .../java/org/elasticsearch/node/Node.java | 5 - .../MultiDocumentPercolatorIndex.java | 150 ---- .../percolator/PercolateContext.java | 691 ------------------ .../percolator/PercolateDocumentParser.java | 215 ------ .../percolator/PercolateException.java | 58 -- .../percolator/PercolatorIndex.java | 37 - .../percolator/PercolatorModule.java | 33 - .../percolator/PercolatorService.java | 376 ---------- .../SingleDocumentPercolatorIndex.java | 95 --- .../indices/stats/RestIndicesStatsAction.java | 2 +- .../rest/action/cat/RestIndicesAction.java | 28 +- .../rest/action/cat/RestNodesAction.java | 15 +- .../rest/action/cat/RestShardsAction.java | 10 +- .../rest/action/cat/RestThreadPoolAction.java | 2 - .../elasticsearch/search/SearchModule.java | 4 + .../search/internal/DefaultSearchContext.java | 6 + .../internal/FilteredSearchContext.java | 6 + .../search/internal/SearchContext.java | 3 + .../elasticsearch/threadpool/ThreadPool.java | 3 - .../ExceptionSerializationTests.java | 15 - .../admin/cluster/node/tasks/TasksIT.java | 14 +- .../PercolatorFieldMapperTests.java | 29 +- ...PercolatorHighlightSubFetchPhaseTests.java | 85 +++ .../percolator/PercolatorQueryCacheTests.java | 352 +++++++++ .../index/query/AbstractQueryTestCase.java | 4 +- .../query/PercolatorQueryBuilderTests.java | 216 ++++++ .../query}/PercolatorQueryTests.java | 68 +- .../index/query/QueryShardContextTests.java | 2 +- .../index/shard/IndexShardTests.java | 9 +- .../indices/IndicesOptionsIntegrationIT.java | 8 + .../indices/stats/IndexStatsIT.java | 8 +- .../percolator/ConcurrentPercolatorIT.java | 394 ---------- .../percolator/MultiPercolatorIT.java | 42 +- .../PercolateDocumentParserTests.java | 188 ----- .../percolator/PercolatorAggregationsIT.java | 7 +- .../percolator/PercolatorIT.java | 260 +++---- 
.../percolator/PercolatorServiceTests.java | 172 ----- .../percolator/RecoveryPercolatorIT.java | 341 --------- .../percolator/TTLPercolatorIT.java | 206 ------ .../highlight/HighlightBuilderTests.java | 2 +- .../percolator/PercolatorQuerySearchIT.java | 131 ++++ .../rescore/QueryRescoreBuilderTests.java | 3 +- .../suggest/CompletionSuggestSearchIT.java | 4 +- .../threadpool/ThreadPoolStatsTests.java | 2 - docs/java-api/index.asciidoc | 2 - .../percolator-query.asciidoc} | 26 +- .../query-dsl/special-queries.asciidoc | 2 + .../migration/migrate_5_0/percolator.asciidoc | 26 +- .../query-dsl/percolator-query.asciidoc | 424 +++++++++++ .../query-dsl/special-queries.asciidoc | 6 + docs/reference/search/percolate.asciidoc | 507 +------------ .../messy/tests/IndicesRequestTests.java | 50 -- .../messy/tests/TemplateQueryParserTests.java | 2 +- .../test/cat.shards/10_basic.yaml | 4 - .../test/cat.thread_pool/10_basic.yaml | 16 +- .../test/indices.stats/11_metric.yaml | 5 - .../test/percolate/16_existing_doc.yaml | 2 +- .../elasticsearch/test/TestSearchContext.java | 9 + 97 files changed, 3089 insertions(+), 5628 deletions(-) delete mode 100644 core/src/main/java/org/apache/lucene/index/memory/ExtendedMemoryIndex.java delete mode 100644 core/src/main/java/org/elasticsearch/action/percolate/PercolateShardRequest.java delete mode 100644 core/src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java delete mode 100644 core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java delete mode 100644 core/src/main/java/org/elasticsearch/index/percolator/PercolateStats.java create mode 100644 core/src/main/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhase.java delete mode 100644 core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java create mode 100644 core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCache.java create mode 100644 
core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCacheStats.java delete mode 100644 core/src/main/java/org/elasticsearch/index/percolator/QueriesLoaderCollector.java rename core/src/main/java/org/elasticsearch/{percolator => index/query}/PercolatorQuery.java (60%) create mode 100644 core/src/main/java/org/elasticsearch/index/query/PercolatorQueryBuilder.java create mode 100644 core/src/main/java/org/elasticsearch/index/query/PercolatorQueryParser.java delete mode 100644 core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java delete mode 100644 core/src/main/java/org/elasticsearch/percolator/PercolateContext.java delete mode 100644 core/src/main/java/org/elasticsearch/percolator/PercolateDocumentParser.java delete mode 100644 core/src/main/java/org/elasticsearch/percolator/PercolateException.java delete mode 100644 core/src/main/java/org/elasticsearch/percolator/PercolatorIndex.java delete mode 100644 core/src/main/java/org/elasticsearch/percolator/PercolatorModule.java delete mode 100644 core/src/main/java/org/elasticsearch/percolator/PercolatorService.java delete mode 100644 core/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java create mode 100644 core/src/test/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhaseTests.java create mode 100644 core/src/test/java/org/elasticsearch/index/percolator/PercolatorQueryCacheTests.java create mode 100644 core/src/test/java/org/elasticsearch/index/query/PercolatorQueryBuilderTests.java rename core/src/test/java/org/elasticsearch/{percolator => index/query}/PercolatorQueryTests.java (82%) delete mode 100644 core/src/test/java/org/elasticsearch/percolator/ConcurrentPercolatorIT.java delete mode 100644 core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java delete mode 100644 core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java delete mode 100644 
core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java delete mode 100644 core/src/test/java/org/elasticsearch/percolator/TTLPercolatorIT.java create mode 100644 core/src/test/java/org/elasticsearch/search/percolator/PercolatorQuerySearchIT.java rename docs/java-api/{percolate.asciidoc => query-dsl/percolator-query.asciidoc} (63%) create mode 100644 docs/reference/query-dsl/percolator-query.asciidoc diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 87c049ae0b14..287e14b347be 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -663,10 +663,7 @@ - - - @@ -1326,14 +1323,9 @@ - - - - - diff --git a/core/src/main/java/org/apache/lucene/index/memory/ExtendedMemoryIndex.java b/core/src/main/java/org/apache/lucene/index/memory/ExtendedMemoryIndex.java deleted file mode 100644 index aec1bc755193..000000000000 --- a/core/src/main/java/org/apache/lucene/index/memory/ExtendedMemoryIndex.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.lucene.index.memory; - -/** - * This class overwrites {@link MemoryIndex} to make the reuse constructor visible. - */ -public final class ExtendedMemoryIndex extends MemoryIndex { - - public ExtendedMemoryIndex(boolean storeOffsets, boolean storePayloads, long maxReusedBytes) { - super(storeOffsets, storePayloads, maxReusedBytes); - } - -} diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java index d069bddfdfe6..7fd81f5ddfee 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -645,8 +645,6 @@ public class ElasticsearchException extends RuntimeException implements ToXConte // 87 used to be for MergeMappingException INVALID_INDEX_TEMPLATE_EXCEPTION(org.elasticsearch.indices.InvalidIndexTemplateException.class, org.elasticsearch.indices.InvalidIndexTemplateException::new, 88), - PERCOLATE_EXCEPTION(org.elasticsearch.percolator.PercolateException.class, - org.elasticsearch.percolator.PercolateException::new, 89), REFRESH_FAILED_ENGINE_EXCEPTION(org.elasticsearch.index.engine.RefreshFailedEngineException.class, org.elasticsearch.index.engine.RefreshFailedEngineException::new, 90), AGGREGATION_INITIALIZATION_EXCEPTION(org.elasticsearch.search.aggregations.AggregationInitializationException.class, diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java index 491202e7c7ae..be9387f9a8ac 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java @@ -165,7 +165,6 @@ import org.elasticsearch.action.percolate.MultiPercolateAction; import org.elasticsearch.action.percolate.PercolateAction; import org.elasticsearch.action.percolate.TransportMultiPercolateAction; import 
org.elasticsearch.action.percolate.TransportPercolateAction; -import org.elasticsearch.action.percolate.TransportShardMultiPercolateAction; import org.elasticsearch.action.search.ClearScrollAction; import org.elasticsearch.action.search.MultiSearchAction; import org.elasticsearch.action.search.SearchAction; @@ -331,7 +330,7 @@ public class ActionModule extends AbstractModule { registerAction(SearchScrollAction.INSTANCE, TransportSearchScrollAction.class); registerAction(MultiSearchAction.INSTANCE, TransportMultiSearchAction.class); registerAction(PercolateAction.INSTANCE, TransportPercolateAction.class); - registerAction(MultiPercolateAction.INSTANCE, TransportMultiPercolateAction.class, TransportShardMultiPercolateAction.class); + registerAction(MultiPercolateAction.INSTANCE, TransportMultiPercolateAction.class); registerAction(ExplainAction.INSTANCE, TransportExplainAction.class); registerAction(ClearScrollAction.INSTANCE, TransportClearScrollAction.class); registerAction(RecoveryAction.INSTANCE, TransportRecoveryAction.class); diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java index ec7017160c0e..f8304bf76a99 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java @@ -31,7 +31,7 @@ import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.cache.query.QueryCacheStats; import org.elasticsearch.index.engine.SegmentsStats; import org.elasticsearch.index.fielddata.FieldDataStats; -import org.elasticsearch.index.percolator.PercolateStats; +import org.elasticsearch.index.percolator.PercolatorQueryCacheStats; import org.elasticsearch.index.shard.DocsStats; import org.elasticsearch.index.store.StoreStats; import 
org.elasticsearch.search.suggest.completion.CompletionStats; @@ -48,7 +48,7 @@ public class ClusterStatsIndices implements ToXContent, Streamable { private QueryCacheStats queryCache; private CompletionStats completion; private SegmentsStats segments; - private PercolateStats percolate; + private PercolatorQueryCacheStats percolatorCache; private ClusterStatsIndices() { } @@ -62,7 +62,7 @@ public class ClusterStatsIndices implements ToXContent, Streamable { this.queryCache = new QueryCacheStats(); this.completion = new CompletionStats(); this.segments = new SegmentsStats(); - this.percolate = new PercolateStats(); + this.percolatorCache = new PercolatorQueryCacheStats(); for (ClusterStatsNodeResponse r : nodeResponses) { for (org.elasticsearch.action.admin.indices.stats.ShardStats shardStats : r.shardsStats()) { @@ -85,7 +85,7 @@ public class ClusterStatsIndices implements ToXContent, Streamable { queryCache.add(shardCommonStats.queryCache); completion.add(shardCommonStats.completion); segments.add(shardCommonStats.segments); - percolate.add(shardCommonStats.percolate); + percolatorCache.add(shardCommonStats.percolatorCache); } } @@ -128,8 +128,8 @@ public class ClusterStatsIndices implements ToXContent, Streamable { return segments; } - public PercolateStats getPercolate() { - return percolate; + public PercolatorQueryCacheStats getPercolatorCache() { + return percolatorCache; } @Override @@ -142,7 +142,7 @@ public class ClusterStatsIndices implements ToXContent, Streamable { queryCache = QueryCacheStats.readQueryCacheStats(in); completion = CompletionStats.readCompletionStats(in); segments = SegmentsStats.readSegmentsStats(in); - percolate = PercolateStats.readPercolateStats(in); + percolatorCache = PercolatorQueryCacheStats.readPercolateStats(in); } @Override @@ -155,7 +155,7 @@ public class ClusterStatsIndices implements ToXContent, Streamable { queryCache.writeTo(out); completion.writeTo(out); segments.writeTo(out); - percolate.writeTo(out); + 
percolatorCache.writeTo(out); } public static ClusterStatsIndices readIndicesStats(StreamInput in) throws IOException { @@ -178,7 +178,7 @@ public class ClusterStatsIndices implements ToXContent, Streamable { queryCache.toXContent(builder, params); completion.toXContent(builder, params); segments.toXContent(builder, params); - percolate.toXContent(builder, params); + percolatorCache.toXContent(builder, params); return builder; } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java index c70122920f50..0055d31748b6 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -56,7 +56,7 @@ public class TransportClusterStatsAction extends TransportNodesAction { } public IndicesStatsRequest percolate(boolean percolate) { - flags.set(Flag.Percolate, percolate); + flags.set(Flag.PercolatorCache, percolate); return this; } public boolean percolate() { - return flags.isSet(Flag.Percolate); + return flags.isSet(Flag.PercolatorCache); } public IndicesStatsRequest segments(boolean segments) { diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java index 2189973d9b76..6ab9bc074b32 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java @@ -140,7 +140,7 @@ public class TransportIndicesStatsAction extends TransportBroadcastByNodeAction< flags.fieldDataFields(request.fieldDataFields()); } if (request.percolate()) { - flags.set(CommonStatsFlags.Flag.Percolate); + 
flags.set(CommonStatsFlags.Flag.PercolatorCache); } if (request.segments()) { flags.set(CommonStatsFlags.Flag.Segments); @@ -163,6 +163,6 @@ public class TransportIndicesStatsAction extends TransportBroadcastByNodeAction< flags.set(CommonStatsFlags.Flag.Recovery); } - return new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexShard, flags), indexShard.commitStats()); + return new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexService.cache().getPercolatorQueryCache(), indexShard, flags), indexShard.commitStats()); } } diff --git a/core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateRequest.java b/core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateRequest.java index 74537379d1d0..ac49fed763a2 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateRequest.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateRequest.java @@ -163,11 +163,7 @@ public class MultiPercolateRequest extends ActionRequest @Override public List subRequests() { - List indicesRequests = new ArrayList<>(); - for (PercolateRequest percolateRequest : this.requests) { - indicesRequests.addAll(percolateRequest.subRequests()); - } - return indicesRequests; + return requests; } private void parsePercolateAction(XContentParser parser, PercolateRequest percolateRequest, boolean allowExplicitIndex) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java index e69da6bf519d..c9887cba03fa 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java @@ -19,10 +19,12 @@ package org.elasticsearch.action.percolate; import 
org.elasticsearch.ElasticsearchGenerationException; +import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.CompositeIndicesRequest; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.client.Requests; import org.elasticsearch.common.bytes.BytesArray; @@ -43,49 +45,37 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * A request to execute a percolate operation. */ -public class PercolateRequest extends BroadcastRequest implements CompositeIndicesRequest { +public class PercolateRequest extends ActionRequest implements IndicesRequest.Replaceable { + protected String[] indices; + private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpenAndForbidClosed(); private String documentType; private String routing; private String preference; - private GetRequest getRequest; private boolean onlyCount; + private GetRequest getRequest; private BytesReference source; - private BytesReference docSource; - - // Used internally in order to compute tookInMillis, TransportBroadcastAction itself doesn't allow - // to hold it temporarily in an easy way - long startTime; - - /** - * Constructor only for internal usage. - */ - public PercolateRequest() { + public String[] indices() { + return indices; } - PercolateRequest(PercolateRequest request, BytesReference docSource) { - this.indices = request.indices(); - this.documentType = request.documentType(); - this.routing = request.routing(); - this.preference = request.preference(); - this.source = request.source; - this.docSource = docSource; - this.onlyCount = request.onlyCount; - this.startTime = request.startTime; + public final PercolateRequest indices(String... 
indices) { + this.indices = indices; + return this; } - @Override - public List subRequests() { - List requests = new ArrayList<>(); - requests.add(this); - if (getRequest != null) { - requests.add(getRequest); - } - return requests; + public IndicesOptions indicesOptions() { + return indicesOptions; } + public PercolateRequest indicesOptions(IndicesOptions indicesOptions) { + this.indicesOptions = indicesOptions; + return this; + } + + /** * Getter for {@link #documentType(String)} */ @@ -244,13 +234,9 @@ public class PercolateRequest extends BroadcastRequest impleme return this; } - BytesReference docSource() { - return docSource; - } - @Override public ActionRequestValidationException validate() { - ActionRequestValidationException validationException = super.validate(); + ActionRequestValidationException validationException = null; if (documentType == null) { validationException = addValidationError("type is missing", validationException); } @@ -266,12 +252,12 @@ public class PercolateRequest extends BroadcastRequest impleme @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - startTime = in.readVLong(); + indices = in.readStringArray(); + indicesOptions = IndicesOptions.readIndicesOptions(in); documentType = in.readString(); routing = in.readOptionalString(); preference = in.readOptionalString(); source = in.readBytesReference(); - docSource = in.readBytesReference(); if (in.readBoolean()) { getRequest = new GetRequest(); getRequest.readFrom(in); @@ -282,12 +268,12 @@ public class PercolateRequest extends BroadcastRequest impleme @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeVLong(startTime); + out.writeStringArrayNullable(indices); + indicesOptions.writeIndicesOptions(out); out.writeString(documentType); out.writeOptionalString(routing); out.writeOptionalString(preference); out.writeBytesReference(source); - out.writeBytesReference(docSource); if (getRequest != null) { 
out.writeBoolean(true); getRequest.writeTo(out); diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java index 472938cfbf1a..83757dab089b 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java @@ -18,7 +18,9 @@ */ package org.elasticsearch.action.percolate; +import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.Strings; @@ -36,7 +38,7 @@ import java.util.Map; /** * A builder the easy to use of defining a percolate request. */ -public class PercolateRequestBuilder extends BroadcastOperationRequestBuilder { +public class PercolateRequestBuilder extends ActionRequestBuilder { private PercolateSourceBuilder sourceBuilder; @@ -44,6 +46,16 @@ public class PercolateRequestBuilder extends BroadcastOperationRequestBuilder, ToXContent { public static final Match[] EMPTY = new Match[0]; + // PercolatorQuery emits this score if no 'query' is defined in the percolate request + public final static float NO_SCORE = 0.0f; private long tookInMillis; private Match[] matches; @@ -65,15 +66,6 @@ public class PercolateResponse extends BroadcastResponse implements Iterable shardFailures, long tookInMillis, Match[] matches) { - super(totalShards, successfulShards, failedShards, shardFailures); - if (tookInMillis < 0) { - throw new IllegalArgumentException("tookInMillis must be positive but was: " + tookInMillis); - } - this.tookInMillis = tookInMillis; - this.matches = matches; - } - PercolateResponse() { } @@ -136,10 +128,10 @@ public class PercolateResponse extends 
BroadcastResponse implements Iterable ids; - private Map> hls; - private boolean onlyCount; - private int requestedSize; - - private InternalAggregations aggregations; - private List pipelineAggregators; - - PercolateShardResponse() { - } - - public PercolateShardResponse(TopDocs topDocs, Map ids, Map> hls, PercolateContext context) { - super(context.indexShard().shardId()); - this.topDocs = topDocs; - this.ids = ids; - this.hls = hls; - this.onlyCount = context.isOnlyCount(); - this.requestedSize = context.size(); - QuerySearchResult result = context.queryResult(); - if (result != null) { - if (result.aggregations() != null) { - this.aggregations = (InternalAggregations) result.aggregations(); - } - this.pipelineAggregators = result.pipelineAggregators(); - } - } - - public TopDocs topDocs() { - return topDocs; - } - - /** - * Returns per match the percolator query id. The key is the Lucene docId of the matching percolator query. - */ - public Map ids() { - return ids; - } - - public int requestedSize() { - return requestedSize; - } - - /** - * Returns per match the highlight snippets. The key is the Lucene docId of the matching percolator query. 
- */ - public Map> hls() { - return hls; - } - - public InternalAggregations aggregations() { - return aggregations; - } - - public List pipelineAggregators() { - return pipelineAggregators; - } - - public boolean onlyCount() { - return onlyCount; - } - - public boolean isEmpty() { - return topDocs.totalHits == 0; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - onlyCount = in.readBoolean(); - requestedSize = in.readVInt(); - topDocs = Lucene.readTopDocs(in); - int size = in.readVInt(); - ids = new HashMap<>(size); - for (int i = 0; i < size; i++) { - ids.put(in.readVInt(), in.readString()); - } - size = in.readVInt(); - hls = new HashMap<>(size); - for (int i = 0; i < size; i++) { - int docId = in.readVInt(); - int mSize = in.readVInt(); - Map fields = new HashMap<>(); - for (int j = 0; j < mSize; j++) { - fields.put(in.readString(), HighlightField.readHighlightField(in)); - } - hls.put(docId, fields); - } - aggregations = InternalAggregations.readOptionalAggregations(in); - if (in.readBoolean()) { - int pipelineAggregatorsSize = in.readVInt(); - List pipelineAggregators = new ArrayList<>(pipelineAggregatorsSize); - for (int i = 0; i < pipelineAggregatorsSize; i++) { - BytesReference type = in.readBytesReference(); - PipelineAggregator pipelineAggregator = PipelineAggregatorStreams.stream(type).readResult(in); - pipelineAggregators.add((SiblingPipelineAggregator) pipelineAggregator); - } - this.pipelineAggregators = pipelineAggregators; - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeBoolean(onlyCount); - out.writeVLong(requestedSize); - Lucene.writeTopDocs(out, topDocs); - out.writeVInt(ids.size()); - for (Map.Entry entry : ids.entrySet()) { - out.writeVInt(entry.getKey()); - out.writeString(entry.getValue()); - } - out.writeVInt(hls.size()); - for (Map.Entry> entry1 : hls.entrySet()) { - out.writeVInt(entry1.getKey()); - 
out.writeVInt(entry1.getValue().size()); - for (Map.Entry entry2 : entry1.getValue().entrySet()) { - out.writeString(entry2.getKey()); - entry2.getValue().writeTo(out); - } - } - out.writeOptionalStreamable(aggregations); - if (pipelineAggregators == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - out.writeVInt(pipelineAggregators.size()); - for (PipelineAggregator pipelineAggregator : pipelineAggregators) { - out.writeBytesReference(pipelineAggregator.type().stream()); - pipelineAggregator.writeTo(out); - } - } - } -} diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java b/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java index 907cae969b47..bf0d79d884ea 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java @@ -19,114 +19,91 @@ package org.elasticsearch.action.percolate; -import com.carrotsearch.hppc.IntArrayList; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetItemResponse; import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.get.MultiGetResponse; -import org.elasticsearch.action.get.TransportMultiGetAction; +import org.elasticsearch.action.search.MultiSearchRequest; +import org.elasticsearch.action.search.MultiSearchResponse; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import 
org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.GroupShardsIterator; -import org.elasticsearch.cluster.routing.ShardIterator; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.AtomicArray; -import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.index.engine.DocumentMissingException; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.percolator.PercolatorService; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.search.aggregations.AggregatorParsers; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReferenceArray; -/** - */ public class TransportMultiPercolateAction extends HandledTransportAction { - private final ClusterService clusterService; - private final PercolatorService percolatorService; - - private final TransportMultiGetAction multiGetAction; - private final TransportShardMultiPercolateAction shardMultiPercolateAction; + private final Client client; + private final ParseFieldMatcher parseFieldMatcher; + private final IndicesQueriesRegistry queryRegistry; + private final AggregatorParsers aggParsers; @Inject - public 
TransportMultiPercolateAction(Settings settings, ThreadPool threadPool, TransportShardMultiPercolateAction shardMultiPercolateAction, - ClusterService clusterService, TransportService transportService, PercolatorService percolatorService, - TransportMultiGetAction multiGetAction, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { + public TransportMultiPercolateAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, Client client, IndicesQueriesRegistry queryRegistry, + AggregatorParsers aggParsers) { super(settings, MultiPercolateAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, MultiPercolateRequest::new); - this.shardMultiPercolateAction = shardMultiPercolateAction; - this.clusterService = clusterService; - this.percolatorService = percolatorService; - this.multiGetAction = multiGetAction; + this.client = client; + this.aggParsers = aggParsers; + this.parseFieldMatcher = new ParseFieldMatcher(settings); + this.queryRegistry = queryRegistry; } @Override - protected void doExecute(final MultiPercolateRequest request, final ActionListener listener) { - final ClusterState clusterState = clusterService.state(); - clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.READ); - - final List percolateRequests = new ArrayList<>(request.requests().size()); - // Can have a mixture of percolate requests. 
(normal percolate requests & percolate existing doc), - // so we need to keep track for what percolate request we had a get request - final IntArrayList getRequestSlots = new IntArrayList(); - List existingDocsRequests = new ArrayList<>(); - for (int slot = 0; slot < request.requests().size(); slot++) { - PercolateRequest percolateRequest = request.requests().get(slot); - percolateRequest.startTime = System.currentTimeMillis(); - percolateRequests.add(percolateRequest); - if (percolateRequest.getRequest() != null) { - existingDocsRequests.add(percolateRequest.getRequest()); - getRequestSlots.add(slot); + protected void doExecute(MultiPercolateRequest request, ActionListener listener) { + List> getRequests = new ArrayList<>(); + for (int i = 0; i < request.requests().size(); i++) { + GetRequest getRequest = request.requests().get(i).getRequest(); + if (getRequest != null) { + getRequests.add(new Tuple<>(i, getRequest)); } } - - if (!existingDocsRequests.isEmpty()) { - final MultiGetRequest multiGetRequest = new MultiGetRequest(); - for (GetRequest getRequest : existingDocsRequests) { - multiGetRequest.add( - new MultiGetRequest.Item(getRequest.index(), getRequest.type(), getRequest.id()) - .routing(getRequest.routing()) - ); + if (getRequests.isEmpty()) { + innerDoExecute(request, listener, Collections.emptyMap(), new HashMap<>()); + } else { + MultiGetRequest multiGetRequest = new MultiGetRequest(); + for (Tuple tuple : getRequests) { + GetRequest getRequest = tuple.v2(); + multiGetRequest.add(new MultiGetRequest.Item(getRequest.index(), getRequest.type(), getRequest.id())); } - - multiGetAction.execute(multiGetRequest, new ActionListener() { - + client.multiGet(multiGetRequest, new ActionListener() { @Override - public void onResponse(MultiGetResponse multiGetItemResponses) { - for (int i = 0; i < multiGetItemResponses.getResponses().length; i++) { - MultiGetItemResponse itemResponse = multiGetItemResponses.getResponses()[i]; - int slot = getRequestSlots.get(i); - 
if (!itemResponse.isFailed()) { - GetResponse getResponse = itemResponse.getResponse(); - if (getResponse.isExists()) { - PercolateRequest originalRequest = (PercolateRequest) percolateRequests.get(slot); - percolateRequests.set(slot, new PercolateRequest(originalRequest, getResponse.getSourceAsBytesRef())); - } else { - logger.trace("mpercolate existing doc, item[{}] doesn't exist", slot); - percolateRequests.set(slot, new DocumentMissingException(null, getResponse.getType(), getResponse.getId())); - } + public void onResponse(MultiGetResponse response) { + Map getResponseSources = new HashMap<>(response.getResponses().length); + Map preFailures = new HashMap<>(); + for (int i = 0; i < response.getResponses().length; i++) { + MultiGetItemResponse itemResponse = response.getResponses()[i]; + int originalSlot = getRequests.get(i).v1(); + if (itemResponse.isFailed()) { + preFailures.put(originalSlot, new MultiPercolateResponse.Item(itemResponse.getFailure().getFailure())); } else { - logger.trace("mpercolate existing doc, item[{}] failure {}", slot, itemResponse.getFailure()); - percolateRequests.set(slot, itemResponse.getFailure()); + if (itemResponse.getResponse().isExists()) { + getResponseSources.put(originalSlot, itemResponse.getResponse().getSourceAsBytesRef()); + } else { + GetRequest getRequest = getRequests.get(i).v2(); + preFailures.put(originalSlot, new MultiPercolateResponse.Item(new ResourceNotFoundException("percolate document [{}/{}/{}] doesn't exist", getRequest.index(), getRequest.type(), getRequest.id()))); + } } } - new ASyncAction(request, percolateRequests, listener, clusterState).run(); + innerDoExecute(request, listener, getResponseSources, preFailures); } @Override @@ -134,200 +111,81 @@ public class TransportMultiPercolateAction extends HandledTransportAction finalListener; - final Map requestsByShard; - final MultiPercolateRequest multiPercolateRequest; - final List percolateRequests; - - final Map shardToSlots; - final AtomicInteger 
expectedOperations; - final AtomicArray reducedResponses; - final AtomicReferenceArray expectedOperationsPerItem; - final AtomicReferenceArray responsesByItemAndShard; - - ASyncAction(MultiPercolateRequest multiPercolateRequest, List percolateRequests, ActionListener finalListener, ClusterState clusterState) { - this.finalListener = finalListener; - this.multiPercolateRequest = multiPercolateRequest; - this.percolateRequests = percolateRequests; - responsesByItemAndShard = new AtomicReferenceArray<>(percolateRequests.size()); - expectedOperationsPerItem = new AtomicReferenceArray<>(percolateRequests.size()); - reducedResponses = new AtomicArray<>(percolateRequests.size()); - - // Resolving concrete indices and routing and grouping the requests by shard - requestsByShard = new HashMap<>(); - // Keep track what slots belong to what shard, in case a request to a shard fails on all copies - shardToSlots = new HashMap<>(); - int expectedResults = 0; - for (int slot = 0; slot < percolateRequests.size(); slot++) { - Object element = percolateRequests.get(slot); - assert element != null; - if (element instanceof PercolateRequest) { - PercolateRequest percolateRequest = (PercolateRequest) element; - String[] concreteIndices; - try { - concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, percolateRequest); - } catch (IndexNotFoundException e) { - reducedResponses.set(slot, e); - responsesByItemAndShard.set(slot, new AtomicReferenceArray(0)); - expectedOperationsPerItem.set(slot, new AtomicInteger(0)); - continue; - } - Map> routing = indexNameExpressionResolver.resolveSearchRouting(clusterState, percolateRequest.routing(), percolateRequest.indices()); - // TODO: I only need shardIds, ShardIterator(ShardRouting) is only needed in TransportShardMultiPercolateAction - GroupShardsIterator shards = clusterService.operationRouting().searchShards( - clusterState, concreteIndices, routing, percolateRequest.preference() - ); - if (shards.size() == 0) { - 
reducedResponses.set(slot, new UnavailableShardsException(null, "No shards available")); - responsesByItemAndShard.set(slot, new AtomicReferenceArray(0)); - expectedOperationsPerItem.set(slot, new AtomicInteger(0)); - continue; - } - - // The shard id is used as index in the atomic ref array, so we need to find out how many shards there are regardless of routing: - int numShards = clusterService.operationRouting().searchShardsCount(clusterState, concreteIndices, null); - responsesByItemAndShard.set(slot, new AtomicReferenceArray(numShards)); - expectedOperationsPerItem.set(slot, new AtomicInteger(shards.size())); - for (ShardIterator shard : shards) { - ShardId shardId = shard.shardId(); - TransportShardMultiPercolateAction.Request requests = requestsByShard.get(shardId); - if (requests == null) { - requestsByShard.put(shardId, requests = new TransportShardMultiPercolateAction.Request(shardId.getIndexName(), shardId.getId(), percolateRequest.preference())); - } - logger.trace("Adding shard[{}] percolate request for item[{}]", shardId, slot); - requests.add(new TransportShardMultiPercolateAction.Request.Item(slot, new PercolateShardRequest(shardId, percolateRequest))); - - IntArrayList items = shardToSlots.get(shardId); - if (items == null) { - shardToSlots.put(shardId, items = new IntArrayList()); - } - items.add(slot); - } - expectedResults++; - } else if (element instanceof Throwable || element instanceof MultiGetResponse.Failure) { - logger.trace("item[{}] won't be executed, reason: {}", slot, element); - reducedResponses.set(slot, element); - responsesByItemAndShard.set(slot, new AtomicReferenceArray(0)); - expectedOperationsPerItem.set(slot, new AtomicInteger(0)); - } - } - expectedOperations = new AtomicInteger(expectedResults); - } - - void run() { - if (expectedOperations.get() == 0) { - finish(); - return; - } - - logger.trace("mpercolate executing for shards {}", requestsByShard.keySet()); - for (Map.Entry entry : requestsByShard.entrySet()) { - final 
ShardId shardId = entry.getKey(); - TransportShardMultiPercolateAction.Request shardRequest = entry.getValue(); - shardMultiPercolateAction.execute(shardRequest, new ActionListener() { - + private void innerDoExecute(MultiPercolateRequest request, ActionListener listener, Map getResponseSources, Map preFailures) { + try { + MultiSearchRequest multiSearchRequest = createMultiSearchRequest(request, getResponseSources, preFailures); + if (multiSearchRequest.requests().isEmpty()) { + // we may failed to turn all percolate requests into search requests, + // in that case just return the response... + listener.onResponse( + createMultiPercolateResponse(new MultiSearchResponse(new MultiSearchResponse.Item[0]), request, preFailures) + ); + } else { + client.multiSearch(multiSearchRequest, new ActionListener() { @Override - public void onResponse(TransportShardMultiPercolateAction.Response response) { - onShardResponse(shardId, response); + public void onResponse(MultiSearchResponse response) { + try { + listener.onResponse(createMultiPercolateResponse(response, request, preFailures)); + } catch (Exception e) { + onFailure(e); + } } @Override public void onFailure(Throwable e) { - onShardFailure(shardId, e); + listener.onFailure(e); } - }); } + } catch (Exception e) { + listener.onFailure(e); } + } - @SuppressWarnings("unchecked") - void onShardResponse(ShardId shardId, TransportShardMultiPercolateAction.Response response) { - logger.trace("{} Percolate shard response", shardId); + private MultiSearchRequest createMultiSearchRequest(MultiPercolateRequest multiPercolateRequest, Map getResponseSources, Map preFailures) throws IOException { + MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); + multiSearchRequest.indicesOptions(multiPercolateRequest.indicesOptions()); + + for (int i = 0; i < multiPercolateRequest.requests().size(); i++) { + if (preFailures.keySet().contains(i)) { + continue; + } + + PercolateRequest percolateRequest = 
multiPercolateRequest.requests().get(i); + BytesReference docSource = getResponseSources.get(i); try { - for (TransportShardMultiPercolateAction.Response.Item item : response.items()) { - AtomicReferenceArray shardResults = responsesByItemAndShard.get(item.slot()); - if (shardResults == null) { - assert false : "shardResults can't be null"; - continue; - } - - if (item.failed()) { - shardResults.set(shardId.id(), new BroadcastShardOperationFailedException(shardId, item.error())); - } else { - shardResults.set(shardId.id(), item.response()); - } - - assert expectedOperationsPerItem.get(item.slot()).get() >= 1 : "slot[" + item.slot() + "] can't be lower than one"; - if (expectedOperationsPerItem.get(item.slot()).decrementAndGet() == 0) { - // Failure won't bubble up, since we fail the whole request now via the catch clause below, - // so expectedOperationsPerItem will not be decremented twice. - reduce(item.slot()); - } - } - } catch (Throwable e) { - logger.error("{} Percolate original reduce error", e, shardId); - finalListener.onFailure(e); + SearchRequest searchRequest = TransportPercolateAction.createSearchRequest( + percolateRequest, docSource, queryRegistry, aggParsers, parseFieldMatcher + ); + multiSearchRequest.add(searchRequest); + } catch (Exception e) { + preFailures.put(i, new MultiPercolateResponse.Item(e)); } } - @SuppressWarnings("unchecked") - void onShardFailure(ShardId shardId, Throwable e) { - logger.debug("{} Shard multi percolate failure", e, shardId); - try { - IntArrayList slots = shardToSlots.get(shardId); - for (int i = 0; i < slots.size(); i++) { - int slot = slots.get(i); - AtomicReferenceArray shardResults = responsesByItemAndShard.get(slot); - if (shardResults == null) { - continue; - } + return multiSearchRequest; + } - shardResults.set(shardId.id(), new BroadcastShardOperationFailedException(shardId, e)); - assert expectedOperationsPerItem.get(slot).get() >= 1 : "slot[" + slot + "] can't be lower than one. 
Caused by: " + e.getMessage(); - if (expectedOperationsPerItem.get(slot).decrementAndGet() == 0) { - reduce(slot); - } - } - } catch (Throwable t) { - logger.error("{} Percolate original reduce error, original error {}", t, shardId, e); - finalListener.onFailure(t); - } - } - - void reduce(int slot) { - AtomicReferenceArray shardResponses = responsesByItemAndShard.get(slot); - PercolateResponse reducedResponse = TransportPercolateAction.reduce((PercolateRequest) percolateRequests.get(slot), shardResponses, percolatorService); - reducedResponses.set(slot, reducedResponse); - assert expectedOperations.get() >= 1 : "slot[" + slot + "] expected options should be >= 1 but is " + expectedOperations.get(); - if (expectedOperations.decrementAndGet() == 0) { - finish(); - } - } - - void finish() { - MultiPercolateResponse.Item[] finalResponse = new MultiPercolateResponse.Item[reducedResponses.length()]; - for (int slot = 0; slot < reducedResponses.length(); slot++) { - Object element = reducedResponses.get(slot); - assert element != null : "Element[" + slot + "] shouldn't be null"; - if (element instanceof PercolateResponse) { - finalResponse[slot] = new MultiPercolateResponse.Item((PercolateResponse) element); - } else if (element instanceof Throwable) { - finalResponse[slot] = new MultiPercolateResponse.Item((Throwable)element); - } else if (element instanceof MultiGetResponse.Failure) { - finalResponse[slot] = new MultiPercolateResponse.Item(((MultiGetResponse.Failure)element).getFailure()); + private MultiPercolateResponse createMultiPercolateResponse(MultiSearchResponse multiSearchResponse, MultiPercolateRequest request, Map preFailures) { + int searchResponseIndex = 0; + MultiPercolateResponse.Item[] percolateItems = new MultiPercolateResponse.Item[request.requests().size()]; + for (int i = 0; i < percolateItems.length; i++) { + if (preFailures.keySet().contains(i)) { + percolateItems[i] = preFailures.get(i); + } else { + MultiSearchResponse.Item searchItem = 
multiSearchResponse.getResponses()[searchResponseIndex++]; + if (searchItem.isFailure()) { + percolateItems[i] = new MultiPercolateResponse.Item(searchItem.getFailure()); + } else { + PercolateRequest percolateRequest = request.requests().get(i); + percolateItems[i] = new MultiPercolateResponse.Item(TransportPercolateAction.createPercolateResponse(searchItem.getResponse(), percolateRequest.onlyCount())); } } - finalListener.onResponse(new MultiPercolateResponse(finalResponse)); } - + return new MultiPercolateResponse(percolateItems); } } diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java b/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java index 2a8f1a4ed242..b23ef04021e3 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java @@ -18,71 +18,74 @@ */ package org.elasticsearch.action.percolate; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.get.TransportGetAction; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; +import 
org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.GroupShardsIterator; -import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.engine.DocumentMissingException; -import org.elasticsearch.percolator.PercolateException; -import org.elasticsearch.percolator.PercolatorService; -import org.elasticsearch.tasks.Task; +import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.PercolatorQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.TemplateQueryParser; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.rest.action.support.RestActions; +import org.elasticsearch.script.Template; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.aggregations.AggregatorParsers; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.ArrayList; -import 
java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReferenceArray; +import java.util.Arrays; -/** - * - */ -public class TransportPercolateAction extends TransportBroadcastAction { +public class TransportPercolateAction extends HandledTransportAction { - private final PercolatorService percolatorService; - private final TransportGetAction getAction; + private final Client client; + private final ParseFieldMatcher parseFieldMatcher; + private final IndicesQueriesRegistry queryRegistry; + private final AggregatorParsers aggParsers; @Inject - public TransportPercolateAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, - TransportService transportService, PercolatorService percolatorService, - TransportGetAction getAction, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, PercolateAction.NAME, threadPool, clusterService, transportService, actionFilters, - indexNameExpressionResolver, PercolateRequest::new, PercolateShardRequest::new, ThreadPool.Names.PERCOLATE); - this.percolatorService = percolatorService; - this.getAction = getAction; + public TransportPercolateAction(Settings settings, ThreadPool threadPool, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + Client client, IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers) { + super(settings, PercolateAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, PercolateRequest::new); + this.client = client; + this.aggParsers = aggParsers; + this.parseFieldMatcher = new ParseFieldMatcher(settings); + this.queryRegistry = indicesQueriesRegistry; } @Override - protected void doExecute(Task task, final PercolateRequest request, final ActionListener listener) { - request.startTime = System.currentTimeMillis(); + protected void doExecute(PercolateRequest request, 
ActionListener listener) { if (request.getRequest() != null) { - getAction.execute(request.getRequest(), new ActionListener() { + client.get(request.getRequest(), new ActionListener() { @Override public void onResponse(GetResponse getResponse) { - if (!getResponse.isExists()) { - onFailure(new DocumentMissingException(null, request.getRequest().type(), request.getRequest().id())); - return; + if (getResponse.isExists()) { + innerDoExecute(request, getResponse.getSourceAsBytesRef(), listener); + } else { + onFailure(new ResourceNotFoundException("percolate document [{}/{}/{}] doesn't exist", request.getRequest().index(), request.getRequest().type(), request.getRequest().id())); } - - BytesReference docSource = getResponse.getSourceAsBytesRef(); - TransportPercolateAction.super.doExecute(task, new PercolateRequest(request, docSource), listener); } @Override @@ -91,99 +94,153 @@ public class TransportPercolateAction extends TransportBroadcastAction shardResults = null; - List shardFailures = null; - - boolean onlyCount = false; - for (int i = 0; i < shardsResponses.length(); i++) { - Object shardResponse = shardsResponses.get(i); - if (shardResponse == null) { - // simply ignore non active shards - } else if (shardResponse instanceof BroadcastShardOperationFailedException) { - failedShards++; - if (shardFailures == null) { - shardFailures = new ArrayList<>(); - } - shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse)); - } else { - PercolateShardResponse percolateShardResponse = (PercolateShardResponse) shardResponse; - successfulShards++; - if (!percolateShardResponse.isEmpty()) { - if (shardResults == null) { - onlyCount = percolateShardResponse.onlyCount(); - shardResults = new ArrayList<>(); - } - shardResults.add(percolateShardResponse); - } - } - } - - if (shardResults == null) { - long tookInMillis = Math.max(1, System.currentTimeMillis() - request.startTime); - PercolateResponse.Match[] matches = 
request.onlyCount() ? null : PercolateResponse.EMPTY; - return new PercolateResponse(shardsResponses.length(), successfulShards, failedShards, shardFailures, tookInMillis, matches); - } else { - PercolatorService.ReduceResult result = null; - try { - result = percolatorService.reduce(onlyCount, shardResults); - } catch (IOException e) { - throw new ElasticsearchException("error during reduce phase", e); - } - long tookInMillis = Math.max(1, System.currentTimeMillis() - request.startTime); - return new PercolateResponse( - shardsResponses.length(), successfulShards, failedShards, shardFailures, - result.matches(), result.count(), tookInMillis, result.reducedAggregations() - ); - } - } - - @Override - protected PercolateShardRequest newShardRequest(int numShards, ShardRouting shard, PercolateRequest request) { - return new PercolateShardRequest(shard.shardId(), numShards, request); - } - - @Override - protected PercolateShardResponse newShardResponse() { - return new PercolateShardResponse(); - } - - @Override - protected GroupShardsIterator shards(ClusterState clusterState, PercolateRequest request, String[] concreteIndices) { - Map> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, request.routing(), request.indices()); - return clusterService.operationRouting().searchShards(clusterState, concreteIndices, routingMap, request.preference()); - } - - @Override - protected PercolateShardResponse shardOperation(PercolateShardRequest request) { + private void innerDoExecute(PercolateRequest request, BytesReference docSource, ActionListener listener) { + SearchRequest searchRequest; try { - return percolatorService.percolate(request); - } catch (Throwable e) { - logger.trace("{} failed to percolate", e, request.shardId()); - throw new PercolateException(request.shardId(), "failed to percolate", e); + searchRequest = createSearchRequest(request, docSource, queryRegistry, aggParsers, parseFieldMatcher); + } catch (IOException e) { + 
listener.onFailure(e); + return; } + client.search(searchRequest, new ActionListener() { + @Override + public void onResponse(SearchResponse searchResponse) { + try { + listener.onResponse(createPercolateResponse(searchResponse, request.onlyCount())); + } catch (Exception e) { + onFailure(e); + } + } + + @Override + public void onFailure(Throwable e) { + listener.onFailure(e); + } + }); + } + + public static SearchRequest createSearchRequest(PercolateRequest percolateRequest, BytesReference documentSource, IndicesQueriesRegistry queryRegistry, AggregatorParsers aggParsers, ParseFieldMatcher parseFieldMatcher) throws IOException { + SearchRequest searchRequest = new SearchRequest(); + if (percolateRequest.indices() != null) { + searchRequest.indices(percolateRequest.indices()); + } + searchRequest.indicesOptions(percolateRequest.indicesOptions()); + searchRequest.routing(percolateRequest.routing()); + searchRequest.preference(percolateRequest.preference()); + + BytesReference querySource = null; + XContentBuilder searchSource = XContentFactory.jsonBuilder().startObject(); + if (percolateRequest.source() != null && percolateRequest.source().length() > 0) { + try (XContentParser parser = XContentHelper.createParser(percolateRequest.source())) { + String currentFieldName = null; + XContentParser.Token token = parser.nextToken(); + if (token != XContentParser.Token.START_OBJECT) { + throw new IllegalArgumentException("Unknown token [" + token+ "]"); + } + + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + if ("doc".equals(currentFieldName)) { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.copyCurrentStructure(parser); + builder.flush(); + documentSource = builder.bytes(); + } else if ("query".equals(currentFieldName) || "filter".equals(currentFieldName)) { + 
XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.copyCurrentStructure(parser); + builder.flush(); + querySource = builder.bytes(); + } else if ("sort".equals(currentFieldName)) { + searchSource.field("sort"); + searchSource.copyCurrentStructure(parser); + } else if ("aggregations".equals(currentFieldName)) { + searchSource.field("aggregations"); + searchSource.copyCurrentStructure(parser); + } else if ("highlight".equals(currentFieldName)) { + searchSource.field("highlight"); + searchSource.copyCurrentStructure(parser); + } else { + throw new IllegalArgumentException("Unknown field [" + currentFieldName+ "]"); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if ("sort".equals(currentFieldName)) { + searchSource.field("sort"); + searchSource.copyCurrentStructure(parser); + } else { + throw new IllegalArgumentException("Unknown field [" + currentFieldName+ "]"); + } + } else if (token.isValue()) { + if ("size".equals(currentFieldName)) { + searchSource.field("size", parser.intValue()); + } else if ("sort".equals(currentFieldName)) { + searchSource.field("sort", parser.text()); + } else if ("track_scores".equals(currentFieldName) || "trackScores".equals(currentFieldName)) { + searchSource.field("track_scores", parser.booleanValue()); + } else { + throw new IllegalArgumentException("Unknown field [" + currentFieldName+ "]"); + } + } else { + throw new IllegalArgumentException("Unknown token [" + token + "]"); + } + } + } + } + + if (percolateRequest.onlyCount()) { + searchSource.field("size", 0); + } + + PercolatorQueryBuilder percolatorQueryBuilder = new PercolatorQueryBuilder(percolateRequest.documentType(), documentSource); + if (querySource != null) { + QueryParseContext queryParseContext = new QueryParseContext(queryRegistry); + queryParseContext.reset(XContentHelper.createParser(querySource)); + queryParseContext.parseFieldMatcher(parseFieldMatcher); + QueryBuilder queryBuilder = queryParseContext.parseInnerQueryBuilder(); + 
BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); + boolQueryBuilder.must(queryBuilder); + boolQueryBuilder.filter(percolatorQueryBuilder); + searchSource.field("query", boolQueryBuilder); + } else { + searchSource.field("query", percolatorQueryBuilder); + } + + searchSource.endObject(); + searchSource.flush(); + BytesReference source = searchSource.bytes(); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + QueryParseContext context = new QueryParseContext(queryRegistry); + try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(source)) { + context.reset(parser); + context.parseFieldMatcher(parseFieldMatcher); + searchSourceBuilder.parseXContent(parser, context, aggParsers, null); + searchRequest.source(searchSourceBuilder); + return searchRequest; + } + } + + public static PercolateResponse createPercolateResponse(SearchResponse searchResponse, boolean onlyCount) { + SearchHits hits = searchResponse.getHits(); + PercolateResponse.Match[] matches; + if (onlyCount) { + matches = null; + } else { + matches = new PercolateResponse.Match[hits.getHits().length]; + for (int i = 0; i < hits.getHits().length; i++) { + SearchHit hit = hits.getHits()[i]; + matches[i] = new PercolateResponse.Match(new Text(hit.getIndex()), new Text(hit.getId()), hit.getScore(), hit.getHighlightFields()); + } + } + + return new PercolateResponse( + searchResponse.getTotalShards(), searchResponse.getSuccessfulShards(), searchResponse.getFailedShards(), + Arrays.asList(searchResponse.getShardFailures()), matches, hits.getTotalHits(), searchResponse.getTookInMillis(), (InternalAggregations) searchResponse.getAggregations() + ); } } diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java b/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java deleted file mode 100644 index 0732d4d40664..000000000000 --- 
a/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java +++ /dev/null @@ -1,281 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.percolate; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.IndicesRequest; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.TransportActions; -import org.elasticsearch.action.support.single.shard.SingleShardRequest; -import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.ShardIterator; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.shard.ShardId; -import 
org.elasticsearch.percolator.PercolatorService; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -/** - */ -public class TransportShardMultiPercolateAction extends TransportSingleShardAction { - - private final PercolatorService percolatorService; - - private static final String ACTION_NAME = MultiPercolateAction.NAME + "[shard]"; - - @Inject - public TransportShardMultiPercolateAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, - TransportService transportService, PercolatorService percolatorService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, ACTION_NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, - Request::new, ThreadPool.Names.PERCOLATE); - this.percolatorService = percolatorService; - } - - @Override - protected boolean isSubAction() { - return true; - } - - @Override - protected Response newResponse() { - return new Response(); - } - - @Override - protected boolean resolveIndex(Request request) { - return false; - } - - @Override - protected ShardIterator shards(ClusterState state, InternalRequest request) { - return clusterService.operationRouting().getShards( - state, request.concreteIndex(), request.request().shardId(), request.request().preference - ); - } - - @Override - protected Response shardOperation(Request request, ShardId shardId) { - // TODO: Look into combining the shard req's docs into one in memory index. 
- Response response = new Response(); - response.items = new ArrayList<>(request.items.size()); - for (Request.Item item : request.items) { - Response.Item responseItem; - int slot = item.slot; - try { - responseItem = new Response.Item(slot, percolatorService.percolate(item.request)); - } catch (Throwable t) { - if (TransportActions.isShardNotAvailableException(t)) { - throw (ElasticsearchException) t; - } else { - logger.debug("{} failed to multi percolate", t, request.shardId()); - responseItem = new Response.Item(slot, t); - } - } - response.items.add(responseItem); - } - return response; - } - - - public static class Request extends SingleShardRequest implements IndicesRequest { - - private int shardId; - private String preference; - private List items; - - public Request() { - } - - Request(String concreteIndex, int shardId, String preference) { - super(concreteIndex); - this.shardId = shardId; - this.preference = preference; - this.items = new ArrayList<>(); - } - - @Override - public ActionRequestValidationException validate() { - return super.validateNonNullIndex(); - } - - @Override - public String[] indices() { - List indices = new ArrayList<>(); - for (Item item : items) { - Collections.addAll(indices, item.request.indices()); - } - return indices.toArray(new String[indices.size()]); - } - - public int shardId() { - return shardId; - } - - public void add(Item item) { - items.add(item); - } - - public List items() { - return items; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - shardId = in.readVInt(); - preference = in.readOptionalString(); - int size = in.readVInt(); - items = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - int slot = in.readVInt(); - PercolateShardRequest shardRequest = new PercolateShardRequest(); - shardRequest.readFrom(in); - Item item = new Item(slot, shardRequest); - items.add(item); - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException 
{ - super.writeTo(out); - out.writeVInt(shardId); - out.writeOptionalString(preference); - out.writeVInt(items.size()); - for (Item item : items) { - out.writeVInt(item.slot); - item.request.writeTo(out); - } - } - - static class Item { - - private final int slot; - private final PercolateShardRequest request; - - public Item(int slot, PercolateShardRequest request) { - this.slot = slot; - this.request = request; - } - - public int slot() { - return slot; - } - - public PercolateShardRequest request() { - return request; - } - - } - - } - - public static class Response extends ActionResponse { - - private List items; - - public List items() { - return items; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeVInt(items.size()); - for (Item item : items) { - out.writeVInt(item.slot); - if (item.response != null) { - out.writeBoolean(true); - item.response.writeTo(out); - } else { - out.writeBoolean(false); - out.writeThrowable(item.error); - } - } - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - int size = in.readVInt(); - items = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - int slot = in.readVInt(); - if (in.readBoolean()) { - PercolateShardResponse shardResponse = new PercolateShardResponse(); - shardResponse.readFrom(in); - items.add(new Item(slot, shardResponse)); - } else { - items.add(new Item(slot, in.readThrowable())); - } - } - } - - public static class Item { - - private final int slot; - private final PercolateShardResponse response; - private final Throwable error; - - public Item(Integer slot, PercolateShardResponse response) { - this.slot = slot; - this.response = response; - this.error = null; - } - - public Item(Integer slot, Throwable error) { - this.slot = slot; - this.error = error; - this.response = null; - } - - public int slot() { - return slot; - } - - public PercolateShardResponse response() { - return response; - } 
- - public Throwable error() { - return error; - } - - public boolean failed() { - return error != null; - } - } - - } - -} diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index a4dbe058395a..4857fcc27b9d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -43,9 +43,9 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.NodeServicesProvider; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.InvalidTypeNameException; -import org.elasticsearch.percolator.PercolatorService; import java.io.IOException; import java.util.ArrayList; @@ -297,7 +297,7 @@ public class MetaDataMappingService extends AbstractComponent { } assert mappingType != null; - if (!MapperService.DEFAULT_MAPPING.equals(mappingType) && !PercolatorService.TYPE_NAME.equals(mappingType) && mappingType.charAt(0) == '_') { + if (!MapperService.DEFAULT_MAPPING.equals(mappingType) && !PercolatorFieldMapper.TYPE_NAME.equals(mappingType) && mappingType.charAt(0) == '_') { throw new InvalidTypeNameException("Document mapping type name can't start with '_'"); } MetaData.Builder builder = MetaData.builder(metaData); diff --git a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index e08e4fc49c5e..322ac4de799b 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -35,7 +35,7 @@ import 
org.elasticsearch.index.engine.EngineConfig; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; +import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.FsDirectoryService; import org.elasticsearch.index.store.IndexStore; @@ -125,7 +125,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { FieldMapper.IGNORE_MALFORMED_SETTING, FieldMapper.COERCE_SETTING, Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING, - PercolatorQueriesRegistry.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING, + PercolatorQueryCache.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING, MapperService.INDEX_MAPPER_DYNAMIC_SETTING, MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING, BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING, diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index 6b4a1851ab51..bb73e212a773 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -61,6 +61,7 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexEventListener; @@ -140,8 +141,9 @@ public final class IndexService extends AbstractIndexComponent implements IndexC this.indexStore = indexStore; indexFieldData.setListener(new 
FieldDataCacheListener(this)); this.bitsetFilterCache = new BitsetFilterCache(indexSettings, new BitsetCacheListener(this)); - this.warmer = new IndexWarmer(indexSettings.getSettings(), nodeServicesProvider.getThreadPool(), bitsetFilterCache.createListener(nodeServicesProvider.getThreadPool())); - this.indexCache = new IndexCache(indexSettings, queryCache, bitsetFilterCache); + PercolatorQueryCache percolatorQueryCache = new PercolatorQueryCache(indexSettings, IndexService.this::newQueryShardContext); + this.warmer = new IndexWarmer(indexSettings.getSettings(), nodeServicesProvider.getThreadPool(), bitsetFilterCache.createListener(nodeServicesProvider.getThreadPool()), percolatorQueryCache.createListener(nodeServicesProvider.getThreadPool())); + this.indexCache = new IndexCache(indexSettings, queryCache, bitsetFilterCache, percolatorQueryCache); this.engineFactory = engineFactory; // initialize this last -- otherwise if the wrapper requires any other member to be non-null we fail with an NPE this.searcherWrapper = wrapperFactory.newWrapper(this); @@ -230,7 +232,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC } } } finally { - IOUtils.close(bitsetFilterCache, indexCache, mapperService, indexFieldData, analysisService, refreshTask, fsyncTask); + IOUtils.close(bitsetFilterCache, indexCache, mapperService, indexFieldData, analysisService, refreshTask, fsyncTask, cache().getPercolatorQueryCache()); } } } @@ -420,7 +422,11 @@ public final class IndexService extends AbstractIndexComponent implements IndexC * Creates a new QueryShardContext. 
The context has not types set yet, if types are required set them via {@link QueryShardContext#setTypes(String...)} */ public QueryShardContext newQueryShardContext() { - return new QueryShardContext(indexSettings, indexCache.bitsetFilterCache(), indexFieldData, mapperService(), similarityService(), nodeServicesProvider.getScriptService(), nodeServicesProvider.getIndicesQueriesRegistry()); + return new QueryShardContext( + indexSettings, indexCache.bitsetFilterCache(), indexFieldData, mapperService(), + similarityService(), nodeServicesProvider.getScriptService(), nodeServicesProvider.getIndicesQueriesRegistry(), + indexCache.getPercolatorQueryCache() + ); } ThreadPool getThreadPool() { diff --git a/core/src/main/java/org/elasticsearch/index/cache/IndexCache.java b/core/src/main/java/org/elasticsearch/index/cache/IndexCache.java index 61733f246955..b41f5bc01250 100644 --- a/core/src/main/java/org/elasticsearch/index/cache/IndexCache.java +++ b/core/src/main/java/org/elasticsearch/index/cache/IndexCache.java @@ -24,6 +24,7 @@ import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.cache.query.QueryCache; +import org.elasticsearch.index.percolator.PercolatorQueryCache; import java.io.Closeable; import java.io.IOException; @@ -35,11 +36,14 @@ public class IndexCache extends AbstractIndexComponent implements Closeable { private final QueryCache queryCache; private final BitsetFilterCache bitsetFilterCache; + private final PercolatorQueryCache percolatorQueryCache; - public IndexCache(IndexSettings indexSettings, QueryCache queryCache, BitsetFilterCache bitsetFilterCache) { + public IndexCache(IndexSettings indexSettings, QueryCache queryCache, BitsetFilterCache bitsetFilterCache, + PercolatorQueryCache percolatorQueryCache) { super(indexSettings); this.queryCache = queryCache; this.bitsetFilterCache = bitsetFilterCache; + 
this.percolatorQueryCache = percolatorQueryCache; } public QueryCache query() { @@ -53,9 +57,13 @@ public class IndexCache extends AbstractIndexComponent implements Closeable { return bitsetFilterCache; } + public PercolatorQueryCache getPercolatorQueryCache() { + return percolatorQueryCache; + } + @Override public void close() throws IOException { - IOUtils.close(queryCache, bitsetFilterCache); + IOUtils.close(queryCache, bitsetFilterCache, percolatorQueryCache); } public void clear(String reason) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 414ea0f7e9c2..eaf897e7fbd0 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -33,12 +33,12 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.mapper.Mapper.BuilderContext; import org.elasticsearch.index.mapper.object.ObjectMapper; +import org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.InvalidTypeNameException; import org.elasticsearch.indices.TypeMissingException; import org.elasticsearch.indices.mapper.MapperRegistry; -import org.elasticsearch.percolator.PercolatorService; import org.elasticsearch.script.ScriptService; import java.io.Closeable; @@ -333,7 +333,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } private boolean typeNameStartsWithIllegalDot(DocumentMapper mapper) { - return mapper.type().startsWith(".") && !PercolatorService.TYPE_NAME.equals(mapper.type()); + return mapper.type().startsWith(".") && !PercolatorFieldMapper.TYPE_NAME.equals(mapper.type()); } private boolean assertSerialization(DocumentMapper mapper) 
{ @@ -405,7 +405,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException { String defaultMappingSource; - if (PercolatorService.TYPE_NAME.equals(mappingType)) { + if (PercolatorFieldMapper.TYPE_NAME.equals(mappingType)) { defaultMappingSource = this.defaultPercolatorMappingSource; } else { defaultMappingSource = this.defaultMappingSource; diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolateStats.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolateStats.java deleted file mode 100644 index f4c899dff9a5..000000000000 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolateStats.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.percolator; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentBuilderString; - -import java.io.IOException; - -/** - * Exposes percolator related statistics. - */ -public class PercolateStats implements Streamable, ToXContent { - - private long percolateCount; - private long percolateTimeInMillis; - private long current; - private long memorySizeInBytes = -1; - private long numQueries; - - /** - * Noop constructor for serialization purposes. - */ - public PercolateStats() { - } - - PercolateStats(long percolateCount, long percolateTimeInMillis, long current, long memorySizeInBytes, long numQueries) { - this.percolateCount = percolateCount; - this.percolateTimeInMillis = percolateTimeInMillis; - this.current = current; - this.memorySizeInBytes = memorySizeInBytes; - this.numQueries = numQueries; - } - - /** - * @return The number of times the percolate api has been invoked. - */ - public long getCount() { - return percolateCount; - } - - /** - * @return The total amount of time spend in the percolate api - */ - public long getTimeInMillis() { - return percolateTimeInMillis; - } - - /** - * @return The total amount of time spend in the percolate api - */ - public TimeValue getTime() { - return new TimeValue(getTimeInMillis()); - } - - /** - * @return The total amount of active percolate api invocations. - */ - public long getCurrent() { - return current; - } - - /** - * @return The total number of loaded percolate queries. 
- */ - public long getNumQueries() { - return numQueries; - } - - /** - * @return Temporarily returns -1, but this used to return the total size the loaded queries take in - * memory, but this is disabled now because the size estimation was too expensive cpu wise. This will be enabled - * again when a cheaper size estimation can be found. - */ - public long getMemorySizeInBytes() { - return memorySizeInBytes; - } - - /** - * @return The total size the loaded queries take in memory. - */ - public ByteSizeValue getMemorySize() { - return new ByteSizeValue(memorySizeInBytes); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(Fields.PERCOLATE); - builder.field(Fields.TOTAL, percolateCount); - builder.timeValueField(Fields.TIME_IN_MILLIS, Fields.TIME, percolateTimeInMillis); - builder.field(Fields.CURRENT, current); - builder.field(Fields.MEMORY_SIZE_IN_BYTES, memorySizeInBytes); - builder.field(Fields.MEMORY_SIZE, getMemorySize()); - builder.field(Fields.QUERIES, getNumQueries()); - builder.endObject(); - return builder; - } - - public void add(PercolateStats percolate) { - if (percolate == null) { - return; - } - - percolateCount += percolate.getCount(); - percolateTimeInMillis += percolate.getTimeInMillis(); - current += percolate.getCurrent(); - numQueries += percolate.getNumQueries(); - } - - static final class Fields { - static final XContentBuilderString PERCOLATE = new XContentBuilderString("percolate"); - static final XContentBuilderString TOTAL = new XContentBuilderString("total"); - static final XContentBuilderString TIME = new XContentBuilderString("time"); - static final XContentBuilderString TIME_IN_MILLIS = new XContentBuilderString("time_in_millis"); - static final XContentBuilderString CURRENT = new XContentBuilderString("current"); - static final XContentBuilderString MEMORY_SIZE_IN_BYTES = new XContentBuilderString("memory_size_in_bytes"); - static final 
XContentBuilderString MEMORY_SIZE = new XContentBuilderString("memory_size"); - static final XContentBuilderString QUERIES = new XContentBuilderString("queries"); - } - - public static PercolateStats readPercolateStats(StreamInput in) throws IOException { - PercolateStats stats = new PercolateStats(); - stats.readFrom(in); - return stats; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - percolateCount = in.readVLong(); - percolateTimeInMillis = in.readVLong(); - current = in.readVLong(); - numQueries = in.readVLong(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVLong(percolateCount); - out.writeVLong(percolateTimeInMillis); - out.writeVLong(current); - out.writeVLong(numQueries); - } -} diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java index f44d454655e3..338de5c333d3 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java @@ -19,34 +19,50 @@ package org.elasticsearch.index.percolator; import org.apache.lucene.document.Field; +import org.apache.lucene.document.FieldType; +import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.Query; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import 
org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.index.mapper.core.BinaryFieldMapper; import org.elasticsearch.index.mapper.core.KeywordFieldMapper; +import org.elasticsearch.index.mapper.core.StringFieldMapper; +import org.elasticsearch.index.query.ParsedQuery; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; import java.util.Arrays; +import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; public class PercolatorFieldMapper extends FieldMapper { + public static final String TYPE_NAME = ".percolator"; public static final String NAME = "query"; public static final String CONTENT_TYPE = "percolator"; public static final PercolatorFieldType FIELD_TYPE = new PercolatorFieldType(); private static final String EXTRACTED_TERMS_FIELD_NAME = "extracted_terms"; private static final String UNKNOWN_QUERY_FIELD_NAME = "unknown_query"; + private static final String QUERY_BUILDER_FIELD_NAME = "query_builder_field"; + public static final String EXTRACTED_TERMS_FULL_FIELD_NAME = NAME + "." + EXTRACTED_TERMS_FIELD_NAME; public static final String UNKNOWN_QUERY_FULL_FIELD_NAME = NAME + "." + UNKNOWN_QUERY_FIELD_NAME; + public static final String QUERY_BUILDER_FULL_FIELD_NAME = NAME + "." 
+ QUERY_BUILDER_FIELD_NAME; public static class Builder extends FieldMapper.Builder { @@ -60,19 +76,29 @@ public class PercolatorFieldMapper extends FieldMapper { @Override public PercolatorFieldMapper build(BuilderContext context) { context.path().add(name); - KeywordFieldMapper extractedTermsField = createStringFieldBuilder(EXTRACTED_TERMS_FIELD_NAME).build(context); - KeywordFieldMapper unknownQueryField = createStringFieldBuilder(UNKNOWN_QUERY_FIELD_NAME).build(context); + KeywordFieldMapper extractedTermsField = createExtractQueryFieldBuilder(EXTRACTED_TERMS_FIELD_NAME).build(context); + KeywordFieldMapper unknownQueryField = createExtractQueryFieldBuilder(UNKNOWN_QUERY_FIELD_NAME).build(context); + BinaryFieldMapper queryBuilderField = createQueryBuilderFieldBuilder().build(context); context.path().remove(); - return new PercolatorFieldMapper(name(), fieldType, defaultFieldType, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo, queryShardContext, extractedTermsField, unknownQueryField); + return new PercolatorFieldMapper(name(), fieldType, defaultFieldType, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo, queryShardContext, extractedTermsField, unknownQueryField, queryBuilderField); } - static KeywordFieldMapper.Builder createStringFieldBuilder(String name) { + static KeywordFieldMapper.Builder createExtractQueryFieldBuilder(String name) { KeywordFieldMapper.Builder queryMetaDataFieldBuilder = new KeywordFieldMapper.Builder(name); queryMetaDataFieldBuilder.docValues(false); queryMetaDataFieldBuilder.store(false); queryMetaDataFieldBuilder.indexOptions(IndexOptions.DOCS); return queryMetaDataFieldBuilder; } + + static BinaryFieldMapper.Builder createQueryBuilderFieldBuilder() { + BinaryFieldMapper.Builder builder = new BinaryFieldMapper.Builder(QUERY_BUILDER_FIELD_NAME); + builder.docValues(true); + builder.indexOptions(IndexOptions.NONE); + builder.store(false); + 
builder.fieldType().setDocValuesType(DocValuesType.BINARY); + return builder; + } } public static class TypeParser implements FieldMapper.TypeParser { @@ -111,26 +137,81 @@ public class PercolatorFieldMapper extends FieldMapper { private final QueryShardContext queryShardContext; private final KeywordFieldMapper queryTermsField; private final KeywordFieldMapper unknownQueryField; + private final BinaryFieldMapper queryBuilderField; - public PercolatorFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo, QueryShardContext queryShardContext, KeywordFieldMapper queryTermsField, KeywordFieldMapper unknownQueryField) { + public PercolatorFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, + Settings indexSettings, MultiFields multiFields, CopyTo copyTo, QueryShardContext queryShardContext, + KeywordFieldMapper queryTermsField, KeywordFieldMapper unknownQueryField, + BinaryFieldMapper queryBuilderField) { super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); this.queryShardContext = queryShardContext; this.queryTermsField = queryTermsField; this.unknownQueryField = unknownQueryField; - this.mapUnmappedFieldAsString = PercolatorQueriesRegistry.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING.get(indexSettings); + this.queryBuilderField = queryBuilderField; + this.mapUnmappedFieldAsString = PercolatorQueryCache.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING.get(indexSettings); } @Override public Mapper parse(ParseContext context) throws IOException { QueryShardContext queryShardContext = new QueryShardContext(this.queryShardContext); - Query query = PercolatorQueriesRegistry.parseQuery(queryShardContext, mapUnmappedFieldAsString, context.parser()); + QueryBuilder queryBuilder = parseQueryBuilder(queryShardContext.parseContext(), context.parser()); + // Fetching of terms, shapes and indexed scripts happen during 
this rewrite: + queryBuilder = queryBuilder.rewrite(queryShardContext); + + try (XContentBuilder builder = XContentFactory.contentBuilder(PercolatorQueryCache.QUERY_BUILDER_CONTENT_TYPE)) { + queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap())); + builder.flush(); + byte[] queryBuilderAsBytes = builder.bytes().toBytes(); + context.doc().add(new Field(queryBuilderField.name(), queryBuilderAsBytes, queryBuilderField.fieldType())); + } + + Query query = toQuery(queryShardContext, mapUnmappedFieldAsString, queryBuilder); ExtractQueryTermsService.extractQueryTerms(query, context.doc(), queryTermsField.name(), unknownQueryField.name(), queryTermsField.fieldType()); return null; } + public static Query parseQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, XContentParser parser) throws IOException { + return toQuery(context, mapUnmappedFieldsAsString, parseQueryBuilder(context.parseContext(), parser)); + } + + static Query toQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, QueryBuilder queryBuilder) throws IOException { + context.reset(); + // This means that fields in the query need to exist in the mapping prior to registering this query + // The reason that this is required, is that if a field doesn't exist then the query assumes defaults, which may be undesired. + // + // Even worse when fields mentioned in percolator queries do go added to map after the queries have been registered + // then the percolator queries don't work as expected any more. + // + // Query parsing can't introduce new fields in mappings (which happens when registering a percolator query), + // because field type can't be inferred from queries (like document do) so the best option here is to disallow + // the usage of unmapped fields in percolator queries to avoid unexpected behaviour + // + // if index.percolator.map_unmapped_fields_as_string is set to true, query can contain unmapped fields which will be mapped + // as an analyzed string. 
+ context.setAllowUnmappedFields(false); + context.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString); + context.parseFieldMatcher(context.getIndexSettings().getParseFieldMatcher()); + try { + return queryBuilder.toQuery(context); + } finally { + context.reset(); + } + } + + static QueryBuilder parseQueryBuilder(QueryParseContext context, XContentParser parser) { + context.reset(parser); + try { + return context.parseInnerQueryBuilder(); + } catch (IOException e) { + throw new ParsingException(parser.getTokenLocation(), "Failed to parse", e); + } finally { + context.reset(null); + } + } + @Override public Iterator iterator() { - return Arrays.asList(queryTermsField, unknownQueryField).iterator(); + return Arrays.asList(queryTermsField, unknownQueryField, queryBuilderField).iterator(); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhase.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhase.java new file mode 100644 index 000000000000..c1f9720b53b6 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhase.java @@ -0,0 +1,141 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.percolator; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.ReaderUtil; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.text.Text; +import org.elasticsearch.index.percolator.PercolatorFieldMapper; +import org.elasticsearch.index.percolator.PercolatorQueryCache; +import org.elasticsearch.index.query.ParsedQuery; +import org.elasticsearch.index.query.PercolatorQuery; +import org.elasticsearch.search.SearchParseElement; +import org.elasticsearch.search.fetch.FetchSubPhase; +import org.elasticsearch.search.highlight.HighlightPhase; +import org.elasticsearch.search.highlight.SearchContextHighlight; +import org.elasticsearch.search.internal.InternalSearchHit; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.internal.SubSearchContext; + +import java.util.Collections; +import java.util.List; +import java.util.Map; + +// Highlighting in the case of the percolator query is a bit different, because the PercolatorQuery itself doesn't get highlighted, +// but the source of the PercolatorQuery gets highlighted by each hit with type '.percolator' (percolator queries). 
+public class PercolatorHighlightSubFetchPhase implements FetchSubPhase { + + private final HighlightPhase highlightPhase; + + @Inject + public PercolatorHighlightSubFetchPhase(HighlightPhase highlightPhase) { + this.highlightPhase = highlightPhase; + } + + @Override + public boolean hitsExecutionNeeded(SearchContext context) { + return context.highlight() != null && locatePercolatorQuery(context.query()) != null; + } + + @Override + public void hitsExecute(SearchContext context, InternalSearchHit[] hits) { + PercolatorQuery percolatorQuery = locatePercolatorQuery(context.query()); + if (percolatorQuery == null) { + // shouldn't happen as we checked for the existence of a percolator query in hitsExecutionNeeded(...) + throw new IllegalStateException("couldn't locate percolator query"); + } + + List ctxs = context.searcher().getIndexReader().leaves(); + PercolatorQueryCache queriesRegistry = context.percolatorQueryCache(); + IndexSearcher percolatorIndexSearcher = percolatorQuery.getPercolatorIndexSearcher(); + + LeafReaderContext percolatorLeafReaderContext = percolatorIndexSearcher.getIndexReader().leaves().get(0); + FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext(); + SubSearchContext subSearchContext = + createSubSearchContext(context, percolatorLeafReaderContext, percolatorQuery.getDocumentSource()); + + for (InternalSearchHit hit : hits) { + if (PercolatorFieldMapper.TYPE_NAME.equals(hit.getType())) { + LeafReaderContext ctx = ctxs.get(ReaderUtil.subIndex(hit.docId(), ctxs)); + Query query = queriesRegistry.getQueries(ctx).getQuery(hit.docId() - ctx.docBase); + subSearchContext.parsedQuery(new ParsedQuery(query)); + hitContext.reset( + new InternalSearchHit(0, "unknown", new Text(percolatorQuery.getDocumentType()), Collections.emptyMap()), + percolatorLeafReaderContext, 0, percolatorIndexSearcher + ); + hitContext.cache().clear(); + highlightPhase.hitExecute(subSearchContext, hitContext); + 
hit.highlightFields().putAll(hitContext.hit().getHighlightFields()); + } + } + + } + + @Override + public Map parseElements() { + return Collections.emptyMap(); + } + + @Override + public boolean hitExecutionNeeded(SearchContext context) { + return false; + } + + @Override + public void hitExecute(SearchContext context, HitContext hitContext) { + } + + static PercolatorQuery locatePercolatorQuery(Query query) { + if (query instanceof PercolatorQuery) { + return (PercolatorQuery) query; + } else if (query instanceof BooleanQuery) { + for (BooleanClause clause : ((BooleanQuery) query).clauses()) { + PercolatorQuery result = locatePercolatorQuery(clause.getQuery()); + if (result != null) { + return result; + } + } + } else if (query instanceof ConstantScoreQuery) { + return locatePercolatorQuery(((ConstantScoreQuery) query).getQuery()); + } else if (query instanceof BoostQuery) { + return locatePercolatorQuery(((BoostQuery) query).getQuery()); + } + + return null; + } + + private SubSearchContext createSubSearchContext(SearchContext context, LeafReaderContext leafReaderContext, BytesReference source) { + SubSearchContext subSearchContext = new SubSearchContext(context); + subSearchContext.highlight(new SearchContextHighlight(context.highlight().fields())); + // Enforce highlighting by source, because MemoryIndex doesn't support stored fields. 
+ subSearchContext.highlight().globalForceSource(true); + subSearchContext.lookup().source().setSegmentAndDocument(leafReaderContext, 0); + subSearchContext.lookup().source().setSource(source); + return subSearchContext; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java deleted file mode 100644 index 0a0cb9e96d9c..000000000000 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java +++ /dev/null @@ -1,236 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.percolator; - -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.metrics.CounterMetric; -import org.elasticsearch.common.metrics.MeanMetric; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Property; -import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.mapper.Uid; -import org.elasticsearch.index.mapper.internal.TypeFieldMapper; -import org.elasticsearch.index.mapper.internal.UidFieldMapper; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.shard.AbstractIndexShardComponent; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.percolator.PercolatorService; - -import java.io.Closeable; -import java.io.IOException; -import java.util.Map; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.TimeUnit; - -/** - * Each shard will have a percolator registry even if there isn't a {@link PercolatorService#TYPE_NAME} document type in the index. - * For shards with indices that have no {@link PercolatorService#TYPE_NAME} document type, this will hold no percolate queries. - *

        - * Once a document type has been created, the real-time percolator will start to listen to write events and update the - * this registry with queries in real time. - */ -public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent implements Closeable { - - public final static Setting INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING = - Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, Property.IndexScope); - - private final ConcurrentMap percolateQueries = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency(); - private final QueryShardContext queryShardContext; - private boolean mapUnmappedFieldsAsString; - private final MeanMetric percolateMetric = new MeanMetric(); - private final CounterMetric currentMetric = new CounterMetric(); - private final CounterMetric numberOfQueries = new CounterMetric(); - - public PercolatorQueriesRegistry(ShardId shardId, IndexSettings indexSettings, QueryShardContext queryShardContext) { - super(shardId, indexSettings); - this.queryShardContext = queryShardContext; - this.mapUnmappedFieldsAsString = indexSettings.getValue(INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING); - } - - public ConcurrentMap getPercolateQueries() { - return percolateQueries; - } - - @Override - public void close() { - clear(); - } - - public void clear() { - percolateQueries.clear(); - } - - - public void addPercolateQuery(String idAsString, BytesReference source) { - Query newquery = parsePercolatorDocument(idAsString, source); - BytesRef id = new BytesRef(idAsString); - percolateQueries.put(id, newquery); - numberOfQueries.inc(); - - } - - public void removePercolateQuery(String idAsString) { - BytesRef id = new BytesRef(idAsString); - Query query = percolateQueries.remove(id); - if (query != null) { - numberOfQueries.dec(); - } - } - - public Query parsePercolatorDocument(String id, BytesReference source) { - try (XContentParser sourceParser = XContentHelper.createParser(source)) { - String 
currentFieldName = null; - XContentParser.Token token = sourceParser.nextToken(); // move the START_OBJECT - if (token != XContentParser.Token.START_OBJECT) { - throw new ElasticsearchException("failed to parse query [" + id + "], not starting with OBJECT"); - } - while ((token = sourceParser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = sourceParser.currentName(); - } else if (token == XContentParser.Token.START_OBJECT) { - if ("query".equals(currentFieldName)) { - return parseQuery(queryShardContext, mapUnmappedFieldsAsString, sourceParser); - } else { - sourceParser.skipChildren(); - } - } else if (token == XContentParser.Token.START_ARRAY) { - sourceParser.skipChildren(); - } - } - } catch (Exception e) { - throw new PercolatorException(shardId().getIndex(), "failed to parse query [" + id + "]", e); - } - return null; - } - - public static Query parseQuery(QueryShardContext queryShardContext, boolean mapUnmappedFieldsAsString, XContentParser parser) { - QueryShardContext context = new QueryShardContext(queryShardContext); - try { - context.reset(parser); - // This means that fields in the query need to exist in the mapping prior to registering this query - // The reason that this is required, is that if a field doesn't exist then the query assumes defaults, which may be undesired. - // - // Even worse when fields mentioned in percolator queries do go added to map after the queries have been registered - // then the percolator queries don't work as expected any more. 
- // - // Query parsing can't introduce new fields in mappings (which happens when registering a percolator query), - // because field type can't be inferred from queries (like document do) so the best option here is to disallow - // the usage of unmapped fields in percolator queries to avoid unexpected behaviour - // - // if index.percolator.map_unmapped_fields_as_string is set to true, query can contain unmapped fields which will be mapped - // as an analyzed string. - context.setAllowUnmappedFields(false); - context.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString); - return context.parseInnerQuery(); - } catch (IOException e) { - throw new ParsingException(parser.getTokenLocation(), "Failed to parse", e); - } finally { - context.reset(null); - } - } - - public void loadQueries(IndexReader reader) { - logger.trace("loading percolator queries..."); - final int loadedQueries; - try { - Query query = new TermQuery(new Term(TypeFieldMapper.NAME, PercolatorService.TYPE_NAME)); - QueriesLoaderCollector queryCollector = new QueriesLoaderCollector(PercolatorQueriesRegistry.this, logger); - IndexSearcher indexSearcher = new IndexSearcher(reader); - indexSearcher.setQueryCache(null); - indexSearcher.search(query, queryCollector); - Map queries = queryCollector.queries(); - for (Map.Entry entry : queries.entrySet()) { - percolateQueries.put(entry.getKey(), entry.getValue()); - numberOfQueries.inc(); - } - loadedQueries = queries.size(); - } catch (Exception e) { - throw new PercolatorException(shardId.getIndex(), "failed to load queries from percolator index", e); - } - logger.debug("done loading [{}] percolator queries", loadedQueries); - } - - public boolean isPercolatorQuery(Engine.Index operation) { - if (PercolatorService.TYPE_NAME.equals(operation.type())) { - parsePercolatorDocument(operation.id(), operation.source()); - return true; - } - return false; - } - - public boolean isPercolatorQuery(Engine.Delete operation) { - return 
PercolatorService.TYPE_NAME.equals(operation.type()); - } - - public synchronized void updatePercolateQuery(Engine engine, String id) { - // this can be called out of order as long as for every change to a percolator document it's invoked. This will always - // fetch the latest change but might fetch the same change twice if updates / deletes happen concurrently. - try (Engine.GetResult getResult = engine.get(new Engine.Get(true, new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(PercolatorService.TYPE_NAME, id))))) { - if (getResult.exists()) { - addPercolateQuery(id, getResult.source().source); - } else { - removePercolateQuery(id); - } - } - } - - public void prePercolate() { - currentMetric.inc(); - } - - public void postPercolate(long tookInNanos) { - currentMetric.dec(); - percolateMetric.inc(tookInNanos); - } - - /** - * @return The current metrics - */ - public PercolateStats stats() { - return new PercolateStats(percolateMetric.count(), TimeUnit.NANOSECONDS.toMillis(percolateMetric.sum()), currentMetric.count(), -1, numberOfQueries.count()); - } - - // Enable when a more efficient manner is found for estimating the size of a Lucene query. - /*private static long computeSizeInMemory(HashedBytesRef id, Query query) { - long size = (3 * RamUsageEstimator.NUM_BYTES_INT) + RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + id.bytes.bytes.length; - size += RamEstimator.sizeOf(query); - return size; - } - - private static final class RamEstimator { - // we move this into it's own class to exclude it from the forbidden API checks - // it's fine to use here! 
- static long sizeOf(Query query) { - return RamUsageEstimator.sizeOf(query); - } - }*/ -} diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCache.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCache.java new file mode 100644 index 000000000000..7c9602b49094 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCache.java @@ -0,0 +1,266 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.percolator; + +import com.carrotsearch.hppc.IntObjectHashMap; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.StoredFieldVisitor; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Query; +import org.apache.lucene.util.Bits; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.cache.Cache; +import org.elasticsearch.common.cache.CacheBuilder; +import org.elasticsearch.common.lucene.index.ElasticsearchLeafReader; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContent; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.AbstractIndexComponent; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexWarmer; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.mapper.internal.SourceFieldMapper; +import org.elasticsearch.index.mapper.internal.TypeFieldMapper; +import org.elasticsearch.index.query.PercolatorQuery; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardUtils; +import org.elasticsearch.threadpool.ThreadPool; + +import 
java.io.Closeable; +import java.io.IOException; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Executor; +import java.util.function.Supplier; + +public final class PercolatorQueryCache extends AbstractIndexComponent + implements Closeable, LeafReader.CoreClosedListener, PercolatorQuery.QueryRegistry { + + public final static Setting INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING = + Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, Setting.Property.IndexScope); + + public final static XContentType QUERY_BUILDER_CONTENT_TYPE = XContentType.SMILE; + + private final Supplier queryShardContextSupplier; + private final Cache cache; + private final boolean mapUnmappedFieldsAsString; + + public PercolatorQueryCache(IndexSettings indexSettings, Supplier queryShardContextSupplier) { + super(indexSettings); + this.queryShardContextSupplier = queryShardContextSupplier; + cache = CacheBuilder.builder().build(); + this.mapUnmappedFieldsAsString = indexSettings.getValue(INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING); + } + + @Override + public Leaf getQueries(LeafReaderContext ctx) { + QueriesLeaf percolatorQueries = cache.get(ctx.reader().getCoreCacheKey()); + if (percolatorQueries == null) { + throw new IllegalStateException("queries not loaded, queries should be have been preloaded during index warming..."); + } + return percolatorQueries; + } + + public IndexWarmer.Listener createListener(ThreadPool threadPool) { + return new IndexWarmer.Listener() { + + final Executor executor = threadPool.executor(ThreadPool.Names.WARMER); + + @Override + public IndexWarmer.TerminationHandle warmNewReaders(IndexShard indexShard, Engine.Searcher searcher) { + final CountDownLatch latch = new CountDownLatch(searcher.reader().leaves().size()); + for (final LeafReaderContext ctx : searcher.reader().leaves()) { + executor.execute(() -> { + try { + final long start = System.nanoTime(); + QueriesLeaf queries = loadQueries(ctx, 
indexShard.indexSettings().getIndexVersionCreated()); + cache.put(ctx.reader().getCoreCacheKey(), queries); + if (indexShard.warmerService().logger().isTraceEnabled()) { + indexShard.warmerService().logger().trace( + "loading percolator queries took [{}]", + TimeValue.timeValueNanos(System.nanoTime() - start) + ); + } + } catch (Throwable t) { + indexShard.warmerService().logger().warn("failed to load percolator queries", t); + } finally { + latch.countDown(); + } + }); + } + return () -> latch.await(); + } + + @Override + public IndexWarmer.TerminationHandle warmTopReader(IndexShard indexShard, Engine.Searcher searcher) { + return IndexWarmer.TerminationHandle.NO_WAIT; + } + }; + } + + QueriesLeaf loadQueries(LeafReaderContext context, Version indexVersionCreated) throws IOException { + LeafReader leafReader = context.reader(); + ShardId shardId = ShardUtils.extractShardId(leafReader); + if (shardId == null) { + throw new IllegalStateException("can't resolve shard id"); + } + if (indexSettings.getIndex().equals(shardId.getIndex()) == false) { + // percolator cache insanity + String message = "Trying to load queries for index " + shardId.getIndex() + " with cache of index " + indexSettings.getIndex(); + throw new IllegalStateException(message); + } + + IntObjectHashMap queries = new IntObjectHashMap<>(); + boolean legacyLoading = indexVersionCreated.before(Version.V_5_0_0); + PostingsEnum postings = leafReader.postings(new Term(TypeFieldMapper.NAME, PercolatorFieldMapper.TYPE_NAME), PostingsEnum.NONE); + if (postings != null) { + if (legacyLoading) { + LegacyQueryFieldVisitor visitor = new LegacyQueryFieldVisitor(); + for (int docId = postings.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = postings.nextDoc()) { + leafReader.document(docId, visitor); + queries.put(docId, parseLegacyPercolatorDocument(docId, visitor.source)); + visitor.source = null; // reset + } + } else { + BinaryDocValues binaryDocValues = 
leafReader.getBinaryDocValues(PercolatorFieldMapper.QUERY_BUILDER_FULL_FIELD_NAME); + if (binaryDocValues != null) { + for (int docId = postings.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = postings.nextDoc()) { + BytesRef queryBuilder = binaryDocValues.get(docId); + if (queryBuilder.length > 0) { + queries.put(docId, parseQueryBuilder(docId, queryBuilder)); + } + } + } + } + } + leafReader.addCoreClosedListener(this); + return new QueriesLeaf(shardId, queries); + } + + private Query parseQueryBuilder(int docId, BytesRef queryBuilder) { + XContent xContent = QUERY_BUILDER_CONTENT_TYPE.xContent(); + try (XContentParser sourceParser = xContent.createParser(queryBuilder.bytes, queryBuilder.offset, queryBuilder.length)) { + QueryShardContext context = queryShardContextSupplier.get(); + return PercolatorFieldMapper.parseQuery(context, mapUnmappedFieldsAsString, sourceParser); + } catch (IOException e) { + throw new PercolatorException(index(), "failed to parse query builder for document [" + docId + "]", e); + } + } + + private Query parseLegacyPercolatorDocument(int docId, BytesReference source) { + try (XContentParser sourceParser = XContentHelper.createParser(source)) { + String currentFieldName = null; + XContentParser.Token token = sourceParser.nextToken(); // move the START_OBJECT + if (token != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchException("failed to parse query [" + docId + "], not starting with OBJECT"); + } + while ((token = sourceParser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = sourceParser.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + if ("query".equals(currentFieldName)) { + QueryShardContext context = queryShardContextSupplier.get(); + return PercolatorFieldMapper.parseQuery(context, mapUnmappedFieldsAsString, sourceParser); + } else { + sourceParser.skipChildren(); + } + } else if (token == 
XContentParser.Token.START_ARRAY) { + sourceParser.skipChildren(); + } + } + } catch (Exception e) { + throw new PercolatorException(index(), "failed to parse query [" + docId + "]", e); + } + return null; + } + + public PercolatorQueryCacheStats getStats(ShardId shardId) { + int numberOfQueries = 0; + for (QueriesLeaf queries : cache.values()) { + if (shardId.equals(queries.shardId)) { + numberOfQueries += queries.queries.size(); + } + } + return new PercolatorQueryCacheStats(numberOfQueries); + } + + @Override + public void onClose(Object cacheKey) throws IOException { + cache.invalidate(cacheKey); + } + + @Override + public void close() throws IOException { + cache.invalidateAll(); + } + + final static class LegacyQueryFieldVisitor extends StoredFieldVisitor { + + private BytesArray source; + + @Override + public void binaryField(FieldInfo fieldInfo, byte[] bytes) throws IOException { + source = new BytesArray(bytes); + } + + @Override + public Status needsField(FieldInfo fieldInfo) throws IOException { + if (source != null) { + return Status.STOP; + } + if (SourceFieldMapper.NAME.equals(fieldInfo.name)) { + return Status.YES; + } else { + return Status.NO; + } + } + + } + + final static class QueriesLeaf implements Leaf { + + final ShardId shardId; + final IntObjectHashMap queries; + + QueriesLeaf(ShardId shardId, IntObjectHashMap queries) { + this.shardId = shardId; + this.queries = queries; + } + + @Override + public Query getQuery(int docId) { + return queries.get(docId); + } + } +} diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCacheStats.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCacheStats.java new file mode 100644 index 000000000000..a8e3b7f47992 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCacheStats.java @@ -0,0 +1,90 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.index.percolator; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; + +import java.io.IOException; + +/** + * Exposes percolator query cache statistics. + */ +public class PercolatorQueryCacheStats implements Streamable, ToXContent { + + private long numQueries; + + /** + * Noop constructor for serialization purposes. + */ + public PercolatorQueryCacheStats() { + } + + PercolatorQueryCacheStats(long numQueries) { + this.numQueries = numQueries; + } + + /** + * @return The total number of loaded percolate queries. 
+ */ + public long getNumQueries() { + return numQueries; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(Fields.PERCOLATOR); + builder.field(Fields.QUERIES, getNumQueries()); + builder.endObject(); + return builder; + } + + public void add(PercolatorQueryCacheStats percolate) { + if (percolate == null) { + return; + } + + numQueries += percolate.getNumQueries(); + } + + static final class Fields { + static final XContentBuilderString PERCOLATOR = new XContentBuilderString("percolator"); + static final XContentBuilderString QUERIES = new XContentBuilderString("num_queries"); + } + + public static PercolatorQueryCacheStats readPercolateStats(StreamInput in) throws IOException { + PercolatorQueryCacheStats stats = new PercolatorQueryCacheStats(); + stats.readFrom(in); + return stats; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + numQueries = in.readVLong(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVLong(numQueries); + } +} diff --git a/core/src/main/java/org/elasticsearch/index/percolator/QueriesLoaderCollector.java b/core/src/main/java/org/elasticsearch/index/percolator/QueriesLoaderCollector.java deleted file mode 100644 index 1bea43e4ea10..000000000000 --- a/core/src/main/java/org/elasticsearch/index/percolator/QueriesLoaderCollector.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.percolator; - -import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.SimpleCollector; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexFieldDataService; -import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import org.elasticsearch.index.fieldvisitor.FieldsVisitor; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.Uid; -import org.elasticsearch.index.mapper.internal.UidFieldMapper; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -/** - */ -final class QueriesLoaderCollector extends SimpleCollector { - - private final Map queries = new HashMap<>(); - private final FieldsVisitor fieldsVisitor = new FieldsVisitor(true); - private final PercolatorQueriesRegistry percolator; - private final ESLogger logger; - - private LeafReader reader; - - QueriesLoaderCollector(PercolatorQueriesRegistry percolator, ESLogger logger) { - this.percolator = percolator; - this.logger = logger; - } - - public Map queries() { - return this.queries; - } - - @Override - public void collect(int doc) throws IOException { - fieldsVisitor.reset(); - reader.document(doc, fieldsVisitor); - final Uid 
uid = fieldsVisitor.uid(); - - try { - // id is only used for logging, if we fail we log the id in the catch statement - final Query parseQuery = percolator.parsePercolatorDocument(null, fieldsVisitor.source()); - if (parseQuery != null) { - queries.put(new BytesRef(uid.id()), parseQuery); - } else { - logger.warn("failed to add query [{}] - parser returned null", uid); - } - - } catch (Exception e) { - logger.warn("failed to add query [{}]", e, uid); - } - } - - @Override - protected void doSetNextReader(LeafReaderContext context) throws IOException { - reader = context.reader(); - } - - @Override - public void setScorer(Scorer scorer) throws IOException { - } - - @Override - public boolean needsScores() { - return false; - } -} diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolatorQuery.java b/core/src/main/java/org/elasticsearch/index/query/PercolatorQuery.java similarity index 60% rename from core/src/main/java/org/elasticsearch/percolator/PercolatorQuery.java rename to core/src/main/java/org/elasticsearch/index/query/PercolatorQuery.java index 98be7d308af8..0b22b17f65eb 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolatorQuery.java +++ b/core/src/main/java/org/elasticsearch/index/query/PercolatorQuery.java @@ -17,10 +17,9 @@ * under the License. 
*/ -package org.elasticsearch.percolator; +package org.elasticsearch.index.query; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanQuery; @@ -31,51 +30,48 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; -import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.Accountable; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.index.fieldvisitor.SingleFieldsVisitor; -import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.percolator.ExtractQueryTermsService; import java.io.IOException; -import java.util.Map; +import java.util.Collection; +import java.util.Objects; import java.util.Set; import static org.apache.lucene.search.BooleanClause.Occur.FILTER; import static org.apache.lucene.search.BooleanClause.Occur.MUST; -final class PercolatorQuery extends Query { +public final class PercolatorQuery extends Query implements Accountable { - public static final float MATCH_COST = - (1 << 14) // stored field access cost, approximated by the number of bytes in a block - + 1000; // cost of matching the query against the document, arbitrary as it would be really complex to estimate + // cost of matching the query against the document, arbitrary as it would be really complex to estimate + public static final float MATCH_COST = 1000; - static class Builder { + public static class Builder { + private final String docType; + private final QueryRegistry queryRegistry; + private final BytesReference documentSource; private final IndexSearcher percolatorIndexSearcher; - private final Map percolatorQueries; - private Query percolateQuery; private Query queriesMetaDataQuery; 
private final Query percolateTypeQuery; /** - * @param percolatorIndexSearcher The index searcher on top of the in-memory index that holds the document being percolated - * @param percolatorQueries All the registered percolator queries - * @param percolateTypeQuery A query that identifies all document containing percolator queries + * @param docType The type of the document being percolated + * @param queryRegistry The registry holding all the percolator queries as Lucene queries. + * @param documentSource The source of the document being percolated + * @param percolatorIndexSearcher The index searcher on top of the in-memory index that holds the document being percolated + * @param percolateTypeQuery A query that identifies all document containing percolator queries */ - Builder(IndexSearcher percolatorIndexSearcher, Map percolatorQueries, Query percolateTypeQuery) { - this.percolatorIndexSearcher = percolatorIndexSearcher; - this.percolatorQueries = percolatorQueries; - this.percolateTypeQuery = percolateTypeQuery; - } - - /** - * Optionally sets a query that reduces the number of queries to percolate based on custom metadata attached - * on the percolator documents. 
- */ - void setPercolateQuery(Query percolateQuery) { - this.percolateQuery = percolateQuery; + public Builder(String docType, QueryRegistry queryRegistry, BytesReference documentSource, IndexSearcher percolatorIndexSearcher, + Query percolateTypeQuery) { + this.docType = Objects.requireNonNull(docType); + this.documentSource = Objects.requireNonNull(documentSource); + this.percolatorIndexSearcher = Objects.requireNonNull(percolatorIndexSearcher); + this.queryRegistry = Objects.requireNonNull(queryRegistry); + this.percolateTypeQuery = Objects.requireNonNull(percolateTypeQuery); } /** @@ -85,39 +81,43 @@ final class PercolatorQuery extends Query { * @param extractedTermsFieldName The name of the field to get the extracted terms from * @param unknownQueryFieldname The field used to mark documents whose queries couldn't all get extracted */ - void extractQueryTermsQuery(String extractedTermsFieldName, String unknownQueryFieldname) throws IOException { - this.queriesMetaDataQuery = ExtractQueryTermsService.createQueryTermsQuery(percolatorIndexSearcher.getIndexReader(), extractedTermsFieldName, unknownQueryFieldname); + public void extractQueryTermsQuery(String extractedTermsFieldName, String unknownQueryFieldname) throws IOException { + this.queriesMetaDataQuery = ExtractQueryTermsService.createQueryTermsQuery( + percolatorIndexSearcher.getIndexReader(), extractedTermsFieldName, unknownQueryFieldname + ); } - PercolatorQuery build() { + public PercolatorQuery build() { BooleanQuery.Builder builder = new BooleanQuery.Builder(); builder.add(percolateTypeQuery, FILTER); if (queriesMetaDataQuery != null) { builder.add(queriesMetaDataQuery, FILTER); } - if (percolateQuery != null){ - builder.add(percolateQuery, MUST); - } - return new PercolatorQuery(builder.build(), percolatorIndexSearcher, percolatorQueries); + return new PercolatorQuery(docType, queryRegistry, documentSource, builder.build(), percolatorIndexSearcher); } } + private final String documentType; + private 
final QueryRegistry queryRegistry; + private final BytesReference documentSource; private final Query percolatorQueriesQuery; private final IndexSearcher percolatorIndexSearcher; - private final Map percolatorQueries; - private PercolatorQuery(Query percolatorQueriesQuery, IndexSearcher percolatorIndexSearcher, Map percolatorQueries) { + private PercolatorQuery(String documentType, QueryRegistry queryRegistry, BytesReference documentSource, + Query percolatorQueriesQuery, IndexSearcher percolatorIndexSearcher) { + this.documentType = documentType; + this.documentSource = documentSource; this.percolatorQueriesQuery = percolatorQueriesQuery; + this.queryRegistry = queryRegistry; this.percolatorIndexSearcher = percolatorIndexSearcher; - this.percolatorQueries = percolatorQueries; } @Override public Query rewrite(IndexReader reader) throws IOException { Query rewritten = percolatorQueriesQuery.rewrite(reader); if (rewritten != percolatorQueriesQuery) { - return new PercolatorQuery(rewritten, percolatorIndexSearcher, percolatorQueries); + return new PercolatorQuery(documentType, queryRegistry, documentSource, rewritten, percolatorIndexSearcher); } else { return this; } @@ -160,7 +160,7 @@ final class PercolatorQuery extends Query { return null; } - final LeafReader leafReader = leafReaderContext.reader(); + final QueryRegistry.Leaf percolatorQueries = queryRegistry.getQueries(leafReaderContext); return new Scorer(this) { @Override @@ -173,7 +173,7 @@ final class PercolatorQuery extends Query { return new TwoPhaseIterator(approximation.iterator()) { @Override public boolean matches() throws IOException { - return matchDocId(approximation.docID(), leafReader); + return matchDocId(approximation.docID()); } @Override @@ -198,27 +198,30 @@ final class PercolatorQuery extends Query { return approximation.docID(); } - boolean matchDocId(int docId, LeafReader leafReader) throws IOException { - SingleFieldsVisitor singleFieldsVisitor = new 
SingleFieldsVisitor(UidFieldMapper.NAME); - leafReader.document(docId, singleFieldsVisitor); - BytesRef percolatorQueryId = new BytesRef(singleFieldsVisitor.uid().id()); - return matchQuery(percolatorQueryId); + boolean matchDocId(int docId) throws IOException { + Query query = percolatorQueries.getQuery(docId); + if (query != null) { + return Lucene.exists(percolatorIndexSearcher, query); + } else { + return false; + } } }; } }; } - boolean matchQuery(BytesRef percolatorQueryId) throws IOException { - Query percolatorQuery = percolatorQueries.get(percolatorQueryId); - if (percolatorQuery != null) { - return Lucene.exists(percolatorIndexSearcher, percolatorQuery); - } else { - return false; - } + public IndexSearcher getPercolatorIndexSearcher() { + return percolatorIndexSearcher; } - private final Object instance = new Object(); + public String getDocumentType() { + return documentType; + } + + public BytesReference getDocumentSource() { + return documentSource; + } @Override public boolean equals(Object o) { @@ -228,19 +231,46 @@ final class PercolatorQuery extends Query { PercolatorQuery that = (PercolatorQuery) o; - return instance.equals(that.instance); + if (!documentType.equals(that.documentType)) return false; + return documentSource.equals(that.documentSource); } @Override public int hashCode() { int result = super.hashCode(); - result = 31 * result + instance.hashCode(); + result = 31 * result + documentType.hashCode(); + result = 31 * result + documentSource.hashCode(); return result; } @Override public String toString(String s) { - return "PercolatorQuery{inner={" + percolatorQueriesQuery.toString(s) + "}}"; + return "PercolatorQuery{document_type={" + documentType + "},document_source={" + documentSource.toUtf8() + + "},inner={" + percolatorQueriesQuery.toString(s) + "}}"; } + + @Override + public long ramBytesUsed() { + long sizeInBytes = 0; + if (documentSource.hasArray()) { + sizeInBytes += documentSource.array().length; + } else { + sizeInBytes += 
documentSource.length(); + } + return sizeInBytes; + } + + public interface QueryRegistry { + + Leaf getQueries(LeafReaderContext ctx); + + interface Leaf { + + Query getQuery(int docId); + + } + + } + } diff --git a/core/src/main/java/org/elasticsearch/index/query/PercolatorQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/PercolatorQueryBuilder.java new file mode 100644 index 000000000000..5cb1e54d2035 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/query/PercolatorQueryBuilder.java @@ -0,0 +1,375 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.MultiReader; +import org.apache.lucene.index.SlowCompositeReaderWrapper; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.memory.MemoryIndex; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.Weight; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.Version; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.DocumentMapperForType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.internal.TypeFieldMapper; +import org.elasticsearch.index.mapper.internal.UidFieldMapper; 
+import org.elasticsearch.index.percolator.PercolatorFieldMapper; +import org.elasticsearch.index.percolator.PercolatorQueryCache; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.index.mapper.SourceToParse.source; + +public class PercolatorQueryBuilder extends AbstractQueryBuilder { + + public static final String NAME = "percolator"; + static final PercolatorQueryBuilder PROTO = new PercolatorQueryBuilder(null, null, null, null, null, null, null, null); + + private final String documentType; + private final BytesReference document; + + private final String indexedDocumentIndex; + private final String indexedDocumentType; + private final String indexedDocumentId; + private final String indexedDocumentRouting; + private final String indexedDocumentPreference; + private final Long indexedDocumentVersion; + + public PercolatorQueryBuilder(String documentType, BytesReference document) { + if (documentType == null) { + throw new IllegalArgumentException("[document_type] is a required argument"); + } + if (document == null) { + throw new IllegalArgumentException("[document] is a required argument"); + } + this.documentType = documentType; + this.document = document; + indexedDocumentIndex = null; + indexedDocumentType = null; + indexedDocumentId = null; + indexedDocumentRouting = null; + indexedDocumentPreference = null; + indexedDocumentVersion = null; + } + + public PercolatorQueryBuilder(String documentType, String indexedDocumentIndex, String indexedDocumentType, + String indexedDocumentId, String indexedDocumentRouting, String indexedDocumentPreference, + Long indexedDocumentVersion) { + if (documentType == null) { + throw new IllegalArgumentException("[document_type] is a required argument"); + } + if (indexedDocumentIndex == null) { + throw new IllegalArgumentException("[index] is a required argument"); + } + if (indexedDocumentType == null) { + throw new IllegalArgumentException("[type] is a 
required argument"); + } + if (indexedDocumentId == null) { + throw new IllegalArgumentException("[id] is a required argument"); + } + this.documentType = documentType; + this.indexedDocumentIndex = indexedDocumentIndex; + this.indexedDocumentType = indexedDocumentType; + this.indexedDocumentId = indexedDocumentId; + this.indexedDocumentRouting = indexedDocumentRouting; + this.indexedDocumentPreference = indexedDocumentPreference; + this.indexedDocumentVersion = indexedDocumentVersion; + this.document = null; + } + + private PercolatorQueryBuilder(String documentType, BytesReference document, String indexedDocumentIndex, String indexedDocumentType, + String indexedDocumentId, String indexedDocumentRouting, String indexedDocumentPreference, + Long indexedDocumentVersion) { + this.documentType = documentType; + this.document = document; + this.indexedDocumentIndex = indexedDocumentIndex; + this.indexedDocumentType = indexedDocumentType; + this.indexedDocumentId = indexedDocumentId; + this.indexedDocumentRouting = indexedDocumentRouting; + this.indexedDocumentPreference = indexedDocumentPreference; + this.indexedDocumentVersion = indexedDocumentVersion; + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + builder.field(PercolatorQueryParser.DOCUMENT_TYPE_FIELD.getPreferredName(), documentType); + if (document != null) { + XContentType contentType = XContentFactory.xContentType(document); + if (contentType == builder.contentType()) { + builder.rawField(PercolatorQueryParser.DOCUMENT_FIELD.getPreferredName(), document); + } else { + XContentParser parser = XContentFactory.xContent(contentType).createParser(document); + parser.nextToken(); + builder.field(PercolatorQueryParser.DOCUMENT_FIELD.getPreferredName()); + builder.copyCurrentStructure(parser); + } + } + if (indexedDocumentIndex != null || indexedDocumentType != null || indexedDocumentId != null) { + if (indexedDocumentIndex != 
null) { + builder.field(PercolatorQueryParser.INDEXED_DOCUMENT_FIELD_INDEX.getPreferredName(), indexedDocumentIndex); + } + if (indexedDocumentType != null) { + builder.field(PercolatorQueryParser.INDEXED_DOCUMENT_FIELD_TYPE.getPreferredName(), indexedDocumentType); + } + if (indexedDocumentId != null) { + builder.field(PercolatorQueryParser.INDEXED_DOCUMENT_FIELD_ID.getPreferredName(), indexedDocumentId); + } + if (indexedDocumentRouting != null) { + builder.field(PercolatorQueryParser.INDEXED_DOCUMENT_FIELD_ROUTING.getPreferredName(), indexedDocumentRouting); + } + if (indexedDocumentPreference != null) { + builder.field(PercolatorQueryParser.INDEXED_DOCUMENT_FIELD_PREFERENCE.getPreferredName(), indexedDocumentPreference); + } + if (indexedDocumentVersion != null) { + builder.field(PercolatorQueryParser.INDEXED_DOCUMENT_FIELD_VERSION.getPreferredName(), indexedDocumentVersion); + } + } + printBoostAndQueryName(builder); + builder.endObject(); + } + + @Override + protected PercolatorQueryBuilder doReadFrom(StreamInput in) throws IOException { + String docType = in.readString(); + String documentIndex = in.readOptionalString(); + String documentType = in.readOptionalString(); + String documentId = in.readOptionalString(); + String documentRouting = in.readOptionalString(); + String documentPreference = in.readOptionalString(); + Long documentVersion = null; + if (in.readBoolean()) { + documentVersion = in.readVLong(); + } + BytesReference documentSource = null; + if (in.readBoolean()) { + documentSource = in.readBytesReference(); + } + return new PercolatorQueryBuilder(docType, documentSource, documentIndex, documentType, documentId, + documentRouting, documentPreference, documentVersion); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(documentType); + out.writeOptionalString(indexedDocumentIndex); + out.writeOptionalString(indexedDocumentType); + out.writeOptionalString(indexedDocumentId); + 
out.writeOptionalString(indexedDocumentRouting); + out.writeOptionalString(indexedDocumentPreference); + if (indexedDocumentVersion != null) { + out.writeBoolean(true); + out.writeVLong(indexedDocumentVersion); + } else { + out.writeBoolean(false); + } + if (document != null) { + out.writeBoolean(true); + out.writeBytesReference(document); + } else { + out.writeBoolean(false); + } + } + + @Override + protected boolean doEquals(PercolatorQueryBuilder other) { + return Objects.equals(documentType, other.documentType) + && Objects.equals(document, other.document) + && Objects.equals(indexedDocumentIndex, other.indexedDocumentIndex) + && Objects.equals(indexedDocumentType, other.indexedDocumentType) + && Objects.equals(indexedDocumentId, other.indexedDocumentId); + } + + @Override + protected int doHashCode() { + return Objects.hash(documentType, document, indexedDocumentIndex, indexedDocumentType, indexedDocumentId); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException { + if (document != null) { + return this; + } + + GetRequest getRequest = new GetRequest(indexedDocumentIndex, indexedDocumentType, indexedDocumentId); + getRequest.preference("_local"); + getRequest.routing(indexedDocumentRouting); + getRequest.preference(indexedDocumentPreference); + if (indexedDocumentVersion != null) { + getRequest.version(indexedDocumentVersion); + } + GetResponse getResponse = queryShardContext.getClient().get(getRequest).actionGet(); + if (getResponse.isExists() == false) { + throw new ResourceNotFoundException( + "indexed document [{}/{}/{}] couldn't be found", indexedDocumentIndex, indexedDocumentType, indexedDocumentId + ); + } + return new PercolatorQueryBuilder(documentType, getResponse.getSourceAsBytesRef()); + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + if (indexedDocumentIndex != null || 
indexedDocumentType != null || indexedDocumentId != null) { + throw new IllegalStateException("query builder must be rewritten first"); + } + + if (document == null) { + throw new IllegalStateException("nothing to percolator"); + } + + MapperService mapperService = context.getMapperService(); + DocumentMapperForType docMapperForType = mapperService.documentMapperWithAutoCreate(documentType); + DocumentMapper docMapper = docMapperForType.getDocumentMapper(); + + ParsedDocument doc = docMapper.parse(source(document) + .index(context.index().getName()) + .id("_temp_id") + .type(documentType)); + + Analyzer defaultAnalyzer = context.getAnalysisService().defaultIndexAnalyzer(); + final IndexSearcher docSearcher; + if (doc.docs().size() > 1) { + assert docMapper.hasNestedObjects(); + docSearcher = createMultiDocumentSearcher(docMapper, defaultAnalyzer, doc); + } else { + // TODO: we may want to bring to MemoryIndex thread local cache back... + // but I'm unsure about the real benefits. + MemoryIndex memoryIndex = new MemoryIndex(true); + indexDoc(docMapper, defaultAnalyzer, doc.rootDoc(), memoryIndex); + docSearcher = memoryIndex.createSearcher(); + docSearcher.setQueryCache(null); + } + + PercolatorQueryCache registry = context.getPercolatorQueryCache(); + if (registry == null) { + throw new QueryShardException(context, "no percolator query registry"); + } + + Query percolateTypeQuery = new TermQuery(new Term(TypeFieldMapper.NAME, PercolatorFieldMapper.TYPE_NAME)); + PercolatorQuery.Builder builder = new PercolatorQuery.Builder( + documentType, registry, document, docSearcher, percolateTypeQuery + ); + Settings indexSettings = registry.getIndexSettings().getSettings(); + if (indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, null).onOrAfter(Version.V_5_0_0)) { + builder.extractQueryTermsQuery( + PercolatorFieldMapper.EXTRACTED_TERMS_FULL_FIELD_NAME, PercolatorFieldMapper.UNKNOWN_QUERY_FULL_FIELD_NAME + ); + } + return builder.build(); + } + + public 
String getDocumentType() { + return documentType; + } + + public BytesReference getDocument() { + return document; + } + + private IndexSearcher createMultiDocumentSearcher(DocumentMapper docMapper, Analyzer defaultAnalyzer, ParsedDocument doc) { + IndexReader[] memoryIndices = new IndexReader[doc.docs().size()]; + List docs = doc.docs(); + int rootDocIndex = docs.size() - 1; + assert rootDocIndex > 0; + for (int i = 0; i < docs.size(); i++) { + ParseContext.Document d = docs.get(i); + MemoryIndex memoryIndex = new MemoryIndex(true); + indexDoc(docMapper, defaultAnalyzer, d, memoryIndex); + memoryIndices[i] = memoryIndex.createSearcher().getIndexReader(); + } + try { + MultiReader mReader = new MultiReader(memoryIndices, true); + LeafReader slowReader = SlowCompositeReaderWrapper.wrap(mReader); + final IndexSearcher slowSearcher = new IndexSearcher(slowReader) { + + @Override + public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException { + BooleanQuery.Builder bq = new BooleanQuery.Builder(); + bq.add(query, BooleanClause.Occur.MUST); + bq.add(Queries.newNestedFilter(), BooleanClause.Occur.MUST_NOT); + return super.createNormalizedWeight(bq.build(), needsScores); + } + + }; + slowSearcher.setQueryCache(null); + return slowSearcher; + } catch (IOException e) { + throw new ElasticsearchException("Failed to create index for percolator with nested document ", e); + } + } + + private void indexDoc(DocumentMapper documentMapper, Analyzer defaultAnalyzer, ParseContext.Document document, + MemoryIndex memoryIndex) { + for (IndexableField field : document.getFields()) { + if (field.fieldType().indexOptions() == IndexOptions.NONE && field.name().equals(UidFieldMapper.NAME)) { + continue; + } + + Analyzer analyzer = defaultAnalyzer; + if (documentMapper != null && documentMapper.mappers().getMapper(field.name()) != null) { + analyzer = documentMapper.mappers().indexAnalyzer(); + } + try { + try (TokenStream tokenStream = 
field.tokenStream(analyzer, null)) { + if (tokenStream != null) { + memoryIndex.addField(field.name(), tokenStream, field.boost()); + } + } + } catch (IOException e) { + throw new ElasticsearchException("Failed to create token stream", e); + } + } + } + +} diff --git a/core/src/main/java/org/elasticsearch/index/query/PercolatorQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/PercolatorQueryParser.java new file mode 100644 index 000000000000..a559db599272 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/query/PercolatorQueryParser.java @@ -0,0 +1,134 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; + +public class PercolatorQueryParser implements QueryParser { + + public static final ParseField DOCUMENT_FIELD = new ParseField("document"); + public static final ParseField DOCUMENT_TYPE_FIELD = new ParseField("document_type"); + public static final ParseField INDEXED_DOCUMENT_FIELD_INDEX = new ParseField("index"); + public static final ParseField INDEXED_DOCUMENT_FIELD_TYPE = new ParseField("type"); + public static final ParseField INDEXED_DOCUMENT_FIELD_ID = new ParseField("id"); + public static final ParseField INDEXED_DOCUMENT_FIELD_ROUTING = new ParseField("routing"); + public static final ParseField INDEXED_DOCUMENT_FIELD_PREFERENCE = new ParseField("preference"); + public static final ParseField INDEXED_DOCUMENT_FIELD_VERSION = new ParseField("version"); + + @Override + public String[] names() { + return new String[]{PercolatorQueryBuilder.NAME}; + } + + @Override + public PercolatorQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { + XContentParser parser = parseContext.parser(); + float boost = AbstractQueryBuilder.DEFAULT_BOOST; + + String documentType = null; + + String indexedDocumentIndex = null; + String indexedDocumentType = null; + String indexedDocumentId = null; + String indexedDocumentRouting = null; + String indexedDocumentPreference = null; + Long indexedDocumentVersion = null; + + BytesReference source = null; + + String queryName = null; + String currentFieldName = null; + + XContentParser.Token token; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == 
XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + if (parseContext.parseFieldMatcher().match(currentFieldName, DOCUMENT_FIELD)) { + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + builder.copyCurrentStructure(parser); + builder.flush(); + source = builder.bytes(); + } + } else { + throw new ParsingException(parser.getTokenLocation(), "[" + PercolatorQueryBuilder.NAME + + "] query does not support [" + token + "]"); + } + } else if (token.isValue()) { + if (parseContext.parseFieldMatcher().match(currentFieldName, DOCUMENT_TYPE_FIELD)) { + documentType = parser.text(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_INDEX)) { + indexedDocumentIndex = parser.text(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_TYPE)) { + indexedDocumentType = parser.text(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_ID)) { + indexedDocumentId = parser.text(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_ROUTING)) { + indexedDocumentRouting = parser.text(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_PREFERENCE)) { + indexedDocumentPreference = parser.text(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_VERSION)) { + indexedDocumentVersion = parser.longValue(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) { + boost = parser.floatValue(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) { + queryName = parser.text(); + } else { + throw new ParsingException(parser.getTokenLocation(), "[" + PercolatorQueryBuilder.NAME + + "] query does not support [" + currentFieldName + "]"); + } + } 
else { + throw new ParsingException(parser.getTokenLocation(), "[" + PercolatorQueryBuilder.NAME + + "] query does not support [" + token + "]"); + } + } + + if (documentType == null) { + throw new IllegalArgumentException("[" + PercolatorQueryBuilder.NAME + "] query is missing required [" + + DOCUMENT_TYPE_FIELD.getPreferredName() + "] parameter"); + } + + PercolatorQueryBuilder queryBuilder; + if (source != null) { + queryBuilder = new PercolatorQueryBuilder(documentType, source); + } else if (indexedDocumentId != null) { + queryBuilder = new PercolatorQueryBuilder(documentType, indexedDocumentIndex, indexedDocumentType, + indexedDocumentId, indexedDocumentRouting, indexedDocumentPreference, indexedDocumentVersion); + } else { + throw new IllegalArgumentException("[" + PercolatorQueryBuilder.NAME + "] query, nothing to percolate"); + } + queryBuilder.queryName(queryName); + queryBuilder.boost(boost); + return queryBuilder; + } + + @Override + public PercolatorQueryBuilder getBuilderPrototype() { + return PercolatorQueryBuilder.PROTO; + } + +} diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java index 21c1f3ff695b..f04f03fcbcd9 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java @@ -832,6 +832,18 @@ public abstract class QueryBuilders { return new ExistsQueryBuilder(name); } + public static PercolatorQueryBuilder percolatorQuery(String documentType, BytesReference document) { + return new PercolatorQueryBuilder(documentType, document); + } + + public static PercolatorQueryBuilder percolatorQuery(String documentType, String indexedDocumentIndex, + String indexedDocumentType, String indexedDocumentId, + String indexedDocumentRouting, String indexedDocumentPreference, + Long indexedDocumentVersion) { + return new PercolatorQueryBuilder(documentType, 
indexedDocumentIndex, indexedDocumentType, indexedDocumentId, + indexedDocumentRouting, indexedDocumentPreference, indexedDocumentVersion); + } + private QueryBuilders() { } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index 6acd5272f89f..a21b53cdf51b 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -44,9 +44,9 @@ import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; +import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper; import org.elasticsearch.index.query.support.NestedScope; import org.elasticsearch.index.similarity.SimilarityService; @@ -87,13 +87,15 @@ public class QueryShardContext extends QueryRewriteContext { private final Map namedQueries = new HashMap<>(); private final MapperQueryParser queryParser = new MapperQueryParser(this); + private final IndicesQueriesRegistry indicesQueriesRegistry; + private final PercolatorQueryCache percolatorQueryCache; private boolean allowUnmappedFields; private boolean mapUnmappedFieldAsString; private NestedScope nestedScope; boolean isFilter; // pkg private for testing public QueryShardContext(IndexSettings indexSettings, BitsetFilterCache bitsetFilterCache, IndexFieldDataService indexFieldDataService, MapperService mapperService, SimilarityService similarityService, ScriptService scriptService, - final IndicesQueriesRegistry indicesQueriesRegistry) { + final IndicesQueriesRegistry 
indicesQueriesRegistry, PercolatorQueryCache percolatorQueryCache) { super(indexSettings, scriptService, indicesQueriesRegistry); this.indexSettings = indexSettings; this.similarityService = similarityService; @@ -101,17 +103,18 @@ public class QueryShardContext extends QueryRewriteContext { this.bitsetFilterCache = bitsetFilterCache; this.indexFieldDataService = indexFieldDataService; this.allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields(); - + this.indicesQueriesRegistry = indicesQueriesRegistry; + this.percolatorQueryCache = percolatorQueryCache; } public QueryShardContext(QueryShardContext source) { - this(source.indexSettings, source.bitsetFilterCache, source.indexFieldDataService, source.mapperService, source.similarityService, source.scriptService, source.indicesQueriesRegistry); + this(source.indexSettings, source.bitsetFilterCache, source.indexFieldDataService, source.mapperService, source.similarityService, source.scriptService, source.indicesQueriesRegistry, source.percolatorQueryCache); this.types = source.getTypes(); } public QueryShardContext clone() { - return new QueryShardContext(indexSettings, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry); + return new QueryShardContext(indexSettings, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry, percolatorQueryCache); } public void parseFieldMatcher(ParseFieldMatcher parseFieldMatcher) { @@ -148,6 +151,10 @@ public class QueryShardContext extends QueryRewriteContext { return mapperService; } + public PercolatorQueryCache getPercolatorQueryCache() { + return percolatorQueryCache; + } + public Similarity getSearchSimilarity() { return similarityService != null ? 
similarityService.similarity(mapperService) : null; } diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 59b3f367c65a..5d54a8c22c31 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -81,9 +81,7 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.merge.MergeStats; -import org.elasticsearch.index.percolator.PercolateStats; -import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; -import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.index.recovery.RecoveryStats; import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.search.stats.SearchStats; @@ -104,7 +102,6 @@ import org.elasticsearch.index.warmer.WarmerStats; import org.elasticsearch.indices.IndexingMemoryController; import org.elasticsearch.indices.recovery.RecoveryFailedException; import org.elasticsearch.indices.recovery.RecoveryState; -import org.elasticsearch.percolator.PercolatorService; import org.elasticsearch.search.suggest.completion.CompletionFieldStats; import org.elasticsearch.search.suggest.completion.CompletionStats; import org.elasticsearch.threadpool.ThreadPool; @@ -137,7 +134,6 @@ public class IndexShard extends AbstractIndexShardComponent { private final ShardIndexWarmerService shardWarmerService; private final ShardRequestCache shardQueryCache; private final ShardFieldData shardFieldData; - private final PercolatorQueriesRegistry percolatorQueriesRegistry; private final IndexFieldDataService indexFieldDataService; private final ShardSuggestMetric shardSuggestMetric = new ShardSuggestMetric(); private final ShardBitsetFilterCache 
shardBitsetFilterCache; @@ -197,7 +193,8 @@ public class IndexShard extends AbstractIndexShardComponent { public IndexShard(ShardId shardId, IndexSettings indexSettings, ShardPath path, Store store, IndexCache indexCache, MapperService mapperService, SimilarityService similarityService, IndexFieldDataService indexFieldDataService, @Nullable EngineFactory engineFactory, - IndexEventListener indexEventListener, IndexSearcherWrapper indexSearcherWrapper, NodeServicesProvider provider, SearchSlowLog slowLog, Engine.Warmer warmer, IndexingOperationListener... listeners) { + IndexEventListener indexEventListener, IndexSearcherWrapper indexSearcherWrapper, NodeServicesProvider provider, + SearchSlowLog slowLog, Engine.Warmer warmer, IndexingOperationListener... listeners) { super(shardId, indexSettings); final Settings settings = indexSettings.getSettings(); this.codecService = new CodecService(mapperService, logger); @@ -242,8 +239,6 @@ public class IndexShard extends AbstractIndexShardComponent { this.engineConfig = newEngineConfig(translogConfig, cachingPolicy); this.suspendableRefContainer = new SuspendableRefContainer(); this.searcherWrapper = indexSearcherWrapper; - QueryShardContext queryShardContext = new QueryShardContext(indexSettings, indexCache.bitsetFilterCache(), indexFieldDataService, mapperService, similarityService, provider.getScriptService(), provider.getIndicesQueriesRegistry()); - this.percolatorQueriesRegistry = new PercolatorQueriesRegistry(shardId, indexSettings, queryShardContext); } public Store store() { @@ -472,12 +467,8 @@ public class IndexShard extends AbstractIndexShardComponent { if (logger.isTraceEnabled()) { logger.trace("index [{}][{}]{}", index.type(), index.id(), index.docs()); } - final boolean isPercolatorQuery = percolatorQueriesRegistry.isPercolatorQuery(index); Engine engine = getEngine(); created = engine.index(index); - if (isPercolatorQuery) { - percolatorQueriesRegistry.updatePercolateQuery(engine, index.id()); - } 
index.endTime(System.nanoTime()); } catch (Throwable ex) { indexingOperationListeners.postIndex(index, ex); @@ -515,12 +506,8 @@ public class IndexShard extends AbstractIndexShardComponent { if (logger.isTraceEnabled()) { logger.trace("delete [{}]", delete.uid().text()); } - final boolean isPercolatorQuery = percolatorQueriesRegistry.isPercolatorQuery(delete); Engine engine = getEngine(); engine.delete(delete); - if (isPercolatorQuery) { - percolatorQueriesRegistry.updatePercolateQuery(engine, delete.id()); - } delete.endTime(System.nanoTime()); } catch (Throwable ex) { indexingOperationListeners.postDelete(delete, ex); @@ -640,10 +627,6 @@ public class IndexShard extends AbstractIndexShardComponent { return shardFieldData.stats(fields); } - public PercolatorQueriesRegistry percolateRegistry() { - return percolatorQueriesRegistry; - } - public TranslogStats translogStats() { return getEngine().getTranslog().stats(); } @@ -788,18 +771,15 @@ public class IndexShard extends AbstractIndexShardComponent { engine.flushAndClose(); } } finally { // playing safe here and close the engine even if the above succeeds - close can be called multiple times - IOUtils.close(engine, percolatorQueriesRegistry); + IOUtils.close(engine); } } } } public IndexShard postRecovery(String reason) throws IndexShardStartedException, IndexShardRelocatedException, IndexShardClosedException { - if (mapperService.hasMapping(PercolatorService.TYPE_NAME)) { + if (mapperService.hasMapping(PercolatorFieldMapper.TYPE_NAME)) { refresh("percolator_load_queries"); - try (Engine.Searcher searcher = getEngine().acquireSearcher("percolator_load_queries")) { - this.percolatorQueriesRegistry.loadQueries(searcher.reader()); - } } synchronized (mutex) { if (state == IndexShardState.CLOSED) { @@ -1096,10 +1076,6 @@ public class IndexShard extends AbstractIndexShardComponent { return getEngine().getTranslog(); } - public PercolateStats percolateStats() { - return percolatorQueriesRegistry.stats(); - } - public 
IndexEventListener getIndexEventListener() { return indexEventListener; } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index f587e7212cf1..58f128242dce 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -265,7 +265,7 @@ public class IndicesService extends AbstractLifecycleComponent i if (indexShard.routingEntry() == null) { continue; } - IndexShardStats indexShardStats = new IndexShardStats(indexShard.shardId(), new ShardStats[] { new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesQueryCache, indexShard, flags), indexShard.commitStats()) }); + IndexShardStats indexShardStats = new IndexShardStats(indexShard.shardId(), new ShardStats[] { new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesQueryCache, indexService.cache().getPercolatorQueryCache(), indexShard, flags), indexShard.commitStats()) }); if (!statsByShard.containsKey(indexService.index())) { statsByShard.put(indexService.index(), arrayAsArrayList(indexShardStats)); } else { diff --git a/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java b/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java index da8e617759da..fce4e8411dbb 100644 --- a/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java +++ b/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java @@ -38,7 +38,7 @@ import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.shard.IndexingStats; import org.elasticsearch.index.merge.MergeStats; -import org.elasticsearch.index.percolator.PercolateStats; +import org.elasticsearch.index.percolator.PercolatorQueryCacheStats; import org.elasticsearch.index.recovery.RecoveryStats; import 
org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.search.stats.SearchStats; @@ -105,8 +105,8 @@ public class NodeIndicesStats implements Streamable, ToXContent { } @Nullable - public PercolateStats getPercolate() { - return stats.getPercolate(); + public PercolatorQueryCacheStats getPercolate() { + return stats.getPercolatorCache(); } @Nullable diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index 3388fb1e2c02..7052ca63189c 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -84,8 +84,6 @@ import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.node.service.NodeService; -import org.elasticsearch.percolator.PercolatorModule; -import org.elasticsearch.percolator.PercolatorService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsModule; import org.elasticsearch.plugins.PluginsService; @@ -226,7 +224,6 @@ public class Node implements Closeable { modules.add(new ActionModule(DiscoveryNode.ingestNode(settings), false)); modules.add(new GatewayModule(settings)); modules.add(new NodeClientModule()); - modules.add(new PercolatorModule()); modules.add(new ResourceWatcherModule()); modules.add(new RepositoriesModule()); modules.add(new TribeModule()); @@ -486,8 +483,6 @@ public class Node implements Closeable { toClose.add(injector.getInstance(RestController.class)); toClose.add(() -> stopWatch.stop().start("transport")); toClose.add(injector.getInstance(TransportService.class)); - toClose.add(() -> stopWatch.stop().start("percolator_service")); - toClose.add(injector.getInstance(PercolatorService.class)); for (Class plugin : pluginsService.nodeServices()) { toClose.add(() -> stopWatch.stop().start("plugin(" + plugin.getName() + ")")); diff --git 
a/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java b/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java deleted file mode 100644 index 9d091a4c0bd0..000000000000 --- a/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -package org.elasticsearch.percolator; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.MultiReader; -import org.apache.lucene.index.SlowCompositeReaderWrapper; -import org.apache.lucene.index.memory.MemoryIndex; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.Collector; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.Weight; -import org.apache.lucene.util.CloseableThreadLocal; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.internal.UidFieldMapper; - -import java.io.IOException; -import java.util.List; - - -/** - * Implementation of {@link PercolatorIndex} that can hold multiple Lucene documents by - * opening multiple {@link MemoryIndex} based IndexReaders and wrapping them via a single top level reader. 
- */ -class MultiDocumentPercolatorIndex implements PercolatorIndex { - - private final CloseableThreadLocal cache; - - MultiDocumentPercolatorIndex(CloseableThreadLocal cache) { - this.cache = cache; - } - - @Override - public void prepare(PercolateContext context, ParsedDocument parsedDocument) { - IndexReader[] memoryIndices = new IndexReader[parsedDocument.docs().size()]; - List docs = parsedDocument.docs(); - int rootDocIndex = docs.size() - 1; - assert rootDocIndex > 0; - MemoryIndex rootDocMemoryIndex = null; - for (int i = 0; i < docs.size(); i++) { - ParseContext.Document d = docs.get(i); - MemoryIndex memoryIndex; - if (rootDocIndex == i) { - // the last doc is always the rootDoc, since that is usually the biggest document it make sense - // to reuse the MemoryIndex it uses - memoryIndex = rootDocMemoryIndex = cache.get(); - } else { - memoryIndex = new MemoryIndex(true); - } - memoryIndices[i] = indexDoc(d, memoryIndex, context, parsedDocument).createSearcher().getIndexReader(); - } - try { - MultiReader mReader = new MultiReader(memoryIndices, true); - LeafReader slowReader = SlowCompositeReaderWrapper.wrap(mReader); - final IndexSearcher slowSearcher = new IndexSearcher(slowReader) { - - @Override - public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException { - BooleanQuery.Builder bq = new BooleanQuery.Builder(); - bq.add(query, BooleanClause.Occur.MUST); - bq.add(Queries.newNestedFilter(), BooleanClause.Occur.MUST_NOT); - return super.createNormalizedWeight(bq.build(), needsScores); - } - - }; - slowSearcher.setQueryCache(null); - DocSearcher docSearcher = new DocSearcher(slowSearcher, rootDocMemoryIndex); - context.initialize(docSearcher, parsedDocument); - } catch (IOException e) { - throw new ElasticsearchException("Failed to create index for percolator with nested document ", e); - } - } - - MemoryIndex indexDoc(ParseContext.Document d, MemoryIndex memoryIndex, PercolateContext context, ParsedDocument 
parsedDocument) { - for (IndexableField field : d.getFields()) { - Analyzer analyzer = context.analysisService().defaultIndexAnalyzer(); - DocumentMapper documentMapper = context.mapperService().documentMapper(parsedDocument.type()); - if (documentMapper != null && documentMapper.mappers().getMapper(field.name()) != null) { - analyzer = documentMapper.mappers().indexAnalyzer(); - } - if (field.fieldType().indexOptions() == IndexOptions.NONE && field.name().equals(UidFieldMapper.NAME)) { - continue; - } - try { - // TODO: instead of passing null here, we can have a CTL> and pass previous, - // like the indexer does - try (TokenStream tokenStream = field.tokenStream(analyzer, null)) { - if (tokenStream != null) { - memoryIndex.addField(field.name(), tokenStream, field.boost()); - } - } - } catch (IOException e) { - throw new ElasticsearchException("Failed to create token stream", e); - } - } - return memoryIndex; - } - - private class DocSearcher extends Engine.Searcher { - - private final MemoryIndex rootDocMemoryIndex; - - private DocSearcher(IndexSearcher searcher, MemoryIndex rootDocMemoryIndex) { - super("percolate", searcher); - this.rootDocMemoryIndex = rootDocMemoryIndex; - } - - @Override - public void close() { - try { - this.reader().close(); - rootDocMemoryIndex.reset(); - } catch (IOException e) { - throw new ElasticsearchException("failed to close IndexReader in percolator with nested doc", e); - } - } - - } -} diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java deleted file mode 100644 index f73c8f31a071..000000000000 --- a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java +++ /dev/null @@ -1,691 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.percolator; - - -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.Collector; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.Sort; -import org.apache.lucene.search.FieldDoc; -import org.apache.lucene.util.Counter; -import org.elasticsearch.action.percolate.PercolateShardRequest; -import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.cache.recycler.PageCacheRecycler; -import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.lease.Releasables; -import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.analysis.AnalysisService; -import org.elasticsearch.index.cache.bitset.BitsetFilterCache; -import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.fielddata.IndexFieldDataService; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.object.ObjectMapper; -import org.elasticsearch.index.query.ParsedQuery; -import 
org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.index.similarity.SimilarityService; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.SearchHitField; -import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.aggregations.SearchContextAggregations; -import org.elasticsearch.search.dfs.DfsSearchResult; -import org.elasticsearch.search.fetch.FetchPhase; -import org.elasticsearch.search.fetch.FetchSearchResult; -import org.elasticsearch.search.fetch.FetchSubPhase; -import org.elasticsearch.search.fetch.FetchSubPhaseContext; -import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; -import org.elasticsearch.search.fetch.script.ScriptFieldsContext; -import org.elasticsearch.search.fetch.source.FetchSourceContext; -import org.elasticsearch.search.highlight.SearchContextHighlight; -import org.elasticsearch.search.internal.ContextIndexSearcher; -import org.elasticsearch.search.internal.InternalSearchHit; -import org.elasticsearch.search.internal.InternalSearchHitField; -import org.elasticsearch.search.internal.ScrollContext; -import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.internal.ShardSearchRequest; -import org.elasticsearch.search.lookup.LeafSearchLookup; -import org.elasticsearch.search.lookup.SearchLookup; -import org.elasticsearch.search.profile.Profilers; -import org.elasticsearch.search.query.QuerySearchResult; -import org.elasticsearch.search.rescore.RescoreSearchContext; -import org.elasticsearch.search.suggest.SuggestionSearchContext; - -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - */ -public class PercolateContext extends SearchContext { - - private int size = 10; - private boolean trackScores; - - private final SearchShardTarget searchShardTarget; - private final IndexService 
indexService; - private final IndexFieldDataService fieldDataService; - private final IndexShard indexShard; - private final PageCacheRecycler pageCacheRecycler; - private final BigArrays bigArrays; - private final ScriptService scriptService; - private final MapperService mapperService; - private final int numberOfShards; - private final Query aliasFilter; - private final long originNanoTime = System.nanoTime(); - private final long startTime; - private final boolean onlyCount; - private Engine.Searcher docSearcher; - private Engine.Searcher engineSearcher; - private ContextIndexSearcher searcher; - - private SearchContextHighlight highlight; - private ParsedQuery parsedQuery; - private Query query; - private Query percolateQuery; - private FetchSubPhase.HitContext hitContext; - private SearchContextAggregations aggregations; - private QuerySearchResult querySearchResult; - private Sort sort; - private final Map subPhaseContexts = new HashMap<>(); - private final QueryShardContext queryShardContext; - private final Map, Collector> queryCollectors = new HashMap<>(); - private SearchLookup searchLookup; - private final FetchPhase fetchPhase; - - public PercolateContext(PercolateShardRequest request, SearchShardTarget searchShardTarget, IndexShard indexShard, - IndexService indexService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, ScriptService scriptService, - Query aliasFilter, ParseFieldMatcher parseFieldMatcher, FetchPhase fetchPhase) { - super(parseFieldMatcher); - this.indexShard = indexShard; - this.indexService = indexService; - this.fetchPhase = fetchPhase; - this.fieldDataService = indexService.fieldData(); - this.mapperService = indexService.mapperService(); - this.searchShardTarget = searchShardTarget; - this.pageCacheRecycler = pageCacheRecycler; - this.bigArrays = bigArrays.withCircuitBreaking(); - this.querySearchResult = new QuerySearchResult(0, searchShardTarget); - this.engineSearcher = indexShard.acquireSearcher("percolate"); - 
this.searcher = new ContextIndexSearcher(engineSearcher, indexService.cache().query(), indexShard.getQueryCachingPolicy()); - this.scriptService = scriptService; - this.numberOfShards = request.getNumberOfShards(); - this.aliasFilter = aliasFilter; - this.startTime = request.getStartTime(); - this.onlyCount = request.onlyCount(); - queryShardContext = indexService.newQueryShardContext(); - queryShardContext.setTypes(request.documentType()); - } - - // for testing: - PercolateContext(PercolateShardRequest request, SearchShardTarget searchShardTarget, MapperService mapperService, QueryShardContext queryShardContext) { - super(null); - this.searchShardTarget = searchShardTarget; - this.mapperService = mapperService; - this.indexService = null; - this.indexShard = null; - this.fieldDataService = null; - this.pageCacheRecycler = null; - this.bigArrays = null; - this.scriptService = null; - this.aliasFilter = null; - this.startTime = 0; - this.numberOfShards = 0; - this.onlyCount = true; - this.queryShardContext = queryShardContext; - this.fetchPhase = null; - } - - public IndexSearcher docSearcher() { - return docSearcher.searcher(); - } - - public void initialize(Engine.Searcher docSearcher, ParsedDocument parsedDocument) { - this.docSearcher = docSearcher; - IndexReader indexReader = docSearcher.reader(); - LeafReaderContext atomicReaderContext = indexReader.leaves().get(0); - this.searchLookup = new SearchLookup(mapperService(), fieldData(), queryShardContext.getTypes()); - LeafSearchLookup leafLookup = searchLookup.getLeafSearchLookup(atomicReaderContext); - leafLookup.setDocument(0); - leafLookup.source().setSource(parsedDocument.source()); - - Map fields = new HashMap<>(); - for (IndexableField field : parsedDocument.rootDoc().getFields()) { - fields.put(field.name(), new InternalSearchHitField(field.name(), Collections.emptyList())); - } - hitContext().reset( - new InternalSearchHit(0, "unknown", new Text(parsedDocument.type()), fields), - atomicReaderContext, 0, 
docSearcher.searcher() - ); - } - - @Override - public IndexShard indexShard() { - return indexShard; - } - - public IndexService indexService() { - return indexService; - } - - public Query percolateQuery() { - return percolateQuery; - } - - public void percolateQuery(Query percolateQuery) { - this.percolateQuery = percolateQuery; - } - - public FetchSubPhase.HitContext hitContext() { - if (hitContext == null) { - hitContext = new FetchSubPhase.HitContext(); - } - return hitContext; - } - - public boolean isOnlyCount() { - return onlyCount; - } - - public Query percolatorTypeFilter(){ - return indexService().mapperService().documentMapper(PercolatorService.TYPE_NAME).typeFilter(); - } - - @Override - public SearchContextHighlight highlight() { - return highlight; - } - - @Override - public void highlight(SearchContextHighlight highlight) { - if (highlight != null) { - // Enforce highlighting by source, because MemoryIndex doesn't support stored fields. - highlight.globalForceSource(true); - } - this.highlight = highlight; - } - - @Override - public SearchShardTarget shardTarget() { - return searchShardTarget; - } - - @Override - public SearchLookup lookup() { - // we cache this since it's really just a single document lookup - check the init method for details - assert searchLookup != null : "context is not initialized"; - assert Arrays.equals(searchLookup.doc().getTypes(), getQueryShardContext().getTypes()) : "types mismatch - can't return lookup"; - return this.searchLookup; - } - - @Override - protected void doClose() { - Releasables.close(engineSearcher, docSearcher); - } - - @Override - public MapperService mapperService() { - return mapperService; - } - - @Override - public SearchContext parsedQuery(ParsedQuery query) { - this.parsedQuery = query; - this.query = query.query(); - return this; - } - - @Override - public ParsedQuery parsedQuery() { - return parsedQuery; - } - - @Override - public Query query() { - return query; - } - - @Override - public 
IndexFieldDataService fieldData() { - return fieldDataService; - } - - @Override - public SearchContextAggregations aggregations() { - return aggregations; - } - - @Override - public SearchContext aggregations(SearchContextAggregations aggregations) { - this.aggregations = aggregations; - return this; - } - - @Override - public SubPhaseContext getFetchSubPhaseContext(FetchSubPhase.ContextFactory contextFactory) { - String subPhaseName = contextFactory.getName(); - if (subPhaseContexts.get(subPhaseName) == null) { - subPhaseContexts.put(subPhaseName, contextFactory.newContextInstance()); - } - return (SubPhaseContext) subPhaseContexts.get(subPhaseName); - } - - // Unused: - @Override - public void preProcess() { - throw new UnsupportedOperationException(); - } - - @Override - public Query searchFilter(String[] types) { - return aliasFilter(); - } - - @Override - public long id() { - throw new UnsupportedOperationException(); - } - - @Override - public String source() { - throw new UnsupportedOperationException(); - } - - @Override - public ShardSearchRequest request() { - throw new UnsupportedOperationException(); - } - - @Override - public SearchType searchType() { - throw new UnsupportedOperationException(); - } - - @Override - public SearchContext searchType(SearchType searchType) { - throw new UnsupportedOperationException(); - } - - @Override - public int numberOfShards() { - return numberOfShards; - } - - @Override - public float queryBoost() { - throw new UnsupportedOperationException(); - } - - @Override - public SearchContext queryBoost(float queryBoost) { - throw new UnsupportedOperationException(); - } - - @Override - public long getOriginNanoTime() { - return originNanoTime; - } - - @Override - protected long nowInMillisImpl() { - return startTime; - } - - @Override - public ScrollContext scrollContext() { - throw new UnsupportedOperationException(); - } - - @Override - public SearchContext scrollContext(ScrollContext scroll) { - throw new 
UnsupportedOperationException(); - } - - @Override - public SuggestionSearchContext suggest() { - throw new UnsupportedOperationException(); - } - - @Override - public void suggest(SuggestionSearchContext suggest) { - throw new UnsupportedOperationException(); - } - - @Override - public List rescore() { - throw new UnsupportedOperationException(); - } - - @Override - public void addRescore(RescoreSearchContext rescore) { - throw new UnsupportedOperationException(); - } - - @Override - public boolean hasScriptFields() { - throw new UnsupportedOperationException(); - } - - @Override - public ScriptFieldsContext scriptFields() { - throw new UnsupportedOperationException(); - } - - @Override - public boolean sourceRequested() { - throw new UnsupportedOperationException(); - } - - @Override - public boolean hasFetchSourceContext() { - throw new UnsupportedOperationException(); - } - - @Override - public FetchSourceContext fetchSourceContext() { - throw new UnsupportedOperationException(); - } - - @Override - public SearchContext fetchSourceContext(FetchSourceContext fetchSourceContext) { - throw new UnsupportedOperationException(); - } - - @Override - public ContextIndexSearcher searcher() { - return searcher; - } - - @Override - public AnalysisService analysisService() { - return indexService.analysisService(); - } - - @Override - public SimilarityService similarityService() { - return indexService.similarityService(); - } - - @Override - public ScriptService scriptService() { - return scriptService; - } - - @Override - public PageCacheRecycler pageCacheRecycler() { - return pageCacheRecycler; - } - - @Override - public BigArrays bigArrays() { - return bigArrays; - } - - @Override - public BitsetFilterCache bitsetFilterCache() { - return indexService.cache().bitsetFilterCache(); - } - - @Override - public long timeoutInMillis() { - return -1; - } - - @Override - public void timeoutInMillis(long timeoutInMillis) { - throw new UnsupportedOperationException(); - } - - 
@Override - public int terminateAfter() { - return DEFAULT_TERMINATE_AFTER; - } - - @Override - public void terminateAfter(int terminateAfter) { - throw new UnsupportedOperationException(); - } - - @Override - public SearchContext minimumScore(float minimumScore) { - throw new UnsupportedOperationException(); - } - - @Override - public Float minimumScore() { - return null; - } - - @Override - public SearchContext sort(Sort sort) { - this.sort = sort; - return this; - } - - @Override - public Sort sort() { - return sort; - } - - @Override - public SearchContext trackScores(boolean trackScores) { - this.trackScores = trackScores; - return this; - } - - @Override - public boolean trackScores() { - return trackScores; - } - - @Override - public SearchContext searchAfter(FieldDoc searchAfter) { - throw new UnsupportedOperationException(); - } - - @Override - public FieldDoc searchAfter() { - return null; - } - - @Override - public SearchContext parsedPostFilter(ParsedQuery postFilter) { - throw new UnsupportedOperationException(); - } - - @Override - public ParsedQuery parsedPostFilter() { - return null; - } - - @Override - public Query aliasFilter() { - return aliasFilter; - } - - @Override - public int from() { - return 0; - } - - @Override - public SearchContext from(int from) { - throw new UnsupportedOperationException(); - } - - @Override - public int size() { - return size; - } - - @Override - public SearchContext size(int size) { - this.size = size; - return this; - } - - @Override - public boolean hasFieldNames() { - throw new UnsupportedOperationException(); - } - - @Override - public List fieldNames() { - throw new UnsupportedOperationException(); - } - - @Override - public void emptyFieldNames() { - throw new UnsupportedOperationException(); - } - - @Override - public boolean explain() { - throw new UnsupportedOperationException(); - } - - @Override - public void explain(boolean explain) { - throw new UnsupportedOperationException(); - } - - @Override - 
public List groupStats() { - throw new UnsupportedOperationException(); - } - - @Override - public void groupStats(List groupStats) { - throw new UnsupportedOperationException(); - } - - @Override - public boolean version() { - throw new UnsupportedOperationException(); - } - - @Override - public void version(boolean version) { - throw new UnsupportedOperationException(); - } - - @Override - public int[] docIdsToLoad() { - throw new UnsupportedOperationException(); - } - - @Override - public int docIdsToLoadFrom() { - throw new UnsupportedOperationException(); - } - - @Override - public int docIdsToLoadSize() { - throw new UnsupportedOperationException(); - } - - @Override - public SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int docsIdsToLoadSize) { - throw new UnsupportedOperationException(); - } - - @Override - public void accessed(long accessTime) { - throw new UnsupportedOperationException(); - } - - @Override - public long lastAccessTime() { - throw new UnsupportedOperationException(); - } - - @Override - public long keepAlive() { - throw new UnsupportedOperationException(); - } - - @Override - public void keepAlive(long keepAlive) { - throw new UnsupportedOperationException(); - } - - @Override - public DfsSearchResult dfsResult() { - throw new UnsupportedOperationException(); - } - - @Override - public QuerySearchResult queryResult() { - return querySearchResult; - } - - @Override - public FetchSearchResult fetchResult() { - throw new UnsupportedOperationException(); - } - - @Override - public FetchPhase fetchPhase() { - return fetchPhase; - } - - @Override - public MappedFieldType smartNameFieldType(String name) { - return mapperService().fullName(name); - } - - @Override - public ObjectMapper getObjectMapper(String name) { - throw new UnsupportedOperationException(); - } - - @Override - public Counter timeEstimateCounter() { - throw new UnsupportedOperationException(); - } - - @Override - public InnerHitsContext innerHits() { - 
throw new UnsupportedOperationException(); - } - - @Override - public Map, Collector> queryCollectors() { - return queryCollectors; - } - - @Override - public QueryShardContext getQueryShardContext() { - return queryShardContext; - } - - @Override - public Profilers getProfilers() { - throw new UnsupportedOperationException(); - } -} diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolateDocumentParser.java b/core/src/main/java/org/elasticsearch/percolator/PercolateDocumentParser.java deleted file mode 100644 index 50db3cecaa60..000000000000 --- a/core/src/main/java/org/elasticsearch/percolator/PercolateDocumentParser.java +++ /dev/null @@ -1,215 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.percolator; - -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.Query; -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.action.percolate.PercolateShardRequest; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.mapper.DocumentMapperForType; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.search.SearchParseElement; -import org.elasticsearch.search.aggregations.AggregationPhase; -import org.elasticsearch.search.highlight.HighlightPhase; -import org.elasticsearch.search.sort.SortParseElement; - -import java.util.Map; - -import static org.elasticsearch.index.mapper.SourceToParse.source; - -public class PercolateDocumentParser { - - private final HighlightPhase highlightPhase; - private final SortParseElement sortParseElement; - private final AggregationPhase aggregationPhase; - - @Inject - public PercolateDocumentParser(HighlightPhase highlightPhase, SortParseElement sortParseElement, - AggregationPhase aggregationPhase) { - this.highlightPhase = highlightPhase; - this.sortParseElement = sortParseElement; - this.aggregationPhase = aggregationPhase; - } - - public ParsedDocument parse(final PercolateShardRequest request, final PercolateContext context, final MapperService mapperService) { - BytesReference source = request.source(); - if (source == null || source.length() == 0) { - if (request.docSource() != null && request.docSource().length() != 0) { - 
return parseFetchedDoc(context, request.docSource(), mapperService, request.shardId().getIndexName(), request.documentType()); - } else { - return null; - } - } - - // TODO: combine all feature parse elements into one map - Map hlElements = highlightPhase.parseElements(); - Map aggregationElements = aggregationPhase.parseElements(); - final QueryShardContext queryShardContext = context.getQueryShardContext(); - ParsedDocument doc = null; - // Some queries (function_score query when for decay functions) rely on a SearchContext being set: - // We switch types because this context needs to be in the context of the percolate queries in the shard and - // not the in memory percolate doc - final String[] previousTypes = queryShardContext.getTypes(); - queryShardContext.setTypes(PercolatorService.TYPE_NAME); - try (XContentParser parser = XContentFactory.xContent(source).createParser(source);) { - String currentFieldName = null; - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - // we need to check the "doc" here, so the next token will be START_OBJECT which is - // the actual document starting - if ("doc".equals(currentFieldName)) { - if (doc != null) { - throw new ElasticsearchParseException("Either specify doc or get, not both"); - } - - DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(request.documentType()); - String index = context.shardTarget().index(); - doc = docMapper.getDocumentMapper().parse(source(parser).index(index).type(request.documentType()).id("_id_for_percolate_api")); - if (docMapper.getMapping() != null) { - doc.addDynamicMappingsUpdate(docMapper.getMapping()); - } - // the document parsing exists the "doc" object, so we need to set the new current field. 
- currentFieldName = parser.currentName(); - } - } else if (token == XContentParser.Token.START_OBJECT) { - SearchParseElement element = hlElements.get(currentFieldName); - if (element == null) { - element = aggregationElements.get(currentFieldName); - } - - if ("query".equals(currentFieldName)) { - if (context.percolateQuery() != null) { - throw new ElasticsearchParseException("Either specify query or filter, not both"); - } - context.percolateQuery(queryShardContext.parse(parser).query()); - } else if ("filter".equals(currentFieldName)) { - if (context.percolateQuery() != null) { - throw new ElasticsearchParseException("Either specify query or filter, not both"); - } - Query filter = queryShardContext.parseInnerFilter(parser).query(); - context.percolateQuery(new ConstantScoreQuery(filter)); - } else if ("sort".equals(currentFieldName)) { - parseSort(parser, context); - } else if (element != null) { - element.parse(parser, context); - } - } else if (token == XContentParser.Token.START_ARRAY) { - if ("sort".equals(currentFieldName)) { - parseSort(parser, context); - } - } else if (token == null) { - break; - } else if (token.isValue()) { - if ("size".equals(currentFieldName)) { - context.size(parser.intValue()); - if (context.size() < 0) { - throw new ElasticsearchParseException("size is set to [{}] and is expected to be higher or equal to 0", context.size()); - } - } else if ("sort".equals(currentFieldName)) { - parseSort(parser, context); - } else if ("track_scores".equals(currentFieldName) || "trackScores".equals(currentFieldName)) { - context.trackScores(parser.booleanValue()); - } - } - } - - // We need to get the actual source from the request body for highlighting, so parse the request body again - // and only get the doc source. 
- if (context.highlight() != null) { - parser.close(); - currentFieldName = null; - try (XContentParser parserForHighlighter = XContentFactory.xContent(source).createParser(source)) { - token = parserForHighlighter.nextToken(); - assert token == XContentParser.Token.START_OBJECT; - while ((token = parserForHighlighter.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parserForHighlighter.currentName(); - } else if (token == XContentParser.Token.START_OBJECT) { - if ("doc".equals(currentFieldName)) { - BytesStreamOutput bStream = new BytesStreamOutput(); - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.SMILE, bStream); - builder.copyCurrentStructure(parserForHighlighter); - builder.close(); - doc.setSource(bStream.bytes()); - break; - } else { - parserForHighlighter.skipChildren(); - } - } else if (token == null) { - break; - } - } - } - } - - } catch (Throwable e) { - throw new ElasticsearchParseException("failed to parse request", e); - } finally { - queryShardContext.setTypes(previousTypes); - } - - if (request.docSource() != null && request.docSource().length() != 0) { - if (doc != null) { - throw new IllegalArgumentException("Can't specify the document to percolate in the source of the request and as document id"); - } - - doc = parseFetchedDoc(context, request.docSource(), mapperService, request.shardId().getIndexName(), request.documentType()); - } - - if (doc == null) { - throw new IllegalArgumentException("Nothing to percolate"); - } - - return doc; - } - - private void parseSort(XContentParser parser, PercolateContext context) throws Exception { - context.trackScores(true); - sortParseElement.parse(parser, context); - // null, means default sorting by relevancy - if (context.sort() != null) { - throw new ElasticsearchParseException("Only _score desc is supported"); - } - } - - private ParsedDocument parseFetchedDoc(PercolateContext context, BytesReference 
fetchedDoc, MapperService mapperService, String index, String type) { - DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(type); - ParsedDocument doc = docMapper.getDocumentMapper().parse(source(fetchedDoc).index(index).type(type).id("_id_for_percolate_api")); - if (doc == null) { - throw new ElasticsearchParseException("No doc to percolate in the request"); - } - if (context.highlight() != null) { - doc.setSource(fetchedDoc); - } - return doc; - } - -} diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolateException.java b/core/src/main/java/org/elasticsearch/percolator/PercolateException.java deleted file mode 100644 index 81a708a75ecd..000000000000 --- a/core/src/main/java/org/elasticsearch/percolator/PercolateException.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.percolator; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ElasticsearchWrapperException; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.shard.ShardId; - -import java.io.IOException; -import java.util.Objects; - -/** - * Exception during percolating document(s) at runtime. - */ -public class PercolateException extends ElasticsearchException implements ElasticsearchWrapperException { - - private final ShardId shardId; - - public PercolateException(ShardId shardId, String msg, Throwable cause) { - super(msg, cause); - Objects.requireNonNull(shardId, "shardId must not be null"); - this.shardId = shardId; - } - - public ShardId getShardId() { - return shardId; - } - - public PercolateException(StreamInput in) throws IOException{ - super(in); - shardId = ShardId.readShardId(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - shardId.writeTo(out); - } -} diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolatorIndex.java b/core/src/main/java/org/elasticsearch/percolator/PercolatorIndex.java deleted file mode 100644 index 6f9a71048341..000000000000 --- a/core/src/main/java/org/elasticsearch/percolator/PercolatorIndex.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.percolator; - -import org.elasticsearch.index.mapper.ParsedDocument; - -/** - * Abstraction on how to index the percolator document. - */ -interface PercolatorIndex { - - /** - * Indexes the document(s) and initializes the PercolateContext - * - * @param context Initialized with document related properties for fetch phase. - * @param document Document that is percolated. Can contain several documents. - * */ - void prepare(PercolateContext context, ParsedDocument document); - -} diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolatorModule.java b/core/src/main/java/org/elasticsearch/percolator/PercolatorModule.java deleted file mode 100644 index 68b8db55e319..000000000000 --- a/core/src/main/java/org/elasticsearch/percolator/PercolatorModule.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.percolator; - -import org.elasticsearch.common.inject.AbstractModule; - -/** - * - */ -public class PercolatorModule extends AbstractModule { - - @Override - protected void configure() { - bind(PercolateDocumentParser.class).asEagerSingleton(); - bind(PercolatorService.class).asEagerSingleton(); - } -} diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java deleted file mode 100644 index 1160aec969bc..000000000000 --- a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java +++ /dev/null @@ -1,376 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.percolator; - - -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.ReaderUtil; -import org.apache.lucene.index.memory.ExtendedMemoryIndex; -import org.apache.lucene.index.memory.MemoryIndex; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.MultiCollector; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.ScoreDoc; -import org.apache.lucene.search.TopDocs; -import org.apache.lucene.search.TopScoreDocCollector; -import org.apache.lucene.search.TotalHitCountCollector; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.CloseableThreadLocal; -import org.elasticsearch.Version; -import org.elasticsearch.action.percolate.PercolateResponse; -import org.elasticsearch.action.percolate.PercolateShardRequest; -import org.elasticsearch.action.percolate.PercolateShardResponse; -import org.elasticsearch.cache.recycler.PageCacheRecycler; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lease.Releasable; -import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.unit.ByteSizeUnit; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.fieldvisitor.SingleFieldsVisitor; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.internal.UidFieldMapper; -import 
org.elasticsearch.index.percolator.PercolatorFieldMapper; -import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; -import org.elasticsearch.index.query.ParsedQuery; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.aggregations.AggregationPhase; -import org.elasticsearch.search.aggregations.Aggregator; -import org.elasticsearch.search.aggregations.BucketCollector; -import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregator; -import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator; -import org.elasticsearch.search.aggregations.support.AggregationContext; -import org.elasticsearch.search.fetch.FetchPhase; -import org.elasticsearch.search.highlight.HighlightField; -import org.elasticsearch.search.highlight.HighlightPhase; -import org.elasticsearch.search.internal.SearchContext; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; - -import static org.apache.lucene.search.BooleanClause.Occur.FILTER; -import static org.apache.lucene.search.BooleanClause.Occur.MUST; - -public class PercolatorService extends AbstractComponent implements Releasable { - - public final static float NO_SCORE = Float.NEGATIVE_INFINITY; - public final static String TYPE_NAME = ".percolator"; - - private final BigArrays bigArrays; - private final ScriptService scriptService; - private final IndicesService indicesService; - private final ClusterService clusterService; - private final HighlightPhase highlightPhase; - private final 
AggregationPhase aggregationPhase; - private final PageCacheRecycler pageCacheRecycler; - private final CloseableThreadLocal cache; - private final IndexNameExpressionResolver indexNameExpressionResolver; - private final PercolateDocumentParser percolateDocumentParser; - - private final PercolatorIndex single; - private final PercolatorIndex multi; - private final ParseFieldMatcher parseFieldMatcher; - private final FetchPhase fetchPhase; - - @Inject - public PercolatorService(Settings settings, IndexNameExpressionResolver indexNameExpressionResolver, IndicesService indicesService, - PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, - HighlightPhase highlightPhase, ClusterService clusterService, - AggregationPhase aggregationPhase, ScriptService scriptService, - PercolateDocumentParser percolateDocumentParser, FetchPhase fetchPhase) { - super(settings); - this.indexNameExpressionResolver = indexNameExpressionResolver; - this.percolateDocumentParser = percolateDocumentParser; - this.fetchPhase = fetchPhase; - this.parseFieldMatcher = new ParseFieldMatcher(settings); - this.indicesService = indicesService; - this.pageCacheRecycler = pageCacheRecycler; - this.bigArrays = bigArrays; - this.clusterService = clusterService; - this.scriptService = scriptService; - this.aggregationPhase = aggregationPhase; - this.highlightPhase = highlightPhase; - - final long maxReuseBytes = settings.getAsBytesSize("indices.memory.memory_index.size_per_thread", new ByteSizeValue(1, ByteSizeUnit.MB)).bytes(); - cache = new CloseableThreadLocal() { - @Override - protected MemoryIndex initialValue() { - // TODO: should we expose payloads as an option? should offsets be turned on always? 
- return new ExtendedMemoryIndex(true, false, maxReuseBytes); - } - }; - single = new SingleDocumentPercolatorIndex(cache); - multi = new MultiDocumentPercolatorIndex(cache); - } - - public ReduceResult reduce(boolean onlyCount, List shardResponses) throws IOException { - if (onlyCount) { - long finalCount = 0; - for (PercolateShardResponse shardResponse : shardResponses) { - finalCount += shardResponse.topDocs().totalHits; - } - - InternalAggregations reducedAggregations = reduceAggregations(shardResponses); - return new PercolatorService.ReduceResult(finalCount, reducedAggregations); - } else { - int requestedSize = shardResponses.get(0).requestedSize(); - TopDocs[] shardResults = new TopDocs[shardResponses.size()]; - long foundMatches = 0; - for (int i = 0; i < shardResults.length; i++) { - TopDocs shardResult = shardResponses.get(i).topDocs(); - foundMatches += shardResult.totalHits; - shardResults[i] = shardResult; - } - TopDocs merged = TopDocs.merge(requestedSize, shardResults); - PercolateResponse.Match[] matches = new PercolateResponse.Match[merged.scoreDocs.length]; - for (int i = 0; i < merged.scoreDocs.length; i++) { - ScoreDoc doc = merged.scoreDocs[i]; - PercolateShardResponse shardResponse = shardResponses.get(doc.shardIndex); - String id = shardResponse.ids().get(doc.doc); - Map hl = shardResponse.hls().get(doc.doc); - matches[i] = new PercolateResponse.Match(new Text(shardResponse.getIndex()), new Text(id), doc.score, hl); - } - InternalAggregations reducedAggregations = reduceAggregations(shardResponses); - return new PercolatorService.ReduceResult(foundMatches, matches, reducedAggregations); - } - } - - public PercolateShardResponse percolate(PercolateShardRequest request) throws IOException { - final IndexService percolateIndexService = indicesService.indexServiceSafe(request.shardId().getIndex()); - final IndexShard indexShard = percolateIndexService.getShard(request.shardId().id()); - indexShard.readAllowed(); // check if we can read the 
shard... - PercolatorQueriesRegistry percolateQueryRegistry = indexShard.percolateRegistry(); - percolateQueryRegistry.prePercolate(); - long startTime = System.nanoTime(); - - // TODO: The filteringAliases should be looked up at the coordinating node and serialized with all shard request, - // just like is done in other apis. - String[] filteringAliases = indexNameExpressionResolver.filteringAliases( - clusterService.state(), - indexShard.shardId().getIndex().getName(), - request.indices() - ); - Query aliasFilter = percolateIndexService.aliasFilter(percolateIndexService.newQueryShardContext(), filteringAliases); - - SearchShardTarget searchShardTarget = new SearchShardTarget(clusterService.localNode().id(), request.shardId().getIndex(), - request.shardId().id()); - final PercolateContext context = new PercolateContext(request, searchShardTarget, indexShard, percolateIndexService, - pageCacheRecycler, bigArrays, scriptService, aliasFilter, parseFieldMatcher, fetchPhase); - SearchContext.setCurrent(context); - try { - ParsedDocument parsedDocument = percolateDocumentParser.parse(request, context, percolateIndexService.mapperService()); - if (context.searcher().getIndexReader().maxDoc() == 0) { - return new PercolateShardResponse(Lucene.EMPTY_TOP_DOCS, Collections.emptyMap(), Collections.emptyMap(), context); - } - if (context.size() < 0) { - context.size(0); - } - - // parse the source either into one MemoryIndex, if it is a single document or index multiple docs if nested - PercolatorIndex percolatorIndex; - DocumentMapper documentMapper = indexShard.mapperService().documentMapper(request.documentType()); - boolean isNested = documentMapper != null && documentMapper.hasNestedObjects(); - if (parsedDocument.docs().size() > 1) { - assert isNested; - percolatorIndex = multi; - } else { - percolatorIndex = single; - } - percolatorIndex.prepare(context, parsedDocument); - - BucketCollector aggregatorCollector = null; - if (context.aggregations() != null) { - 
AggregationContext aggregationContext = new AggregationContext(context); - context.aggregations().aggregationContext(aggregationContext); - Aggregator[] aggregators = context.aggregations().factories().createTopLevelAggregators(); - List aggregatorCollectors = new ArrayList<>(aggregators.length); - for (int i = 0; i < aggregators.length; i++) { - if (!(aggregators[i] instanceof GlobalAggregator)) { - Aggregator aggregator = aggregators[i]; - aggregatorCollectors.add(aggregator); - } - } - context.aggregations().aggregators(aggregators); - aggregatorCollector = BucketCollector.wrap(aggregatorCollectors); - aggregatorCollector.preCollection(); - } - PercolatorQueriesRegistry queriesRegistry = indexShard.percolateRegistry(); - return doPercolate(context, queriesRegistry, aggregationPhase, aggregatorCollector, highlightPhase); - } finally { - SearchContext.removeCurrent(); - context.close(); - percolateQueryRegistry.postPercolate(System.nanoTime() - startTime); - } - } - - // moved the core percolation logic to a pck protected method to make testing easier: - static PercolateShardResponse doPercolate(PercolateContext context, PercolatorQueriesRegistry queriesRegistry, AggregationPhase aggregationPhase, @Nullable BucketCollector aggregatorCollector, HighlightPhase highlightPhase) throws IOException { - PercolatorQuery.Builder builder = new PercolatorQuery.Builder(context.docSearcher(), queriesRegistry.getPercolateQueries(), context.percolatorTypeFilter()); - if (queriesRegistry.indexSettings().getSettings().getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, null).onOrAfter(Version.V_5_0_0)) { - builder.extractQueryTermsQuery(PercolatorFieldMapper.EXTRACTED_TERMS_FULL_FIELD_NAME, PercolatorFieldMapper.UNKNOWN_QUERY_FULL_FIELD_NAME); - } - if (context.percolateQuery() != null || context.aliasFilter() != null) { - BooleanQuery.Builder bq = new BooleanQuery.Builder(); - if (context.percolateQuery() != null) { - bq.add(context.percolateQuery(), MUST); - } - if 
(context.aliasFilter() != null) { - bq.add(context.aliasFilter(), FILTER); - } - builder.setPercolateQuery(bq.build()); - } - PercolatorQuery percolatorQuery = builder.build(); - - if (context.isOnlyCount() || context.size() == 0) { - TotalHitCountCollector collector = new TotalHitCountCollector(); - context.searcher().search(percolatorQuery, MultiCollector.wrap(collector, aggregatorCollector)); - if (aggregatorCollector != null) { - aggregatorCollector.postCollection(); - aggregationPhase.execute(context); - } - return new PercolateShardResponse(new TopDocs(collector.getTotalHits(), Lucene.EMPTY_SCORE_DOCS, 0f), Collections.emptyMap(), Collections.emptyMap(), context); - } else { - int size = context.size(); - if (size > context.searcher().getIndexReader().maxDoc()) { - // prevent easy OOM if more than the total number of docs that - // exist is requested... - size = context.searcher().getIndexReader().maxDoc(); - } - TopScoreDocCollector collector = TopScoreDocCollector.create(size); - context.searcher().search(percolatorQuery, MultiCollector.wrap(collector, aggregatorCollector)); - if (aggregatorCollector != null) { - aggregatorCollector.postCollection(); - aggregationPhase.execute(context); - } - - TopDocs topDocs = collector.topDocs(); - Map ids = new HashMap<>(topDocs.scoreDocs.length); - Map> hls = new HashMap<>(topDocs.scoreDocs.length); - for (ScoreDoc scoreDoc : topDocs.scoreDocs) { - if (context.trackScores() == false) { - // No sort or tracking scores was provided, so use special - // value to indicate to not show the scores: - scoreDoc.score = NO_SCORE; - } - - int segmentIdx = ReaderUtil.subIndex(scoreDoc.doc, context.searcher().getIndexReader().leaves()); - LeafReaderContext atomicReaderContext = context.searcher().getIndexReader().leaves().get(segmentIdx); - final int segmentDocId = scoreDoc.doc - atomicReaderContext.docBase; - SingleFieldsVisitor fieldsVisitor = new SingleFieldsVisitor(UidFieldMapper.NAME); - 
atomicReaderContext.reader().document(segmentDocId, fieldsVisitor); - String id = fieldsVisitor.uid().id(); - ids.put(scoreDoc.doc, id); - if (context.highlight() != null) { - Query query = queriesRegistry.getPercolateQueries().get(new BytesRef(id)); - context.parsedQuery(new ParsedQuery(query)); - context.hitContext().cache().clear(); - highlightPhase.hitExecute(context, context.hitContext()); - hls.put(scoreDoc.doc, context.hitContext().hit().getHighlightFields()); - } - } - return new PercolateShardResponse(topDocs, ids, hls, context); - } - } - - @Override - public void close() { - cache.close(); - } - - private InternalAggregations reduceAggregations(List shardResults) { - if (shardResults.get(0).aggregations() == null) { - return null; - } - - List aggregationsList = new ArrayList<>(shardResults.size()); - for (PercolateShardResponse shardResult : shardResults) { - aggregationsList.add(shardResult.aggregations()); - } - InternalAggregations aggregations = InternalAggregations.reduce(aggregationsList, new InternalAggregation.ReduceContext(bigArrays, scriptService)); - if (aggregations != null) { - List pipelineAggregators = shardResults.get(0).pipelineAggregators(); - if (pipelineAggregators != null) { - List newAggs = StreamSupport.stream(aggregations.spliterator(), false).map((p) -> { - return (InternalAggregation) p; - }).collect(Collectors.toList()); - for (SiblingPipelineAggregator pipelineAggregator : pipelineAggregators) { - InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), - new InternalAggregation.ReduceContext(bigArrays, scriptService)); - newAggs.add(newAgg); - } - aggregations = new InternalAggregations(newAggs); - } - } - return aggregations; - } - - public final static class ReduceResult { - - private final long count; - private final PercolateResponse.Match[] matches; - private final InternalAggregations reducedAggregations; - - ReduceResult(long count, PercolateResponse.Match[] matches, 
InternalAggregations reducedAggregations) { - this.count = count; - this.matches = matches; - this.reducedAggregations = reducedAggregations; - } - - public ReduceResult(long count, InternalAggregations reducedAggregations) { - this.count = count; - this.matches = null; - this.reducedAggregations = reducedAggregations; - } - - public long count() { - return count; - } - - public PercolateResponse.Match[] matches() { - return matches; - } - - public InternalAggregations reducedAggregations() { - return reducedAggregations; - } - } - - -} diff --git a/core/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java b/core/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java deleted file mode 100644 index 1d5268e3794e..000000000000 --- a/core/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -package org.elasticsearch.percolator; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.memory.MemoryIndex; -import org.apache.lucene.util.CloseableThreadLocal; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.internal.UidFieldMapper; - -import java.io.IOException; - -/** - * Implementation of {@link PercolatorIndex} that can only hold a single Lucene document - * and is optimized for that - */ -class SingleDocumentPercolatorIndex implements PercolatorIndex { - - private final CloseableThreadLocal cache; - - SingleDocumentPercolatorIndex(CloseableThreadLocal cache) { - this.cache = cache; - } - - @Override - public void prepare(PercolateContext context, ParsedDocument parsedDocument) { - MemoryIndex memoryIndex = cache.get(); - for (IndexableField field : parsedDocument.rootDoc().getFields()) { - Analyzer analyzer = context.analysisService().defaultIndexAnalyzer(); - DocumentMapper documentMapper = context.mapperService().documentMapper(parsedDocument.type()); - if (documentMapper != null && documentMapper.mappers().getMapper(field.name()) != null) { - analyzer = documentMapper.mappers().indexAnalyzer(); - } - if (field.fieldType().indexOptions() == IndexOptions.NONE && field.name().equals(UidFieldMapper.NAME)) { - continue; - } - try { - // TODO: instead of passing null here, we can have a CTL> and pass previous, - // like the indexer does - try (TokenStream tokenStream = field.tokenStream(analyzer, null)) { - if (tokenStream != null) { - memoryIndex.addField(field.name(), tokenStream, field.boost()); - } - } - } catch (Exception e) { - throw new ElasticsearchException("Failed to create token 
stream for [" + field.name() + "]", e); - } - } - context.initialize(new DocEngineSearcher(memoryIndex), parsedDocument); - } - - private class DocEngineSearcher extends Engine.Searcher { - - private final MemoryIndex memoryIndex; - - public DocEngineSearcher(MemoryIndex memoryIndex) { - super("percolate", memoryIndex.createSearcher()); - this.memoryIndex = memoryIndex; - } - - @Override - public void close() { - try { - this.reader().close(); - memoryIndex.reset(); - } catch (IOException e) { - throw new ElasticsearchException("failed to close percolator in-memory index", e); - } - } - } -} diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java index fa4371846f6d..92fb21db38c5 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java @@ -78,7 +78,7 @@ public class RestIndicesStatsAction extends BaseRestHandler { indicesStatsRequest.flush(metrics.contains("flush")); indicesStatsRequest.warmer(metrics.contains("warmer")); indicesStatsRequest.queryCache(metrics.contains("query_cache")); - indicesStatsRequest.percolate(metrics.contains("percolate")); + indicesStatsRequest.percolate(metrics.contains("percolator_cache")); indicesStatsRequest.segments(metrics.contains("segments")); indicesStatsRequest.fieldData(metrics.contains("fielddata")); indicesStatsRequest.completion(metrics.contains("completion")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index 0e2e26164a81..77366e1cc81d 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ 
b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -222,21 +222,9 @@ public class RestIndicesAction extends AbstractCatAction { table.addCell("merges.total_time", "sibling:pri;alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges"); table.addCell("pri.merges.total_time", "default:false;text-align:right;desc:time spent in merges"); - table.addCell("percolate.current", "sibling:pri;alias:pc,percolateCurrent;default:false;text-align:right;desc:number of current percolations"); - table.addCell("pri.percolate.current", "default:false;text-align:right;desc:number of current percolations"); - - table.addCell("percolate.memory_size", "sibling:pri;alias:pm,percolateMemory;default:false;text-align:right;desc:memory used by percolations"); - table.addCell("pri.percolate.memory_size", "default:false;text-align:right;desc:memory used by percolations"); - table.addCell("percolate.queries", "sibling:pri;alias:pq,percolateQueries;default:false;text-align:right;desc:number of registered percolation queries"); table.addCell("pri.percolate.queries", "default:false;text-align:right;desc:number of registered percolation queries"); - table.addCell("percolate.time", "sibling:pri;alias:pti,percolateTime;default:false;text-align:right;desc:time spent percolating"); - table.addCell("pri.percolate.time", "default:false;text-align:right;desc:time spent percolating"); - - table.addCell("percolate.total", "sibling:pri;alias:pto,percolateTotal;default:false;text-align:right;desc:total percolations"); - table.addCell("pri.percolate.total", "default:false;text-align:right;desc:total percolations"); - table.addCell("refresh.total", "sibling:pri;alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes"); table.addCell("pri.refresh.total", "default:false;text-align:right;desc:total refreshes"); @@ -436,20 +424,8 @@ public class RestIndicesAction extends AbstractCatAction { table.addCell(indexStats == null ? 
null : indexStats.getTotal().getMerge().getTotalTime()); table.addCell(indexStats == null ? null : indexStats.getPrimaries().getMerge().getTotalTime()); - table.addCell(indexStats == null ? null : indexStats.getTotal().getPercolate().getCurrent()); - table.addCell(indexStats == null ? null : indexStats.getPrimaries().getPercolate().getCurrent()); - - table.addCell(indexStats == null ? null : indexStats.getTotal().getPercolate().getMemorySize()); - table.addCell(indexStats == null ? null : indexStats.getPrimaries().getPercolate().getMemorySize()); - - table.addCell(indexStats == null ? null : indexStats.getTotal().getPercolate().getNumQueries()); - table.addCell(indexStats == null ? null : indexStats.getPrimaries().getPercolate().getNumQueries()); - - table.addCell(indexStats == null ? null : indexStats.getTotal().getPercolate().getTime()); - table.addCell(indexStats == null ? null : indexStats.getPrimaries().getPercolate().getTime()); - - table.addCell(indexStats == null ? null : indexStats.getTotal().getPercolate().getCount()); - table.addCell(indexStats == null ? null : indexStats.getPrimaries().getPercolate().getCount()); + table.addCell(indexStats == null ? null : indexStats.getTotal().getPercolatorCache().getNumQueries()); + table.addCell(indexStats == null ? null : indexStats.getPrimaries().getPercolatorCache().getNumQueries()); table.addCell(indexStats == null ? null : indexStats.getTotal().getRefresh().getTotal()); table.addCell(indexStats == null ? 
null : indexStats.getPrimaries().getRefresh().getTotal()); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java index 0605bc4dcab3..4b97f8a942b1 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java @@ -46,7 +46,7 @@ import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.shard.IndexingStats; import org.elasticsearch.index.merge.MergeStats; -import org.elasticsearch.index.percolator.PercolateStats; +import org.elasticsearch.index.percolator.PercolatorQueryCacheStats; import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.search.stats.SearchStats; import org.elasticsearch.index.suggest.stats.SuggestStats; @@ -67,7 +67,6 @@ import org.elasticsearch.script.ScriptStats; import org.elasticsearch.search.suggest.completion.CompletionStats; import java.util.Locale; -import java.util.Map; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ -186,11 +185,7 @@ public class RestNodesAction extends AbstractCatAction { table.addCell("merges.total_size", "alias:mts,mergesTotalSize;default:false;text-align:right;desc:size merged"); table.addCell("merges.total_time", "alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges"); - table.addCell("percolate.current", "alias:pc,percolateCurrent;default:false;text-align:right;desc:number of current percolations"); - table.addCell("percolate.memory_size", "alias:pm,percolateMemory;default:false;text-align:right;desc:memory used by percolations"); table.addCell("percolate.queries", "alias:pq,percolateQueries;default:false;text-align:right;desc:number of registered percolation queries"); - table.addCell("percolate.time", "alias:pti,percolateTime;default:false;text-align:right;desc:time 
spent percolating"); - table.addCell("percolate.total", "alias:pto,percolateTotal;default:false;text-align:right;desc:total percolations"); table.addCell("refresh.total", "alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes"); table.addCell("refresh.time", "alias:rti,refreshTime;default:false;text-align:right;desc:time spent in refreshes"); @@ -336,12 +331,8 @@ public class RestNodesAction extends AbstractCatAction { table.addCell(mergeStats == null ? null : mergeStats.getTotalSize()); table.addCell(mergeStats == null ? null : mergeStats.getTotalTime()); - PercolateStats percolateStats = indicesStats == null ? null : indicesStats.getPercolate(); - table.addCell(percolateStats == null ? null : percolateStats.getCurrent()); - table.addCell(percolateStats == null ? null : percolateStats.getMemorySize()); - table.addCell(percolateStats == null ? null : percolateStats.getNumQueries()); - table.addCell(percolateStats == null ? null : percolateStats.getTime()); - table.addCell(percolateStats == null ? null : percolateStats.getCount()); + PercolatorQueryCacheStats percolatorQueryCacheStats = indicesStats == null ? null : indicesStats.getPercolate(); + table.addCell(percolatorQueryCacheStats == null ? null : percolatorQueryCacheStats.getNumQueries()); RefreshStats refreshStats = indicesStats == null ? null : indicesStats.getRefresh(); table.addCell(refreshStats == null ? 
null : refreshStats.getTotal()); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java index 445f7099fef0..1b3f239ae5f4 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java @@ -139,11 +139,7 @@ public class RestShardsAction extends AbstractCatAction { table.addCell("merges.total_size", "alias:mts,mergesTotalSize;default:false;text-align:right;desc:size merged"); table.addCell("merges.total_time", "alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges"); - table.addCell("percolate.current", "alias:pc,percolateCurrent;default:false;text-align:right;desc:number of current percolations"); - table.addCell("percolate.memory_size", "alias:pm,percolateMemory;default:false;text-align:right;desc:memory used by percolations"); table.addCell("percolate.queries", "alias:pq,percolateQueries;default:false;text-align:right;desc:number of registered percolation queries"); - table.addCell("percolate.time", "alias:pti,percolateTime;default:false;text-align:right;desc:time spent percolating"); - table.addCell("percolate.total", "alias:pto,percolateTotal;default:false;text-align:right;desc:total percolations"); table.addCell("refresh.total", "alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes"); table.addCell("refresh.time", "alias:rti,refreshTime;default:false;text-align:right;desc:time spent in refreshes"); @@ -282,11 +278,7 @@ public class RestShardsAction extends AbstractCatAction { table.addCell(commonStats == null ? null : commonStats.getMerge().getTotalSize()); table.addCell(commonStats == null ? null : commonStats.getMerge().getTotalTime()); - table.addCell(commonStats == null ? null : commonStats.getPercolate().getCurrent()); - table.addCell(commonStats == null ? 
null : commonStats.getPercolate().getMemorySize()); - table.addCell(commonStats == null ? null : commonStats.getPercolate().getNumQueries()); - table.addCell(commonStats == null ? null : commonStats.getPercolate().getTime()); - table.addCell(commonStats == null ? null : commonStats.getPercolate().getCount()); + table.addCell(commonStats == null ? null : commonStats.getPercolatorCache().getNumQueries()); table.addCell(commonStats == null ? null : commonStats.getRefresh().getTotal()); table.addCell(commonStats == null ? null : commonStats.getRefresh().getTotalTime()); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java index fa2e662c7388..a0812f3e9a4f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java @@ -63,7 +63,6 @@ public class RestThreadPoolAction extends AbstractCatAction { ThreadPool.Names.INDEX, ThreadPool.Names.MANAGEMENT, ThreadPool.Names.FORCE_MERGE, - ThreadPool.Names.PERCOLATE, ThreadPool.Names.REFRESH, ThreadPool.Names.SEARCH, ThreadPool.Names.SNAPSHOT, @@ -79,7 +78,6 @@ public class RestThreadPoolAction extends AbstractCatAction { "i", "ma", "fm", - "p", "r", "s", "sn", diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index 55c343ba874a..ae6d5aaf4bee 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -62,6 +62,7 @@ import org.elasticsearch.index.query.MoreLikeThisQueryParser; import org.elasticsearch.index.query.MultiMatchQueryParser; import org.elasticsearch.index.query.NestedQueryParser; import org.elasticsearch.index.query.ParentIdQueryParser; +import org.elasticsearch.index.query.PercolatorQueryParser; import 
org.elasticsearch.index.query.PrefixQueryParser; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParser; @@ -215,6 +216,7 @@ import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase; import org.elasticsearch.search.fetch.innerhits.InnerHitsFetchSubPhase; import org.elasticsearch.search.fetch.matchedqueries.MatchedQueriesFetchSubPhase; import org.elasticsearch.search.fetch.parent.ParentFieldSubFetchPhase; +import org.elasticsearch.index.percolator.PercolatorHighlightSubFetchPhase; import org.elasticsearch.search.fetch.script.ScriptFieldsFetchSubPhase; import org.elasticsearch.search.fetch.source.FetchSourceSubPhase; import org.elasticsearch.search.fetch.version.VersionFetchSubPhase; @@ -356,6 +358,7 @@ public class SearchModule extends AbstractModule { fetchSubPhaseMultibinder.addBinding().to(MatchedQueriesFetchSubPhase.class); fetchSubPhaseMultibinder.addBinding().to(HighlightPhase.class); fetchSubPhaseMultibinder.addBinding().to(ParentFieldSubFetchPhase.class); + fetchSubPhaseMultibinder.addBinding().to(PercolatorHighlightSubFetchPhase.class); for (Class clazz : fetchSubPhases) { fetchSubPhaseMultibinder.addBinding().to(clazz); } @@ -546,6 +549,7 @@ public class SearchModule extends AbstractModule { registerQueryParser(ExistsQueryParser::new); registerQueryParser(MatchNoneQueryParser::new); registerQueryParser(ParentIdQueryParser::new); + registerQueryParser(PercolatorQueryParser::new); if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { registerQueryParser(GeoShapeQueryParser::new); } diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java index 8c3c19343b40..71a289331f87 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java @@ -49,6 
+49,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; +import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; @@ -487,6 +488,11 @@ public class DefaultSearchContext extends SearchContext { return indexService.fieldData(); } + @Override + public PercolatorQueryCache percolatorQueryCache() { + return indexService.cache().getPercolatorQueryCache(); + } + @Override public long timeoutInMillis() { return timeoutInMillis; diff --git a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java index 83ea2b1ccd8b..fedab3f9782f 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.object.ObjectMapper; +import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexShard; @@ -274,6 +275,11 @@ public abstract class FilteredSearchContext extends SearchContext { return in.fieldData(); } + @Override + public PercolatorQueryCache percolatorQueryCache() { + return in.percolatorQueryCache(); + } + @Override public long timeoutInMillis() { return in.timeoutInMillis(); diff --git 
a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 2b35e182161d..ec47c6327cf6 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -38,6 +38,7 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.object.ObjectMapper; +import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexShard; @@ -217,6 +218,8 @@ public abstract class SearchContext implements Releasable { public abstract IndexFieldDataService fieldData(); + public abstract PercolatorQueryCache percolatorQueryCache(); + public abstract long timeoutInMillis(); public abstract void timeoutInMillis(long timeoutInMillis); diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 70542d575ddc..a04594e706a9 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -83,7 +83,6 @@ public class ThreadPool extends AbstractComponent implements Closeable { public static final String BULK = "bulk"; public static final String SEARCH = "search"; public static final String SUGGEST = "suggest"; - public static final String PERCOLATE = "percolate"; public static final String MANAGEMENT = "management"; public static final String FLUSH = "flush"; public static final String REFRESH = "refresh"; @@ -141,7 +140,6 @@ public class ThreadPool extends AbstractComponent implements Closeable { map.put(Names.BULK, ThreadPoolType.FIXED); 
map.put(Names.SEARCH, ThreadPoolType.FIXED); map.put(Names.SUGGEST, ThreadPoolType.FIXED); - map.put(Names.PERCOLATE, ThreadPoolType.FIXED); map.put(Names.MANAGEMENT, ThreadPoolType.SCALING); map.put(Names.FLUSH, ThreadPoolType.SCALING); map.put(Names.REFRESH, ThreadPoolType.SCALING); @@ -230,7 +228,6 @@ public class ThreadPool extends AbstractComponent implements Closeable { add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.GET).size(availableProcessors).queueSize(1000)); add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.SEARCH).size(((availableProcessors * 3) / 2) + 1).queueSize(1000)); add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.SUGGEST).size(availableProcessors).queueSize(1000)); - add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.PERCOLATE).size(availableProcessors).queueSize(1000)); add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.MANAGEMENT).size(5).keepAlive("5m")); // no queue as this means clients will need to handle rejections on listener queue even if the operation succeeded // the assumption here is that the listeners should be very lightweight on the listeners side diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 606ff09a60f2..955eb309436e 100644 --- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -63,7 +63,6 @@ import org.elasticsearch.indices.IndexTemplateAlreadyExistsException; import org.elasticsearch.indices.IndexTemplateMissingException; import org.elasticsearch.indices.InvalidIndexTemplateException; import org.elasticsearch.indices.recovery.RecoverFilesRecoveryException; -import org.elasticsearch.percolator.PercolateException; import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.rest.RestStatus; import 
org.elasticsearch.rest.action.admin.indices.alias.delete.AliasesNotFoundException; @@ -382,19 +381,6 @@ public class ExceptionSerializationTests extends ESTestCase { assertEquals(id, ex.id()); } - public void testPercolateException() throws IOException { - ShardId id = new ShardId("foo", "_na_", 1); - PercolateException ex = serialize(new PercolateException(id, "percolate my ass", null)); - assertEquals(id, ex.getShardId()); - assertEquals("percolate my ass", ex.getMessage()); - assertNull(ex.getCause()); - - ex = serialize(new PercolateException(id, "percolate my ass", new NullPointerException())); - assertEquals(id, ex.getShardId()); - assertEquals("percolate my ass", ex.getMessage()); - assertTrue(ex.getCause() instanceof NullPointerException); - } - public void testRoutingValidationException() throws IOException { RoutingTableValidation validation = new RoutingTableValidation(); validation.addIndexFailure("foo", "bar"); @@ -746,7 +732,6 @@ public class ExceptionSerializationTests extends ESTestCase { ids.put(85, org.elasticsearch.index.AlreadyExpiredException.class); ids.put(86, org.elasticsearch.search.aggregations.AggregationExecutionException.class); ids.put(88, org.elasticsearch.indices.InvalidIndexTemplateException.class); - ids.put(89, org.elasticsearch.percolator.PercolateException.class); ids.put(90, org.elasticsearch.index.engine.RefreshFailedEngineException.class); ids.put(91, org.elasticsearch.search.aggregations.AggregationInitializationException.class); ids.put(92, org.elasticsearch.indices.recovery.DelayRecoveryException.class); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java index 7c2747a1a281..3aa27b2d1756 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -29,8 +29,10 @@ import 
org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeAction; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; +import org.elasticsearch.action.fieldstats.FieldStatsAction; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.percolate.PercolateAction; +import org.elasticsearch.action.percolate.PercolateSourceBuilder; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Strings; @@ -125,20 +127,20 @@ public class TasksIT extends ESIntegTestCase { } public void testTransportReplicationAllShardsTasks() { - registerTaskManageListeners(PercolateAction.NAME); // main task - registerTaskManageListeners(PercolateAction.NAME + "[s]"); // shard level tasks + registerTaskManageListeners(FieldStatsAction.NAME); // main task + registerTaskManageListeners(FieldStatsAction.NAME + "[s]"); // shard level tasks createIndex("test"); ensureGreen("test"); // Make sure all shards are allocated - client().preparePercolate().setIndices("test").setDocumentType("foo").setSource("{}").get(); + client().prepareFieldStats().setFields("field").get(); // the percolate operation should produce one main task NumShards numberOfShards = getNumShards("test"); - assertEquals(1, numberOfEvents(PercolateAction.NAME, Tuple::v1)); + assertEquals(1, numberOfEvents(FieldStatsAction.NAME, Tuple::v1)); // and then one operation per shard - assertEquals(numberOfShards.totalNumShards, numberOfEvents(PercolateAction.NAME + "[s]", Tuple::v1)); + assertEquals(numberOfShards.totalNumShards, numberOfEvents(FieldStatsAction.NAME + "[s]", Tuple::v1)); // the shard level tasks should have the main task as a parent - assertParentTask(findEvents(PercolateAction.NAME + "[s]", Tuple::v1), findEvents(PercolateAction.NAME, 
Tuple::v1).get(0)); + assertParentTask(findEvents(FieldStatsAction.NAME + "[s]", Tuple::v1), findEvents(FieldStatsAction.NAME, Tuple::v1).get(0)); } public void testTransportBroadcastByNodeTasks() { diff --git a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java index 074e64f8232c..c803cc9624a3 100644 --- a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java @@ -26,13 +26,14 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.percolator.PercolatorService; +import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { @@ -48,28 +49,40 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().string(); mapperService.merge("type", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE, true); - String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(PercolatorService.TYPE_NAME) + String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(PercolatorFieldMapper.TYPE_NAME) .startObject("properties").startObject("query").field("type", "percolator").endObject().endObject() .endObject().endObject().string(); - mapperService.merge(PercolatorService.TYPE_NAME, new 
CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); + mapperService.merge(PercolatorFieldMapper.TYPE_NAME, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); } public void testPercolatorFieldMapper() throws Exception { - ParsedDocument doc = mapperService.documentMapper(PercolatorService.TYPE_NAME).parse("test", PercolatorService.TYPE_NAME, "1", XContentFactory.jsonBuilder().startObject() + ParsedDocument doc = mapperService.documentMapper(PercolatorFieldMapper.TYPE_NAME).parse("test", PercolatorFieldMapper.TYPE_NAME, "1", XContentFactory.jsonBuilder().startObject() .field("query", termQuery("field", "value")) .endObject().bytes()); assertThat(doc.rootDoc().getFields(PercolatorFieldMapper.EXTRACTED_TERMS_FULL_FIELD_NAME).length, equalTo(1)); assertThat(doc.rootDoc().getFields(PercolatorFieldMapper.EXTRACTED_TERMS_FULL_FIELD_NAME)[0].binaryValue().utf8ToString(), equalTo("field\0value")); + assertThat(doc.rootDoc().getFields(PercolatorFieldMapper.QUERY_BUILDER_FULL_FIELD_NAME).length, equalTo(1)); } + public void testPercolatorFieldMapperUnMappedField() throws Exception { + MapperParsingException exception = expectThrows(MapperParsingException.class, () -> { + mapperService.documentMapper(PercolatorFieldMapper.TYPE_NAME).parse("test", PercolatorFieldMapper.TYPE_NAME, "1", XContentFactory.jsonBuilder().startObject() + .field("query", termQuery("unmapped_field", "value")) + .endObject().bytes()); + }); + assertThat(exception.getCause(), instanceOf(QueryShardException.class)); + assertThat(exception.getCause().getMessage(), equalTo("No field mapping can be found for the field with name [unmapped_field]")); + } + + public void testPercolatorFieldMapper_noQuery() throws Exception { - ParsedDocument doc = mapperService.documentMapper(PercolatorService.TYPE_NAME).parse("test", PercolatorService.TYPE_NAME, "1", XContentFactory.jsonBuilder().startObject() + ParsedDocument doc = 
mapperService.documentMapper(PercolatorFieldMapper.TYPE_NAME).parse("test", PercolatorFieldMapper.TYPE_NAME, "1", XContentFactory.jsonBuilder().startObject() .endObject().bytes()); assertThat(doc.rootDoc().getFields(PercolatorFieldMapper.EXTRACTED_TERMS_FULL_FIELD_NAME).length, equalTo(0)); try { - mapperService.documentMapper(PercolatorService.TYPE_NAME).parse("test", PercolatorService.TYPE_NAME, "1", XContentFactory.jsonBuilder().startObject() + mapperService.documentMapper(PercolatorFieldMapper.TYPE_NAME).parse("test", PercolatorFieldMapper.TYPE_NAME, "1", XContentFactory.jsonBuilder().startObject() .nullField("query") .endObject().bytes()); } catch (MapperParsingException e) { @@ -81,11 +94,11 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { IndexService indexService = createIndex("test1", Settings.EMPTY); MapperService mapperService = indexService.mapperService(); - String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(PercolatorService.TYPE_NAME) + String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(PercolatorFieldMapper.TYPE_NAME) .startObject("properties").startObject("query").field("type", "percolator").field("index", "no").endObject().endObject() .endObject().endObject().string(); try { - mapperService.merge(PercolatorService.TYPE_NAME, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); + mapperService.merge(PercolatorFieldMapper.TYPE_NAME, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); fail("MapperParsingException expected"); } catch (MapperParsingException e) { assertThat(e.getMessage(), equalTo("Mapping definition for [query] has unsupported parameters: [index : no]")); diff --git a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhaseTests.java b/core/src/test/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhaseTests.java new file mode 
100644 index 000000000000..5e37f1ce6e87 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhaseTests.java @@ -0,0 +1,85 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.index.percolator; + +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.index.query.PercolatorQuery; +import org.elasticsearch.search.highlight.SearchContextHighlight; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.test.ESTestCase; +import org.mockito.Mockito; + +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; + +public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase { + + public void testHitsExecutionNeeded() { + PercolatorQuery percolatorQuery = new 
PercolatorQuery.Builder("", ctx -> null, new BytesArray("{}"), + Mockito.mock(IndexSearcher.class), new MatchAllDocsQuery()) + .build(); + + PercolatorHighlightSubFetchPhase subFetchPhase = new PercolatorHighlightSubFetchPhase(null); + SearchContext searchContext = Mockito.mock(SearchContext.class); + Mockito.when(searchContext.highlight()).thenReturn(new SearchContextHighlight(Collections.emptyList())); + Mockito.when(searchContext.query()).thenReturn(new MatchAllDocsQuery()); + + assertThat(subFetchPhase.hitsExecutionNeeded(searchContext), is(false)); + IllegalStateException exception = expectThrows(IllegalStateException.class, + () -> subFetchPhase.hitsExecute(searchContext, null)); + assertThat(exception.getMessage(), equalTo("couldn't locate percolator query")); + + Mockito.when(searchContext.query()).thenReturn(percolatorQuery); + assertThat(subFetchPhase.hitsExecutionNeeded(searchContext), is(true)); + } + + public void testLocatePercolatorQuery() { + PercolatorQuery percolatorQuery = new PercolatorQuery.Builder("", ctx -> null, new BytesArray("{}"), + Mockito.mock(IndexSearcher.class), new MatchAllDocsQuery()) + .build(); + + assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(new MatchAllDocsQuery()), nullValue()); + BooleanQuery.Builder bq = new BooleanQuery.Builder(); + bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER); + assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(bq.build()), nullValue()); + bq.add(percolatorQuery, BooleanClause.Occur.FILTER); + assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(bq.build()), sameInstance(percolatorQuery)); + + ConstantScoreQuery constantScoreQuery = new ConstantScoreQuery(new MatchAllDocsQuery()); + assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(constantScoreQuery), nullValue()); + constantScoreQuery = new ConstantScoreQuery(percolatorQuery); + assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(constantScoreQuery), 
sameInstance(percolatorQuery)); + + BoostQuery boostQuery = new BoostQuery(new MatchAllDocsQuery(), 1f); + assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(boostQuery), nullValue()); + boostQuery = new BoostQuery(percolatorQuery, 1f); + assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(boostQuery), sameInstance(percolatorQuery)); + } + +} diff --git a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorQueryCacheTests.java b/core/src/test/java/org/elasticsearch/index/percolator/PercolatorQueryCacheTests.java new file mode 100644 index 000000000000..4b0e8dd70893 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/percolator/PercolatorQueryCacheTests.java @@ -0,0 +1,352 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.percolator; + +import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.document.BinaryDocValuesField; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.StoredField; +import org.apache.lucene.document.StringField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.TieredMergePolicy; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.WildcardQuery; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexWarmer; +import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.index.analysis.AnalysisService; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.internal.SourceFieldMapper; +import org.elasticsearch.index.mapper.internal.TypeFieldMapper; +import 
org.elasticsearch.index.query.BoolQueryParser; +import org.elasticsearch.index.query.PercolatorQuery; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParser; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.TermQueryParser; +import org.elasticsearch.index.query.WildcardQueryParser; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.similarity.SimilarityService; +import org.elasticsearch.index.warmer.ShardIndexWarmerService; +import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.threadpool.ThreadPool; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Matchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class PercolatorQueryCacheTests extends ESTestCase { + + private QueryShardContext queryShardContext; + private PercolatorQueryCache cache; + + void initialize(Object... 
fields) throws IOException { + Settings settings = Settings.settingsBuilder() + .put("node.name", PercolatorQueryCacheTests.class.toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .build(); + + Map> queryParsers = new HashMap<>(); + queryParsers.put("term", new TermQueryParser()); + queryParsers.put("wildcard", new WildcardQueryParser()); + queryParsers.put("bool", new BoolQueryParser()); + IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry(settings, queryParsers); + + Settings indexSettings = Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("_index", ClusterState.UNKNOWN_UUID), indexSettings); + SimilarityService similarityService = new SimilarityService(idxSettings, Collections.emptyMap()); + AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); + MapperRegistry mapperRegistry = new IndicesModule().getMapperRegistry(); + MapperService mapperService = new MapperService(idxSettings, analysisService, similarityService, mapperRegistry, + () -> queryShardContext); + mapperService.merge("type", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("type", fields).string()), + MapperService.MergeReason.MAPPING_UPDATE, false); + cache = new PercolatorQueryCache(idxSettings, () -> queryShardContext); + queryShardContext = new QueryShardContext(idxSettings, null, null, mapperService, similarityService, null, + indicesQueriesRegistry, cache); + } + + public void testLoadQueries() throws Exception { + Directory directory = newDirectory(); + IndexWriter indexWriter = new IndexWriter( + directory, + newIndexWriterConfig(new MockAnalyzer(random())) + ); + + boolean legacyFormat = randomBoolean(); + Version version = legacyFormat ? 
Version.V_2_0_0 : Version.CURRENT; + + storeQuery("0", indexWriter, termQuery("field1", "value1"), true, legacyFormat); + storeQuery("1", indexWriter, wildcardQuery("field1", "v*"), true, legacyFormat); + storeQuery("2", indexWriter, boolQuery().must(termQuery("field1", "value1")).must(termQuery("field2", "value2")), + true, legacyFormat); + // dymmy docs should be skipped during loading: + Document doc = new Document(); + doc.add(new StringField("dummy", "value", Field.Store.YES)); + indexWriter.addDocument(doc); + storeQuery("4", indexWriter, termQuery("field2", "value2"), true, legacyFormat); + // only documents that .percolator type should be loaded: + storeQuery("5", indexWriter, termQuery("field2", "value2"), false, legacyFormat); + storeQuery("6", indexWriter, termQuery("field3", "value3"), true, legacyFormat); + indexWriter.forceMerge(1); + + // also include queries for percolator docs marked as deleted: + indexWriter.deleteDocuments(new Term("id", "6")); + indexWriter.close(); + + ShardId shardId = new ShardId("_index", ClusterState.UNKNOWN_UUID, 0); + IndexReader indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory), shardId); + assertThat(indexReader.leaves().size(), equalTo(1)); + assertThat(indexReader.numDeletedDocs(), equalTo(1)); + assertThat(indexReader.maxDoc(), equalTo(7)); + + initialize("field1", "type=keyword", "field2", "type=keyword", "field3", "type=keyword"); + + PercolatorQueryCache.QueriesLeaf leaf = cache.loadQueries(indexReader.leaves().get(0), version); + assertThat(leaf.queries.size(), equalTo(5)); + assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("field1", "value1")))); + assertThat(leaf.getQuery(1), equalTo(new WildcardQuery(new Term("field1", "v*")))); + assertThat(leaf.getQuery(2), equalTo(new BooleanQuery.Builder() + .add(new TermQuery(new Term("field1", "value1")), BooleanClause.Occur.MUST) + .add(new TermQuery(new Term("field2", "value2")), BooleanClause.Occur.MUST) + .build() + )); + 
assertThat(leaf.getQuery(4), equalTo(new TermQuery(new Term("field2", "value2")))); + assertThat(leaf.getQuery(6), equalTo(new TermQuery(new Term("field3", "value3")))); + + indexReader.close(); + directory.close(); + } + + public void testGetQueries() throws Exception { + Directory directory = newDirectory(); + IndexWriter indexWriter = new IndexWriter( + directory, + newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE) + ); + + storeQuery("0", indexWriter, termQuery("a", "0"), true, false); + storeQuery("1", indexWriter, termQuery("a", "1"), true, false); + storeQuery("2", indexWriter, termQuery("a", "2"), true, false); + indexWriter.flush(); + storeQuery("3", indexWriter, termQuery("a", "3"), true, false); + storeQuery("4", indexWriter, termQuery("a", "4"), true, false); + storeQuery("5", indexWriter, termQuery("a", "5"), true, false); + indexWriter.flush(); + storeQuery("6", indexWriter, termQuery("a", "6"), true, false); + storeQuery("7", indexWriter, termQuery("a", "7"), true, false); + storeQuery("8", indexWriter, termQuery("a", "8"), true, false); + indexWriter.flush(); + indexWriter.close(); + + ShardId shardId = new ShardId("_index", ClusterState.UNKNOWN_UUID , 0); + IndexReader indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory), shardId); + assertThat(indexReader.leaves().size(), equalTo(3)); + assertThat(indexReader.maxDoc(), equalTo(9)); + + initialize("a", "type=keyword"); + + try { + cache.getQueries(indexReader.leaves().get(0)); + fail("IllegalStateException expected"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), equalTo("queries not loaded, queries should be have been preloaded during index warming...")); + } + + IndexShard indexShard = mockIndexShard(); + ThreadPool threadPool = mockThreadPool(); + IndexWarmer.Listener listener = cache.createListener(threadPool); + listener.warmNewReaders(indexShard, new Engine.Searcher("test", new IndexSearcher(indexReader))); 
+ PercolatorQueryCacheStats stats = cache.getStats(shardId); + assertThat(stats.getNumQueries(), equalTo(9L)); + + PercolatorQuery.QueryRegistry.Leaf leaf = cache.getQueries(indexReader.leaves().get(0)); + assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "0")))); + assertThat(leaf.getQuery(1), equalTo(new TermQuery(new Term("a", "1")))); + assertThat(leaf.getQuery(2), equalTo(new TermQuery(new Term("a", "2")))); + + leaf = cache.getQueries(indexReader.leaves().get(1)); + assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "3")))); + assertThat(leaf.getQuery(1), equalTo(new TermQuery(new Term("a", "4")))); + assertThat(leaf.getQuery(2), equalTo(new TermQuery(new Term("a", "5")))); + + leaf = cache.getQueries(indexReader.leaves().get(2)); + assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "6")))); + assertThat(leaf.getQuery(1), equalTo(new TermQuery(new Term("a", "7")))); + assertThat(leaf.getQuery(2), equalTo(new TermQuery(new Term("a", "8")))); + + indexReader.close(); + directory.close(); + } + + public void testInvalidateEntries() throws Exception { + Directory directory = newDirectory(); + IndexWriter indexWriter = new IndexWriter( + directory, + newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE) + ); + + storeQuery("0", indexWriter, termQuery("a", "0"), true, false); + indexWriter.flush(); + storeQuery("1", indexWriter, termQuery("a", "1"), true, false); + indexWriter.flush(); + storeQuery("2", indexWriter, termQuery("a", "2"), true, false); + indexWriter.flush(); + + ShardId shardId = new ShardId("_index", ClusterState.UNKNOWN_UUID, 0); + IndexReader indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter), shardId); + assertThat(indexReader.leaves().size(), equalTo(3)); + assertThat(indexReader.maxDoc(), equalTo(3)); + + initialize("a", "type=keyword"); + + IndexShard indexShard = mockIndexShard(); + ThreadPool threadPool = mockThreadPool(); + 
IndexWarmer.Listener listener = cache.createListener(threadPool); + listener.warmNewReaders(indexShard, new Engine.Searcher("test", new IndexSearcher(indexReader))); + assertThat(cache.getStats(shardId).getNumQueries(), equalTo(3L)); + + PercolatorQuery.QueryRegistry.Leaf leaf = cache.getQueries(indexReader.leaves().get(0)); + assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "0")))); + + leaf = cache.getQueries(indexReader.leaves().get(1)); + assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "1")))); + + leaf = cache.getQueries(indexReader.leaves().get(2)); + assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "2")))); + + // change merge policy, so that merges will actually happen: + indexWriter.getConfig().setMergePolicy(new TieredMergePolicy()); + indexWriter.deleteDocuments(new Term("id", "1")); + indexWriter.forceMergeDeletes(); + indexReader.close(); + indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter), shardId); + assertThat(indexReader.leaves().size(), equalTo(2)); + assertThat(indexReader.maxDoc(), equalTo(2)); + listener.warmNewReaders(indexShard, new Engine.Searcher("test", new IndexSearcher(indexReader))); + assertThat(cache.getStats(shardId).getNumQueries(), equalTo(2L)); + + leaf = cache.getQueries(indexReader.leaves().get(0)); + assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "0")))); + + leaf = cache.getQueries(indexReader.leaves().get(1)); + assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "2")))); + + indexWriter.forceMerge(1); + indexReader.close(); + indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter), shardId); + assertThat(indexReader.leaves().size(), equalTo(1)); + assertThat(indexReader.maxDoc(), equalTo(2)); + listener.warmNewReaders(indexShard, new Engine.Searcher("test", new IndexSearcher(indexReader))); + assertThat(cache.getStats(shardId).getNumQueries(), equalTo(2L)); + + leaf = 
cache.getQueries(indexReader.leaves().get(0)); + assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "0")))); + assertThat(leaf.getQuery(1), equalTo(new TermQuery(new Term("a", "2")))); + + indexWriter.close(); + indexReader.close(); + directory.close(); + } + + void storeQuery(String id, IndexWriter indexWriter, QueryBuilder queryBuilder, boolean typeField, boolean legacy) throws IOException { + Document doc = new Document(); + doc.add(new StringField("id", id, Field.Store.NO)); + if (typeField) { + doc.add(new StringField(TypeFieldMapper.NAME, PercolatorFieldMapper.TYPE_NAME, Field.Store.NO)); + } + if (legacy) { + BytesReference percolatorQuery = XContentFactory.jsonBuilder().startObject() + .field("query", queryBuilder) + .endObject().bytes(); + doc.add(new StoredField( + SourceFieldMapper.NAME, + percolatorQuery.array(), percolatorQuery.arrayOffset(), percolatorQuery.length()) + ); + } else { + BytesRef queryBuilderAsBytes = new BytesRef( + XContentFactory.contentBuilder(PercolatorQueryCache.QUERY_BUILDER_CONTENT_TYPE).value(queryBuilder).bytes().toBytes() + ); + doc.add(new BinaryDocValuesField(PercolatorFieldMapper.QUERY_BUILDER_FULL_FIELD_NAME, queryBuilderAsBytes)); + } + indexWriter.addDocument(doc); + } + + IndexShard mockIndexShard() { + IndexShard indexShard = mock(IndexShard.class); + ShardIndexWarmerService shardIndexWarmerService = mock(ShardIndexWarmerService.class); + when(shardIndexWarmerService.logger()).thenReturn(logger); + when(indexShard.warmerService()).thenReturn(shardIndexWarmerService); + IndexSettings indexSettings = new IndexSettings( + IndexMetaData.builder("_index").settings(Settings.builder() + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + ).build(), + Settings.EMPTY + ); + when(indexShard.indexSettings()).thenReturn(indexSettings); + return indexShard; + } + + ThreadPool mockThreadPool() { + ThreadPool 
threadPool = mock(ThreadPool.class); + when(threadPool.executor(anyString())).thenReturn(Runnable::run); + return threadPool; + } + +} diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index 8f63daaa8f46..e0f44fd84910 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -74,6 +74,7 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.similarity.SimilarityService; @@ -286,8 +287,9 @@ public abstract class AbstractQueryTestCase> } }); + PercolatorQueryCache percolatorQueryCache = new PercolatorQueryCache(idxSettings, () -> queryShardContext); indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class); - queryShardContext = new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry); + queryShardContext = new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry, percolatorQueryCache); //create some random type with some default field, those types will stick around for all of the subclasses currentTypes = new String[randomIntBetween(0, 5)]; for (int i = 0; i < currentTypes.length; i++) { diff --git a/core/src/test/java/org/elasticsearch/index/query/PercolatorQueryBuilderTests.java 
b/core/src/test/java/org/elasticsearch/index/query/PercolatorQueryBuilderTests.java new file mode 100644 index 000000000000..30d4ec908b87 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/PercolatorQueryBuilderTests.java @@ -0,0 +1,216 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import com.fasterxml.jackson.core.JsonParseException; +import org.apache.lucene.search.Query; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.script.Script; +import org.hamcrest.Matchers; + +import java.io.IOException; +import java.util.Collections; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class PercolatorQueryBuilderTests extends AbstractQueryTestCase { + + private String indexedDocumentIndex; + private String indexedDocumentType; + private String indexedDocumentId; + private String indexedDocumentRouting; + private String indexedDocumentPreference; + private Long indexedDocumentVersion; + private BytesReference documentSource; + + boolean indexedDocumentExists = true; + + @Override + protected PercolatorQueryBuilder doCreateTestQueryBuilder() { + return doCreateTestQueryBuilder(randomBoolean()); + } + + private PercolatorQueryBuilder doCreateTestQueryBuilder(boolean indexedDocument) { + String docType = randomAsciiOfLength(4); + documentSource = randomSource(); + if (indexedDocument) { + indexedDocumentIndex = randomAsciiOfLength(4); + indexedDocumentType = randomAsciiOfLength(4); + indexedDocumentId = randomAsciiOfLength(4); + indexedDocumentRouting = randomAsciiOfLength(4); + indexedDocumentPreference = randomAsciiOfLength(4); + indexedDocumentVersion = (long) randomIntBetween(0, 
Integer.MAX_VALUE); + return new PercolatorQueryBuilder(docType, indexedDocumentIndex, indexedDocumentType, indexedDocumentId, + indexedDocumentRouting, indexedDocumentPreference, indexedDocumentVersion); + } else { + return new PercolatorQueryBuilder(docType, documentSource); + } + } + + @Override + protected GetResponse executeGet(GetRequest getRequest) { + assertThat(getRequest.index(), Matchers.equalTo(indexedDocumentIndex)); + assertThat(getRequest.type(), Matchers.equalTo(indexedDocumentType)); + assertThat(getRequest.id(), Matchers.equalTo(indexedDocumentId)); + assertThat(getRequest.routing(), Matchers.equalTo(indexedDocumentRouting)); + assertThat(getRequest.preference(), Matchers.equalTo(indexedDocumentPreference)); + assertThat(getRequest.version(), Matchers.equalTo(indexedDocumentVersion)); + if (indexedDocumentExists) { + return new GetResponse( + new GetResult(indexedDocumentIndex, indexedDocumentType, indexedDocumentId, 0L, true, documentSource, + Collections.emptyMap()) + ); + } else { + return new GetResponse( + new GetResult(indexedDocumentIndex, indexedDocumentType, indexedDocumentId, -1, false, null, Collections.emptyMap()) + ); + } + } + + @Override + protected void doAssertLuceneQuery(PercolatorQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, Matchers.instanceOf(PercolatorQuery.class)); + PercolatorQuery percolatorQuery = (PercolatorQuery) query; + assertThat(percolatorQuery.getDocumentType(), Matchers.equalTo(queryBuilder.getDocumentType())); + assertThat(percolatorQuery.getDocumentSource(), Matchers.equalTo(documentSource)); + } + + @Override + public void testMustRewrite() throws IOException { + PercolatorQueryBuilder pqb = doCreateTestQueryBuilder(true); + try { + pqb.toQuery(queryShardContext()); + fail("IllegalStateException expected"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), equalTo("query builder must be rewritten first")); + } + QueryBuilder rewrite 
= pqb.rewrite(queryShardContext()); + PercolatorQueryBuilder geoShapeQueryBuilder = new PercolatorQueryBuilder(pqb.getDocumentType(), documentSource); + assertEquals(geoShapeQueryBuilder, rewrite); + } + + public void testIndexedDocumentDoesNotExist() throws IOException { + indexedDocumentExists = false; + PercolatorQueryBuilder pqb = doCreateTestQueryBuilder(true); + try { + pqb.rewrite(queryShardContext()); + fail("ResourceNotFoundException expected"); + } catch (ResourceNotFoundException e) { + String expectedString = "indexed document [" + indexedDocumentIndex + "/" + indexedDocumentType + "/" + + indexedDocumentId + "] couldn't be found"; + assertThat(e.getMessage() , equalTo(expectedString)); + } + } + + // overwrite this test, because adding bogus field to the document part is valid and that would make the test fail + // (the document part represents the document being percolated and any key value pair is allowed there) + public void testUnknownObjectException() throws IOException { + String validQuery = createTestQueryBuilder().toString(); + int endPos = validQuery.indexOf("document"); + if (endPos == -1) { + endPos = validQuery.length(); + } + assertThat(validQuery, containsString("{")); + for (int insertionPosition = 0; insertionPosition < endPos; insertionPosition++) { + if (validQuery.charAt(insertionPosition) == '{') { + String testQuery = validQuery.substring(0, insertionPosition) + "{ \"newField\" : " + + validQuery.substring(insertionPosition) + "}"; + try { + parseQuery(testQuery); + fail("some parsing exception expected for query: " + testQuery); + } catch (ParsingException | Script.ScriptParseException | ElasticsearchParseException e) { + // different kinds of exception wordings depending on location + // of mutation, so no simple asserts possible here + } catch (JsonParseException e) { + // mutation produced invalid json + } + } + } + } + + public void testRequiredParameters() { + try { + QueryBuilders.percolatorQuery(null, new 
BytesArray("{}")); + fail("IllegalArgumentException expected"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("[document_type] is a required argument")); + } + try { + QueryBuilders.percolatorQuery("_document_type", null); + fail("IllegalArgumentException expected"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("[document] is a required argument")); + } + try { + QueryBuilders.percolatorQuery(null, "_index", "_type", "_id", null, null, null); + fail("IllegalArgumentException expected"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("[document_type] is a required argument")); + } + try { + QueryBuilders.percolatorQuery("_document_type", null, "_type", "_id", null, null, null); + fail("IllegalArgumentException expected"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("[index] is a required argument")); + } + try { + QueryBuilders.percolatorQuery("_document_type", "_index", null, "_id", null, null, null); + fail("IllegalArgumentException expected"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("[type] is a required argument")); + } + try { + QueryBuilders.percolatorQuery("_document_type", "_index", "_type", null, null, null, null); + fail("IllegalArgumentException expected"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("[id] is a required argument")); + } + } + + public void testFromJsonNoDocumentType() throws IOException { + try { + parseQuery("{\"percolator\" : { \"document\": {}}"); + fail("IllegalArgumentException expected"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("[percolator] query is missing required [document_type] parameter")); + } + } + + private static BytesReference randomSource() { + try { + XContentBuilder xContent = XContentFactory.jsonBuilder(); + xContent.map(RandomDocumentPicks.randomSource(random())); + return 
xContent.bytes(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorQueryTests.java b/core/src/test/java/org/elasticsearch/index/query/PercolatorQueryTests.java similarity index 82% rename from core/src/test/java/org/elasticsearch/percolator/PercolatorQueryTests.java rename to core/src/test/java/org/elasticsearch/index/query/PercolatorQueryTests.java index 8bef91385670..347802d37706 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/PercolatorQueryTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.percolator; +package org.elasticsearch.index.query; import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import org.apache.lucene.document.Field; @@ -44,11 +44,12 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.percolator.ExtractQueryTermsService; +import org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.test.ESTestCase; import org.junit.After; import org.junit.Before; @@ -75,13 +76,22 @@ public class PercolatorQueryTests extends ESTestCase { private Directory directory; private IndexWriter indexWriter; - private Map queries; + private Map queries; + private PercolatorQuery.QueryRegistry queryRegistry; private DirectoryReader directoryReader; @Before public void init() throws Exception { directory = newDirectory(); queries = new HashMap<>(); + queryRegistry = ctx -> docId -> { + try { + String val = 
ctx.reader().document(docId).get(UidFieldMapper.NAME); + return queries.get(Uid.createUid(val).id()); + } catch (IOException e) { + throw new RuntimeException(e); + } + }; IndexWriterConfig config = new IndexWriterConfig(new WhitespaceAnalyzer()); config.setMergePolicy(NoMergePolicy.INSTANCE); indexWriter = new IndexWriter(directory, config); @@ -127,8 +137,10 @@ public class PercolatorQueryTests extends ESTestCase { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolatorQuery.Builder builder = new PercolatorQuery.Builder( + "docType", + queryRegistry, + new BytesArray("{}"), percolateSearcher, - queries, new MatchAllDocsQuery() ); builder.extractQueryTermsQuery(EXTRACTED_TERMS_FIELD_NAME, UNKNOWN_QUERY_FIELD_NAME); @@ -142,39 +154,6 @@ public class PercolatorQueryTests extends ESTestCase { assertThat(topDocs.scoreDocs[4].doc, equalTo(7)); } - public void testWithScoring() throws Exception { - addPercolatorQuery("1", new TermQuery(new Term("field", "brown")), "field", "value1"); - - indexWriter.close(); - directoryReader = DirectoryReader.open(directory); - // don't use newSearcher, which randomizes similarity. if it gets classic sim, the test eats it, - // as the score becomes 1 due to querynorm. 
- IndexSearcher shardSearcher = new IndexSearcher(directoryReader); - - MemoryIndex memoryIndex = new MemoryIndex(); - memoryIndex.addField("field", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer()); - IndexSearcher percolateSearcher = memoryIndex.createSearcher(); - - PercolatorQuery.Builder builder = new PercolatorQuery.Builder( - percolateSearcher, - queries, - new MatchAllDocsQuery() - ); - builder.extractQueryTermsQuery(EXTRACTED_TERMS_FIELD_NAME, UNKNOWN_QUERY_FIELD_NAME); - builder.setPercolateQuery(new TermQuery(new Term("field", "value1"))); - - PercolatorQuery percolatorQuery = builder.build(); - TopDocs topDocs = shardSearcher.search(percolatorQuery, 1); - assertThat(topDocs.totalHits, equalTo(1)); - assertThat(topDocs.scoreDocs.length, equalTo(1)); - assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); - assertThat(topDocs.scoreDocs[0].score, not(1f)); - - Explanation explanation = shardSearcher.explain(percolatorQuery, 0); - assertThat(explanation.isMatch(), is(true)); - assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[0].score)); - } - public void testDuel() throws Exception { int numQueries = scaledRandomIntBetween(32, 256); for (int i = 0; i < numQueries; i++) { @@ -203,8 +182,10 @@ public class PercolatorQueryTests extends ESTestCase { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolatorQuery.Builder builder1 = new PercolatorQuery.Builder( + "docType", + queryRegistry, + new BytesArray("{}"), percolateSearcher, - queries, new MatchAllDocsQuery() ); // enables the optimization that prevents queries from being evaluated that don't match @@ -212,8 +193,10 @@ public class PercolatorQueryTests extends ESTestCase { TopDocs topDocs1 = shardSearcher.search(builder1.build(), 10); PercolatorQuery.Builder builder2 = new PercolatorQuery.Builder( + "docType", + queryRegistry, + new BytesArray("{}"), percolateSearcher, - queries, new MatchAllDocsQuery() ); TopDocs topDocs2 = 
shardSearcher.search(builder2.build(), 10); @@ -227,10 +210,11 @@ public class PercolatorQueryTests extends ESTestCase { } void addPercolatorQuery(String id, Query query, String... extraFields) throws IOException { - queries.put(new BytesRef(id), query); + queries.put(id, query); ParseContext.Document document = new ParseContext.Document(); - ExtractQueryTermsService.extractQueryTerms(query, document, EXTRACTED_TERMS_FIELD_NAME, UNKNOWN_QUERY_FIELD_NAME, EXTRACTED_TERMS_FIELD_TYPE); - document.add(new StoredField(UidFieldMapper.NAME, Uid.createUid(PercolatorService.TYPE_NAME, id))); + ExtractQueryTermsService.extractQueryTerms(query, document, EXTRACTED_TERMS_FIELD_NAME, UNKNOWN_QUERY_FIELD_NAME, + EXTRACTED_TERMS_FIELD_TYPE); + document.add(new StoredField(UidFieldMapper.NAME, Uid.createUid(PercolatorFieldMapper.TYPE_NAME, id))); assert extraFields.length % 2 == 0; for (int i = 0; i < extraFields.length; i++) { document.add(new StringField(extraFields[i], extraFields[++i], Field.Store.NO)); diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java index 110c93d429ce..f705db3a537a 100644 --- a/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java @@ -47,7 +47,7 @@ public class QueryShardContextTests extends ESTestCase { MapperService mapperService = mock(MapperService.class); when(mapperService.getIndexSettings()).thenReturn(indexSettings); QueryShardContext context = new QueryShardContext( - indexSettings, null, null, mapperService, null, null, null + indexSettings, null, null, mapperService, null, null, null, null ); context.setAllowUnmappedFields(false); diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 2954558e73d4..c44a08a14376 100644 --- 
a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -85,6 +85,7 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.UidFieldMapper; +import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.snapshots.IndexShardRepository; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.store.Store; @@ -536,7 +537,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); - ShardStats stats = new ShardStats(shard.routingEntry(), shard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), shard, new CommonStatsFlags()), shard.commitStats()); + ShardStats stats = new ShardStats(shard.routingEntry(), shard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), test.cache().getPercolatorQueryCache(), shard, new CommonStatsFlags()), shard.commitStats()); assertEquals(shard.shardPath().getRootDataPath().toString(), stats.getDataPath()); assertEquals(shard.shardPath().getRootStatePath().toString(), stats.getStatePath()); assertEquals(shard.shardPath().isCustomDataPath(), stats.isCustomDataPath()); @@ -1160,7 +1161,11 @@ public class IndexShardTests extends ESSingleNodeTestCase { ShardRouting routing = new ShardRouting(shard.routingEntry()); shard.close("simon says", true); NodeServicesProvider indexServices = indexService.getIndexServices(); - IndexShard newShard = new IndexShard(shard.shardId(), indexService.getIndexSettings(), shard.shardPath(), shard.store(), indexService.cache(), indexService.mapperService(), 
indexService.similarityService(), indexService.fieldData(), shard.getEngineFactory(), indexService.getIndexEventListener(), wrapper, indexServices, indexService.getSearchSlowLog(), null, listeners); + IndexShard newShard = new IndexShard(shard.shardId(), indexService.getIndexSettings(), shard.shardPath(), + shard.store(), indexService.cache(), indexService.mapperService(), indexService.similarityService(), + indexService.fieldData(), shard.getEngineFactory(), indexService.getIndexEventListener(), wrapper, + indexServices, indexService.getSearchSlowLog(), null, listeners + ); ShardRoutingHelper.reinit(routing); newShard.updateRoutingEntry(routing, false); DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, Version.CURRENT); diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java index 35bffcf5cb27..8ea053a64ab1 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java @@ -37,6 +37,7 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequestBuilder; import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder; +import org.elasticsearch.action.percolate.MultiPercolateResponse; import org.elasticsearch.action.percolate.PercolateRequestBuilder; import org.elasticsearch.action.percolate.PercolateSourceBuilder; import org.elasticsearch.action.search.MultiSearchRequestBuilder; @@ -63,6 +64,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; @@ -793,7 +795,13 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { if (requestBuilder instanceof MultiSearchRequestBuilder) { MultiSearchResponse multiSearchResponse = ((MultiSearchRequestBuilder) requestBuilder).get(); assertThat(multiSearchResponse.getResponses().length, equalTo(1)); + assertThat(multiSearchResponse.getResponses()[0].isFailure(), is(true)); assertThat(multiSearchResponse.getResponses()[0].getResponse(), nullValue()); + } else if (requestBuilder instanceof MultiPercolateRequestBuilder) { + MultiPercolateResponse multiPercolateResponse = ((MultiPercolateRequestBuilder) requestBuilder).get(); + assertThat(multiPercolateResponse.getItems().length, equalTo(1)); + assertThat(multiPercolateResponse.getItems()[0].isFailure(), is(true)); + assertThat(multiPercolateResponse.getItems()[0].getResponse(), nullValue()); } else { try { requestBuilder.get(); diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index fd8ee45a0625..b7cb64a7d2f0 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -661,7 +661,7 @@ public class IndexStatsIT extends ESIntegTestCase { public void testFlagOrdinalOrder() { Flag[] flags = new Flag[]{Flag.Store, Flag.Indexing, Flag.Get, Flag.Search, Flag.Merge, Flag.Flush, Flag.Refresh, - Flag.QueryCache, Flag.FieldData, Flag.Docs, Flag.Warmer, Flag.Percolate, Flag.Completion, Flag.Segments, + Flag.QueryCache, Flag.FieldData, Flag.Docs, Flag.Warmer, Flag.PercolatorCache, Flag.Completion, Flag.Segments, Flag.Translog, 
Flag.Suggest, Flag.RequestCache, Flag.Recovery}; assertThat(flags.length, equalTo(Flag.values().length)); @@ -902,7 +902,7 @@ public class IndexStatsIT extends ESIntegTestCase { case Warmer: builder.setWarmer(set); break; - case Percolate: + case PercolatorCache: builder.setPercolate(set); break; case Completion: @@ -953,8 +953,8 @@ public class IndexStatsIT extends ESIntegTestCase { return response.getStore() != null; case Warmer: return response.getWarmer() != null; - case Percolate: - return response.getPercolate() != null; + case PercolatorCache: + return response.getPercolatorCache() != null; case Completion: return response.getCompletion() != null; case Segments: diff --git a/core/src/test/java/org/elasticsearch/percolator/ConcurrentPercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/ConcurrentPercolatorIT.java deleted file mode 100644 index 1cf2ef035b9d..000000000000 --- a/core/src/test/java/org/elasticsearch/percolator/ConcurrentPercolatorIT.java +++ /dev/null @@ -1,394 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.percolator; - -import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.percolate.PercolateResponse; -import org.elasticsearch.action.percolate.PercolateSourceBuilder; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.test.ESIntegTestCase; - -import java.util.Random; -import java.util.Set; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Semaphore; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; - -import static org.elasticsearch.index.query.QueryBuilders.boolQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.percolator.PercolatorTestUtil.convertFromTextArray; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.arrayContaining; -import static org.hamcrest.Matchers.arrayContainingInAnyOrder; -import static org.hamcrest.Matchers.arrayWithSize; -import static org.hamcrest.Matchers.emptyArray; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.nullValue; - - -/** - * - */ -public class ConcurrentPercolatorIT extends ESIntegTestCase { - public void testSimpleConcurrentPercolator() throws Throwable { - // We need to index a document / define mapping, otherwise field1 doesn't get recognized as number field. - // If we don't do this, then 'test2' percolate query gets parsed as a TermQuery and not a RangeQuery. 
- // The percolate api doesn't parse the doc if no queries have registered, so it can't lazily create a mapping - assertAcked(prepareCreate("index").addMapping("type", "field1", "type=long", "field2", "type=text")); // random # shards better has a mapping! - ensureGreen(); - - final BytesReference onlyField1 = XContentFactory.jsonBuilder().startObject().startObject("doc") - .field("field1", 1) - .endObject().endObject().bytes(); - final BytesReference onlyField2 = XContentFactory.jsonBuilder().startObject().startObject("doc") - .field("field2", "value") - .endObject().endObject().bytes(); - final BytesReference bothFields = XContentFactory.jsonBuilder().startObject().startObject("doc") - .field("field1", 1) - .field("field2", "value") - .endObject().endObject().bytes(); - - client().prepareIndex("index", "type", "1").setSource(XContentFactory.jsonBuilder().startObject() - .field("field1", 1) - .field("field2", "value") - .endObject()).execute().actionGet(); - - client().prepareIndex("index", PercolatorService.TYPE_NAME, "test1") - .setSource(XContentFactory.jsonBuilder().startObject().field("query", termQuery("field2", "value")).endObject()) - .execute().actionGet(); - client().prepareIndex("index", PercolatorService.TYPE_NAME, "test2") - .setSource(XContentFactory.jsonBuilder().startObject().field("query", termQuery("field1", 1)).endObject()) - .execute().actionGet(); - refresh(); // make sure it's refreshed - - final CountDownLatch start = new CountDownLatch(1); - final AtomicBoolean stop = new AtomicBoolean(false); - final AtomicInteger counts = new AtomicInteger(0); - final AtomicReference exceptionHolder = new AtomicReference<>(); - Thread[] threads = new Thread[scaledRandomIntBetween(2, 5)]; - final int numberOfPercolations = scaledRandomIntBetween(1000, 10000); - - for (int i = 0; i < threads.length; i++) { - Runnable r = new Runnable() { - @Override - public void run() { - try { - start.await(); - while (!stop.get()) { - int count = 
counts.incrementAndGet(); - if ((count > numberOfPercolations)) { - stop.set(true); - } - PercolateResponse percolate; - if (count % 3 == 0) { - percolate = client().preparePercolate().setIndices("index").setDocumentType("type") - .setSource(bothFields) - .execute().actionGet(); - assertThat(percolate.getMatches(), arrayWithSize(2)); - assertThat(convertFromTextArray(percolate.getMatches(), "index"), arrayContainingInAnyOrder("test1", "test2")); - } else if (count % 3 == 1) { - percolate = client().preparePercolate().setIndices("index").setDocumentType("type") - .setSource(onlyField2) - .execute().actionGet(); - assertThat(percolate.getMatches(), arrayWithSize(1)); - assertThat(convertFromTextArray(percolate.getMatches(), "index"), arrayContaining("test1")); - } else { - percolate = client().preparePercolate().setIndices("index").setDocumentType("type") - .setSource(onlyField1) - .execute().actionGet(); - assertThat(percolate.getMatches(), arrayWithSize(1)); - assertThat(convertFromTextArray(percolate.getMatches(), "index"), arrayContaining("test2")); - } - } - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } catch (Throwable e) { - exceptionHolder.set(e); - Thread.currentThread().interrupt(); - } - } - }; - threads[i] = new Thread(r); - threads[i].start(); - } - - start.countDown(); - for (Thread thread : threads) { - thread.join(); - } - - Throwable assertionError = exceptionHolder.get(); - if (assertionError != null) { - throw assertionError; - } - } - - public void testConcurrentAddingAndPercolating() throws Exception { - assertAcked(prepareCreate("index").addMapping("type", "field1", "type=text", "field2", "type=text")); - ensureGreen(); - final int numIndexThreads = scaledRandomIntBetween(1, 3); - final int numPercolateThreads = scaledRandomIntBetween(2, 6); - final int numPercolatorOperationsPerThread = scaledRandomIntBetween(100, 1000); - - final Set exceptionsHolder = ConcurrentCollections.newConcurrentSet(); - final 
CountDownLatch start = new CountDownLatch(1); - final AtomicInteger runningPercolateThreads = new AtomicInteger(numPercolateThreads); - final AtomicInteger type1 = new AtomicInteger(); - final AtomicInteger type2 = new AtomicInteger(); - final AtomicInteger type3 = new AtomicInteger(); - - final AtomicInteger idGen = new AtomicInteger(); - - Thread[] indexThreads = new Thread[numIndexThreads]; - for (int i = 0; i < numIndexThreads; i++) { - final Random rand = new Random(getRandom().nextLong()); - Runnable r = new Runnable() { - @Override - public void run() { - try { - XContentBuilder onlyField1 = XContentFactory.jsonBuilder().startObject() - .field("query", termQuery("field1", "value")).endObject(); - XContentBuilder onlyField2 = XContentFactory.jsonBuilder().startObject() - .field("query", termQuery("field2", "value")).endObject(); - XContentBuilder field1And2 = XContentFactory.jsonBuilder().startObject() - .field("query", boolQuery().must(termQuery("field1", "value")).must(termQuery("field2", "value"))).endObject(); - - start.await(); - while (runningPercolateThreads.get() > 0) { - Thread.sleep(100); - int x = rand.nextInt(3); - String id = Integer.toString(idGen.incrementAndGet()); - IndexResponse response; - switch (x) { - case 0: - response = client().prepareIndex("index", PercolatorService.TYPE_NAME, id) - .setSource(onlyField1) - .setRefresh(true) - .execute().actionGet(); - type1.incrementAndGet(); - break; - case 1: - response = client().prepareIndex("index", PercolatorService.TYPE_NAME, id) - .setSource(onlyField2) - .setRefresh(true) - .execute().actionGet(); - type2.incrementAndGet(); - break; - case 2: - response = client().prepareIndex("index", PercolatorService.TYPE_NAME, id) - .setSource(field1And2) - .setRefresh(true) - .execute().actionGet(); - type3.incrementAndGet(); - break; - default: - throw new IllegalStateException("Illegal x=" + x); - } - assertThat(response.getId(), equalTo(id)); - assertThat(response.getVersion(), equalTo(1L)); - } - } 
catch (Throwable t) { - exceptionsHolder.add(t); - logger.error("Error in indexing thread...", t); - } - } - }; - indexThreads[i] = new Thread(r); - indexThreads[i].start(); - } - - Thread[] percolateThreads = new Thread[numPercolateThreads]; - for (int i = 0; i < numPercolateThreads; i++) { - final Random rand = new Random(getRandom().nextLong()); - Runnable r = new Runnable() { - @Override - public void run() { - try { - XContentBuilder onlyField1Doc = XContentFactory.jsonBuilder().startObject().startObject("doc") - .field("field1", "value") - .endObject().endObject(); - XContentBuilder onlyField2Doc = XContentFactory.jsonBuilder().startObject().startObject("doc") - .field("field2", "value") - .endObject().endObject(); - XContentBuilder field1AndField2Doc = XContentFactory.jsonBuilder().startObject().startObject("doc") - .field("field1", "value") - .field("field2", "value") - .endObject().endObject(); - start.await(); - for (int counter = 0; counter < numPercolatorOperationsPerThread; counter++) { - int x = rand.nextInt(3); - int atLeastExpected; - PercolateResponse response; - switch (x) { - case 0: - atLeastExpected = type1.get(); - response = client().preparePercolate().setIndices("index").setDocumentType("type") - .setSource(onlyField1Doc).execute().actionGet(); - assertNoFailures(response); - assertThat(response.getSuccessfulShards(), equalTo(response.getTotalShards())); - assertThat(response.getCount(), greaterThanOrEqualTo((long) atLeastExpected)); - break; - case 1: - atLeastExpected = type2.get(); - response = client().preparePercolate().setIndices("index").setDocumentType("type") - .setSource(onlyField2Doc).execute().actionGet(); - assertNoFailures(response); - assertThat(response.getSuccessfulShards(), equalTo(response.getTotalShards())); - assertThat(response.getCount(), greaterThanOrEqualTo((long) atLeastExpected)); - break; - case 2: - atLeastExpected = type3.get(); - response = client().preparePercolate().setIndices("index").setDocumentType("type") 
- .setSource(field1AndField2Doc).execute().actionGet(); - assertNoFailures(response); - assertThat(response.getSuccessfulShards(), equalTo(response.getTotalShards())); - assertThat(response.getCount(), greaterThanOrEqualTo((long) atLeastExpected)); - break; - } - } - } catch (Throwable t) { - exceptionsHolder.add(t); - logger.error("Error in percolate thread...", t); - } finally { - runningPercolateThreads.decrementAndGet(); - } - } - }; - percolateThreads[i] = new Thread(r); - percolateThreads[i].start(); - } - - start.countDown(); - for (Thread thread : indexThreads) { - thread.join(); - } - for (Thread thread : percolateThreads) { - thread.join(); - } - - for (Throwable t : exceptionsHolder) { - logger.error("Unexpected exception while indexing", t); - } - assertThat(exceptionsHolder.isEmpty(), equalTo(true)); - } - - public void testConcurrentAddingAndRemovingWhilePercolating() throws Exception { - assertAcked(prepareCreate("index").addMapping("type", "field1", "type=text")); - ensureGreen(); - final int numIndexThreads = scaledRandomIntBetween(1, 3); - final int numberPercolateOperation = scaledRandomIntBetween(10, 100); - - final AtomicReference exceptionHolder = new AtomicReference<>(null); - final AtomicInteger idGen = new AtomicInteger(0); - final Set liveIds = ConcurrentCollections.newConcurrentSet(); - final AtomicBoolean run = new AtomicBoolean(true); - Thread[] indexThreads = new Thread[numIndexThreads]; - final Semaphore semaphore = new Semaphore(numIndexThreads, true); - for (int i = 0; i < indexThreads.length; i++) { - final Random rand = new Random(getRandom().nextLong()); - Runnable r = new Runnable() { - @Override - public void run() { - try { - XContentBuilder doc = XContentFactory.jsonBuilder().startObject() - .field("query", termQuery("field1", "value")).endObject(); - outer: - while (run.get()) { - semaphore.acquire(); - try { - if (!liveIds.isEmpty() && rand.nextInt(100) < 19) { - String id; - do { - if (liveIds.isEmpty()) { - continue 
outer; - } - id = Integer.toString(randomInt(idGen.get())); - } while (!liveIds.remove(id)); - - DeleteResponse response = client().prepareDelete("index", PercolatorService.TYPE_NAME, id) - .setRefresh(true) - .execute().actionGet(); - assertThat(response.getId(), equalTo(id)); - assertThat("doc[" + id + "] should have been deleted, but isn't", response.isFound(), equalTo(true)); - } else { - String id = Integer.toString(idGen.getAndIncrement()); - IndexResponse response = client().prepareIndex("index", PercolatorService.TYPE_NAME, id) - .setSource(doc) - .setRefresh(true) - .execute().actionGet(); - liveIds.add(id); - assertThat(response.isCreated(), equalTo(true)); // We only add new docs - assertThat(response.getId(), equalTo(id)); - } - } finally { - semaphore.release(); - } - } - } catch (InterruptedException iex) { - logger.error("indexing thread was interrupted...", iex); - run.set(false); - } catch (Throwable t) { - run.set(false); - exceptionHolder.set(t); - logger.error("Error in indexing thread...", t); - } - } - }; - indexThreads[i] = new Thread(r); - indexThreads[i].start(); - } - - String percolateDoc = XContentFactory.jsonBuilder().startObject() - .field("field1", "value") - .endObject().string(); - for (int counter = 0; counter < numberPercolateOperation; counter++) { - Thread.sleep(5); - semaphore.acquire(numIndexThreads); - try { - if (!run.get()) { - break; - } - int atLeastExpected = liveIds.size(); - PercolateResponse response = client().preparePercolate().setIndices("index").setDocumentType("type") - .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(percolateDoc)) - .setSize(atLeastExpected) - .get(); - assertThat(response.getShardFailures(), emptyArray()); - assertThat(response.getSuccessfulShards(), equalTo(response.getTotalShards())); - assertThat(response.getMatches().length, equalTo(atLeastExpected)); - } finally { - semaphore.release(numIndexThreads); - } - } - run.set(false); - for (Thread thread : indexThreads) { - 
thread.join(); - } - assertThat("exceptionHolder should have been empty, but holds: " + exceptionHolder.toString(), exceptionHolder.get(), nullValue()); - } - -} diff --git a/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java index 6dfcc5a878ac..f51180f5a5a6 100644 --- a/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java @@ -24,8 +24,10 @@ import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder; import org.elasticsearch.action.percolate.MultiPercolateResponse; import org.elasticsearch.action.percolate.PercolateSourceBuilder; import org.elasticsearch.client.Requests; +import org.elasticsearch.common.compress.NotXContentException; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESIntegTestCase; @@ -49,6 +51,7 @@ import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -60,19 +63,19 @@ public class MultiPercolatorIT extends ESIntegTestCase { ensureGreen(); logger.info("--> register a queries"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute().actionGet(); - client().prepareIndex("test", 
PercolatorService.TYPE_NAME, "2") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "3") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(matchQuery("field1", "b")) .must(matchQuery("field1", "c")) ).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "4") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); refresh(); @@ -122,7 +125,7 @@ public class MultiPercolatorIT extends ESIntegTestCase { item = response.getItems()[4]; assertThat(item.getResponse(), nullValue()); assertThat(item.getErrorMessage(), notNullValue()); - assertThat(item.getErrorMessage(), containsString("document missing")); + assertThat(item.getErrorMessage(), containsString("[test/type/5] doesn't exist")); } public void testWithRouting() throws Exception { @@ -130,22 +133,22 @@ public class MultiPercolatorIT extends ESIntegTestCase { ensureGreen(); logger.info("--> register a queries"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") .setRouting("a") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "2") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") .setRouting("a") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "3") + client().prepareIndex("test", 
PercolatorFieldMapper.TYPE_NAME, "3") .setRouting("a") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(matchQuery("field1", "b")) .must(matchQuery("field1", "c")) ).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "4") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") .setRouting("a") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); @@ -201,7 +204,7 @@ public class MultiPercolatorIT extends ESIntegTestCase { item = response.getItems()[4]; assertThat(item.getResponse(), nullValue()); assertThat(item.getErrorMessage(), notNullValue()); - assertThat(item.getErrorMessage(), containsString("document missing")); + assertThat(item.getErrorMessage(), containsString("[test/type/5] doesn't exist")); } public void testExistingDocsOnly() throws Exception { @@ -210,7 +213,7 @@ public class MultiPercolatorIT extends ESIntegTestCase { int numQueries = randomIntBetween(50, 100); logger.info("--> register a queries"); for (int i = 0; i < numQueries; i++) { - client().prepareIndex("test", PercolatorService.TYPE_NAME, Integer.toString(i)) + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, Integer.toString(i)) .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); } @@ -253,7 +256,7 @@ public class MultiPercolatorIT extends ESIntegTestCase { assertThat(response.items().length, equalTo(numPercolateRequest)); for (MultiPercolateResponse.Item item : response) { assertThat(item.isFailure(), equalTo(true)); - assertThat(item.getErrorMessage(), containsString("document missing")); + assertThat(item.getErrorMessage(), containsString("doesn't exist")); assertThat(item.getResponse(), nullValue()); } @@ -283,12 +286,10 @@ public class MultiPercolatorIT extends ESIntegTestCase { createIndex("test"); ensureGreen(); - NumShards test = getNumShards("test"); - int numQueries = 
randomIntBetween(50, 100); logger.info("--> register a queries"); for (int i = 0; i < numQueries; i++) { - client().prepareIndex("test", PercolatorService.TYPE_NAME, Integer.toString(i)) + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, Integer.toString(i)) .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); } @@ -324,13 +325,8 @@ public class MultiPercolatorIT extends ESIntegTestCase { response = builder.execute().actionGet(); assertThat(response.items().length, equalTo(numPercolateRequest)); for (MultiPercolateResponse.Item item : response) { - assertThat(item.isFailure(), equalTo(false)); - assertThat(item.getResponse().getSuccessfulShards(), equalTo(0)); - assertThat(item.getResponse().getShardFailures().length, equalTo(test.numPrimaries)); - for (ShardOperationFailedException shardFailure : item.getResponse().getShardFailures()) { - assertThat(shardFailure.reason(), containsString("Failed to derive xcontent")); - assertThat(shardFailure.status().getStatus(), equalTo(400)); - } + assertThat(item.isFailure(), equalTo(true)); + assertThat(item.getFailure(), notNullValue()); } // one valid request @@ -402,7 +398,7 @@ public class MultiPercolatorIT extends ESIntegTestCase { assertAcked(client().admin().indices().prepareCreate("nestedindex").addMapping("company", mapping)); ensureGreen("nestedindex"); - client().prepareIndex("nestedindex", PercolatorService.TYPE_NAME, "Q").setSource(jsonBuilder().startObject() + client().prepareIndex("nestedindex", PercolatorFieldMapper.TYPE_NAME, "Q").setSource(jsonBuilder().startObject() .field("query", QueryBuilders.nestedQuery("employee", QueryBuilders.matchQuery("employee.name", "virginia potts").operator(Operator.AND)).scoreMode(ScoreMode.Avg)).endObject()).get(); refresh(); diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java b/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java 
deleted file mode 100644 index 854a25358e71..000000000000 --- a/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.percolator; - -import org.apache.lucene.index.Term; -import org.apache.lucene.search.TermQuery; -import org.elasticsearch.Version; -import org.elasticsearch.action.percolate.PercolateShardRequest; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.analysis.AnalysisService; -import org.elasticsearch.index.analysis.AnalyzerProvider; -import org.elasticsearch.index.analysis.CharFilterFactory; -import org.elasticsearch.index.analysis.TokenFilterFactory; -import org.elasticsearch.index.analysis.TokenizerFactory; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.query.QueryParser; -import 
org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.query.TermQueryParser; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.similarity.SimilarityService; -import org.elasticsearch.indices.IndicesModule; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.aggregations.AggregationBinaryParseElement; -import org.elasticsearch.search.aggregations.AggregationParseElement; -import org.elasticsearch.search.aggregations.AggregationPhase; -import org.elasticsearch.search.aggregations.AggregatorParsers; -import org.elasticsearch.search.highlight.HighlightPhase; -import org.elasticsearch.search.highlight.Highlighters; -import org.elasticsearch.search.sort.SortParseElement; -import org.elasticsearch.test.ESTestCase; -import org.junit.Before; -import org.mockito.Mockito; - -import java.util.Collections; -import java.util.Map; - -import static java.util.Collections.singletonMap; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; - -public class PercolateDocumentParserTests extends ESTestCase { - - private MapperService mapperService; - private PercolateDocumentParser parser; - private QueryShardContext queryShardContext; - private PercolateShardRequest request; - - @Before - public void init() { - IndexSettings indexSettings = new IndexSettings(new IndexMetaData.Builder("_index").settings( - Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) - .build(), Settings.EMPTY); - AnalysisService analysisService = new AnalysisService(indexSettings, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); - 
IndicesModule indicesModule = new IndicesModule(); - mapperService = new MapperService(indexSettings, analysisService, new SimilarityService(indexSettings, Collections.emptyMap()), indicesModule.getMapperRegistry(), () -> null); - - Map> parsers = singletonMap("term", new TermQueryParser()); - IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry(indexSettings.getSettings(), parsers); - - queryShardContext = new QueryShardContext(indexSettings, null, null, mapperService, null, null, indicesQueriesRegistry); - - HighlightPhase highlightPhase = new HighlightPhase(Settings.EMPTY, new Highlighters()); - AggregatorParsers aggregatorParsers = new AggregatorParsers(Collections.emptySet(), Collections.emptySet(), - new NamedWriteableRegistry()); - AggregationPhase aggregationPhase = new AggregationPhase(new AggregationParseElement(aggregatorParsers, indicesQueriesRegistry), - new AggregationBinaryParseElement(aggregatorParsers, indicesQueriesRegistry)); - parser = new PercolateDocumentParser(highlightPhase, new SortParseElement(), aggregationPhase); - - request = Mockito.mock(PercolateShardRequest.class); - Mockito.when(request.shardId()).thenReturn(new ShardId("_index", "_na_", 0)); - Mockito.when(request.documentType()).thenReturn("type"); - } - - public void testParseDoc() throws Exception { - XContentBuilder source = jsonBuilder().startObject() - .startObject("doc") - .field("field1", "value1") - .endObject() - .endObject(); - Mockito.when(request.source()).thenReturn(source.bytes()); - - PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService, queryShardContext); - ParsedDocument parsedDocument = parser.parse(request, context, mapperService); - assertThat(parsedDocument.rootDoc().get("field1"), equalTo("value1")); - } - - public void testParseDocAndOtherOptions() throws Exception { - XContentBuilder source = jsonBuilder().startObject() - .startObject("doc") - 
.field("field1", "value1") - .endObject() - .startObject("query") - .startObject("term").field("field1", "value1").endObject() - .endObject() - .field("track_scores", true) - .field("size", 123) - .startObject("sort").startObject("_score").endObject().endObject() - .endObject(); - Mockito.when(request.source()).thenReturn(source.bytes()); - - PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService, queryShardContext); - ParsedDocument parsedDocument = parser.parse(request, context, mapperService); - assertThat(parsedDocument.rootDoc().get("field1"), equalTo("value1")); - assertThat(context.percolateQuery(), equalTo(new TermQuery(new Term("field1", "value1")))); - assertThat(context.trackScores(), is(true)); - assertThat(context.size(), is(123)); - assertThat(context.sort(), nullValue()); - } - - public void testParseDocSource() throws Exception { - XContentBuilder source = jsonBuilder().startObject() - .startObject("query") - .startObject("term").field("field1", "value1").endObject() - .endObject() - .field("track_scores", true) - .field("size", 123) - .startObject("sort").startObject("_score").endObject().endObject() - .endObject(); - XContentBuilder docSource = jsonBuilder().startObject() - .field("field1", "value1") - .endObject(); - Mockito.when(request.source()).thenReturn(source.bytes()); - Mockito.when(request.docSource()).thenReturn(docSource.bytes()); - - PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService, queryShardContext); - ParsedDocument parsedDocument = parser.parse(request, context, mapperService); - assertThat(parsedDocument.rootDoc().get("field1"), equalTo("value1")); - assertThat(context.percolateQuery(), equalTo(new TermQuery(new Term("field1", "value1")))); - assertThat(context.trackScores(), is(true)); - assertThat(context.size(), is(123)); - assertThat(context.sort(), nullValue()); 
- } - - public void testParseDocSourceAndSource() throws Exception { - XContentBuilder source = jsonBuilder().startObject() - .startObject("doc") - .field("field1", "value1") - .endObject() - .startObject("query") - .startObject("term").field("field1", "value1").endObject() - .endObject() - .field("track_scores", true) - .field("size", 123) - .startObject("sort").startObject("_score").endObject().endObject() - .endObject(); - XContentBuilder docSource = jsonBuilder().startObject() - .field("field1", "value1") - .endObject(); - Mockito.when(request.source()).thenReturn(source.bytes()); - Mockito.when(request.docSource()).thenReturn(docSource.bytes()); - - PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService, queryShardContext); - try { - parser.parse(request, context, mapperService); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("Can't specify the document to percolate in the source of the request and as document id")); - } - } - -} diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java index f07e50b1dfcd..8393c80786c0 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java @@ -21,6 +21,7 @@ package org.elasticsearch.percolator; import org.elasticsearch.action.percolate.PercolateRequestBuilder; import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -69,7 +70,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { 
String value = values[i % numUniqueQueries]; expectedCount[i % numUniqueQueries]++; QueryBuilder queryBuilder = matchQuery("field1", value); - client().prepareIndex("test", PercolatorService.TYPE_NAME, Integer.toString(i)) + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, Integer.toString(i)) .setSource(jsonBuilder().startObject().field("query", queryBuilder).field("field2", "b").endObject()).execute() .actionGet(); } @@ -134,7 +135,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { String value = values[i % numUniqueQueries]; expectedCount[i % numUniqueQueries]++; QueryBuilder queryBuilder = matchQuery("field1", value); - client().prepareIndex("test", PercolatorService.TYPE_NAME, Integer.toString(i)) + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, Integer.toString(i)) .setSource(jsonBuilder().startObject().field("query", queryBuilder).field("field2", "b").endObject()).execute() .actionGet(); } @@ -212,7 +213,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { for (int i = 0; i < numQueries; i++) { String value = "value0"; QueryBuilder queryBuilder = matchQuery("field1", value); - client().prepareIndex("test", PercolatorService.TYPE_NAME, Integer.toString(i)) + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, Integer.toString(i)) .setSource(jsonBuilder().startObject().field("query", queryBuilder).field("field2", i % 3 == 0 ? 
"b" : "a").endObject()) .execute() .actionGet(); diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java index 5945a21dc977..f6f5d260550f 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java @@ -20,11 +20,9 @@ package org.elasticsearch.percolator; import com.vividsolutions.jts.geom.Coordinate; import org.apache.lucene.search.join.ScoreMode; -import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; -import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.action.percolate.PercolateSourceBuilder; @@ -38,17 +36,15 @@ import org.elasticsearch.common.settings.Settings.Builder; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.engine.DocumentMissingException; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.functionscore.weight.WeightBuilder; import org.elasticsearch.index.query.support.QueryInnerHits; -import org.elasticsearch.rest.RestStatus; import 
org.elasticsearch.search.highlight.HighlightBuilder; -import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; @@ -83,7 +79,6 @@ import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.fieldValueFactorFunction; import static org.elasticsearch.percolator.PercolatorTestUtil.convertFromTextArray; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertMatchCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; @@ -94,10 +89,7 @@ import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; /** @@ -113,19 +105,19 @@ public class PercolatorIT extends ESIntegTestCase { client().prepareIndex("test", "type", "1").setSource("field1", "value").execute().actionGet(); logger.info("--> register a queries"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "2") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") 
.setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "3") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(matchQuery("field1", "b")) .must(matchQuery("field1", "c")) ).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "4") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); refresh(); @@ -173,7 +165,8 @@ public class PercolatorIT extends ESIntegTestCase { .setGetRequest(Requests.getRequest("test").type("type").id("5")) .execute().actionGet(); fail("Exception should have been thrown"); - } catch (DocumentMissingException e) { + } catch (ResourceNotFoundException e) { + assertThat(e.getMessage(), equalTo("percolate document [test/type/5] doesn't exist")); } } @@ -194,7 +187,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(response.getMatches(), emptyArray()); // add first query... - client().prepareIndex("test", PercolatorService.TYPE_NAME, "test1") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "test1") .setSource(XContentFactory.jsonBuilder().startObject().field("query", termQuery("field2", "value")).endObject()) .execute().actionGet(); refresh(); @@ -207,7 +200,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("test1")); // add second query... 
- client().prepareIndex("test", PercolatorService.TYPE_NAME, "test2") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "test2") .setSource(XContentFactory.jsonBuilder().startObject().field("query", termQuery("field1", 1)).endObject()) .execute().actionGet(); refresh(); @@ -221,7 +214,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("test1", "test2")); - client().prepareDelete("test", PercolatorService.TYPE_NAME, "test2").execute().actionGet(); + client().prepareDelete("test", PercolatorFieldMapper.TYPE_NAME, "test2").execute().actionGet(); refresh(); response = client().preparePercolate() .setIndices("test").setDocumentType("type1") @@ -239,7 +232,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> register a queries"); for (int i = 1; i <= 100; i++) { - client().prepareIndex("test", PercolatorService.TYPE_NAME, Integer.toString(i)) + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, Integer.toString(i)) .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .setRouting(Integer.toString(i % 2)) .execute().actionGet(); @@ -282,7 +275,7 @@ public class PercolatorIT extends ESIntegTestCase { client().prepareIndex("my-queries-index", "test", "1").setSource("field1", "value1").execute().actionGet(); logger.info("--> register a query"); - client().prepareIndex("my-queries-index", PercolatorService.TYPE_NAME, "kuku1") + client().prepareIndex("my-queries-index", PercolatorFieldMapper.TYPE_NAME, "kuku1") .setSource(jsonBuilder().startObject() .field("color", "blue") .field("query", termQuery("field1", "value1")) @@ -296,7 +289,7 @@ public class PercolatorIT extends ESIntegTestCase { client().prepareIndex("my-queries-index", "test", "1").setSource("field1", "value1").execute().actionGet(); logger.info("--> register a query"); - client().prepareIndex("my-queries-index", PercolatorService.TYPE_NAME, 
"kuku2") + client().prepareIndex("my-queries-index", PercolatorFieldMapper.TYPE_NAME, "kuku2") .setSource(jsonBuilder().startObject() .field("color", "blue") .field("query", termQuery("field1", "value1")) @@ -323,7 +316,7 @@ public class PercolatorIT extends ESIntegTestCase { ensureGreen(); logger.info("--> register a query"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject() .field("source", "productizer") .field("query", QueryBuilders.constantScoreQuery(QueryBuilders.queryStringQuery("filingcategory:s"))) @@ -351,7 +344,7 @@ public class PercolatorIT extends ESIntegTestCase { ensureGreen(); logger.info("--> register a query"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "kuku") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "kuku") .setSource(jsonBuilder().startObject() .field("color", "blue") .field("query", termQuery("field1", "value1")) @@ -359,7 +352,7 @@ public class PercolatorIT extends ESIntegTestCase { .execute().actionGet(); refresh(); SearchResponse countResponse = client().prepareSearch().setSize(0) - .setQuery(matchAllQuery()).setTypes(PercolatorService.TYPE_NAME) + .setQuery(matchAllQuery()).setTypes(PercolatorFieldMapper.TYPE_NAME) .execute().actionGet(); assertThat(countResponse.getHits().totalHits(), equalTo(1L)); @@ -388,7 +381,7 @@ public class PercolatorIT extends ESIntegTestCase { client().admin().indices().prepareDelete("test").execute().actionGet(); logger.info("--> make sure percolated queries for it have been deleted as well"); countResponse = client().prepareSearch().setSize(0) - .setQuery(matchAllQuery()).setTypes(PercolatorService.TYPE_NAME) + .setQuery(matchAllQuery()).setTypes(PercolatorFieldMapper.TYPE_NAME) .execute().actionGet(); assertHitCount(countResponse, 0L); } @@ -398,7 +391,7 @@ public class PercolatorIT extends ESIntegTestCase { ensureGreen(); logger.info("--> 
register a query 1"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "kuku") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "kuku") .setSource(jsonBuilder().startObject() .field("color", "blue") .field("query", termQuery("field1", "value1")) @@ -407,7 +400,7 @@ public class PercolatorIT extends ESIntegTestCase { .execute().actionGet(); logger.info("--> register a query 2"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "bubu") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "bubu") .setSource(jsonBuilder().startObject() .field("color", "green") .field("query", termQuery("field1", "value2")) @@ -441,7 +434,7 @@ public class PercolatorIT extends ESIntegTestCase { ensureGreen(); logger.info("--> register a query 1"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "kuku") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "kuku") .setSource(jsonBuilder().startObject() .field("color", "blue") .field("query", termQuery("field1", "value1")) @@ -458,7 +451,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(convertFromTextArray(percolate.getMatches(), "test"), arrayContaining("kuku")); logger.info("--> register a query 2"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "bubu") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "bubu") .setSource(jsonBuilder().startObject() .field("color", "green") .field("query", termQuery("field1", "value2")) @@ -475,7 +468,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(convertFromTextArray(percolate.getMatches(), "test"), arrayContaining("bubu")); logger.info("--> register a query 3"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "susu") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "susu") .setSource(jsonBuilder().startObject() .field("color", "red") .field("query", termQuery("field1", "value2")) @@ -495,7 +488,7 @@ public class PercolatorIT 
extends ESIntegTestCase { assertThat(convertFromTextArray(percolate.getMatches(), "test"), arrayContaining("susu")); logger.info("--> deleting query 1"); - client().prepareDelete("test", PercolatorService.TYPE_NAME, "kuku").setRefresh(true).execute().actionGet(); + client().prepareDelete("test", PercolatorFieldMapper.TYPE_NAME, "kuku").setRefresh(true).execute().actionGet(); percolate = client().preparePercolate() .setIndices("test").setDocumentType("type1") @@ -512,7 +505,10 @@ public class PercolatorIT extends ESIntegTestCase { ensureGreen(); logger.info("--> register a query"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) + .execute().actionGet(); + client().prepareIndex("test2", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); refresh(); @@ -528,67 +524,7 @@ public class PercolatorIT extends ESIntegTestCase { NumShards numShards = getNumShards("test"); IndicesStatsResponse indicesResponse = client().admin().indices().prepareStats("test").execute().actionGet(); - assertThat(indicesResponse.getTotal().getPercolate().getCount(), equalTo((long) numShards.numPrimaries)); - assertThat(indicesResponse.getTotal().getPercolate().getCurrent(), equalTo(0L)); - assertThat(indicesResponse.getTotal().getPercolate().getNumQueries(), equalTo((long)numShards.dataCopies)); //number of copies - assertThat(indicesResponse.getTotal().getPercolate().getMemorySizeInBytes(), equalTo(-1L)); - - NodesStatsResponse nodesResponse = client().admin().cluster().prepareNodesStats().execute().actionGet(); - long percolateCount = 0; - for (NodeStats nodeStats : nodesResponse) { - percolateCount += nodeStats.getIndices().getPercolate().getCount(); - } - assertThat(percolateCount, equalTo((long) numShards.numPrimaries)); - - 
logger.info("--> Second percolate request"); - response = client().preparePercolate() - .setIndices("test").setDocumentType("type") - .setSource(jsonBuilder().startObject().startObject("doc").field("field", "val").endObject().endObject()) - .execute().actionGet(); - assertMatchCount(response, 1L); - assertThat(response.getMatches(), arrayWithSize(1)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("1")); - - indicesResponse = client().admin().indices().prepareStats().setPercolate(true).execute().actionGet(); - assertThat(indicesResponse.getTotal().getPercolate().getCount(), equalTo((long) numShards.numPrimaries * 2)); - assertThat(indicesResponse.getTotal().getPercolate().getCurrent(), equalTo(0L)); - assertThat(indicesResponse.getTotal().getPercolate().getNumQueries(), equalTo((long)numShards.dataCopies)); //number of copies - assertThat(indicesResponse.getTotal().getPercolate().getMemorySizeInBytes(), equalTo(-1L)); - - percolateCount = 0; - nodesResponse = client().admin().cluster().prepareNodesStats().execute().actionGet(); - for (NodeStats nodeStats : nodesResponse) { - percolateCount += nodeStats.getIndices().getPercolate().getCount(); - } - assertThat(percolateCount, equalTo((long) numShards.numPrimaries *2)); - - // We might be faster than 1 ms, so run upto 1000 times until have spend 1ms or more on percolating - boolean moreThanOneMs = false; - int counter = 3; // We already ran two times. 
- do { - indicesResponse = client().admin().indices().prepareStats("test").execute().actionGet(); - if (indicesResponse.getTotal().getPercolate().getTimeInMillis() > 0) { - moreThanOneMs = true; - break; - } - - logger.info("--> {}th percolate request", counter); - response = client().preparePercolate() - .setIndices("test").setDocumentType("type") - .setSource(jsonBuilder().startObject().startObject("doc").field("field", "val").endObject().endObject()) - .execute().actionGet(); - assertThat(response.getMatches(), arrayWithSize(1)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("1")); - } while (++counter <= 1000); - assertTrue("Something is off, we should have spent at least 1ms on percolating...", moreThanOneMs); - - long percolateSumTime = 0; - nodesResponse = client().admin().cluster().prepareNodesStats().execute().actionGet(); - for (NodeStats nodeStats : nodesResponse) { - percolateCount += nodeStats.getIndices().getPercolate().getCount(); - percolateSumTime += nodeStats.getIndices().getPercolate().getTimeInMillis(); - } - assertThat(percolateSumTime, greaterThan(0L)); + assertThat(indicesResponse.getTotal().getPercolatorCache().getNumQueries(), equalTo((long)numShards.dataCopies)); // number of copies } public void testPercolatingExistingDocs() throws Exception { @@ -602,19 +538,19 @@ public class PercolatorIT extends ESIntegTestCase { client().prepareIndex("test", "type", "4").setSource("field1", "d").execute().actionGet(); logger.info("--> register a queries"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "2") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", 
matchQuery("field1", "c")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "3") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(matchQuery("field1", "b")) .must(matchQuery("field1", "c")) ).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "4") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); refresh(); @@ -667,19 +603,19 @@ public class PercolatorIT extends ESIntegTestCase { client().prepareIndex("test", "type", "4").setSource("field1", "d").setRouting("1").execute().actionGet(); logger.info("--> register a queries"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "2") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "3") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(matchQuery("field1", "b")) .must(matchQuery("field1", "c")) ).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "4") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); refresh(); @@ -732,19 +668,19 @@ public class PercolatorIT extends 
ESIntegTestCase { client().prepareIndex("test", "type", "4").setSource("field1", "d").execute().actionGet(); logger.info("--> registering queries"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "2") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "3") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(matchQuery("field1", "b")) .must(matchQuery("field1", "c")) ).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "4") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); refresh(); @@ -788,7 +724,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> registering queries"); for (int i = 1; i <= 10; i++) { String index = i % 2 == 0 ? 
"test1" : "test2"; - client().prepareIndex(index, PercolatorService.TYPE_NAME, Integer.toString(i)) + client().prepareIndex(index, PercolatorFieldMapper.TYPE_NAME, Integer.toString(i)) .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); } @@ -861,15 +797,15 @@ public class PercolatorIT extends ESIntegTestCase { public void testPercolateWithAliasFilter() throws Exception { assertAcked(prepareCreate("my-index") - .addMapping(PercolatorService.TYPE_NAME, "a", "type=keyword") + .addMapping(PercolatorFieldMapper.TYPE_NAME, "a", "type=keyword") .addAlias(new Alias("a").filter(QueryBuilders.termQuery("a", "a"))) .addAlias(new Alias("b").filter(QueryBuilders.termQuery("a", "b"))) .addAlias(new Alias("c").filter(QueryBuilders.termQuery("a", "c"))) ); - client().prepareIndex("my-index", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("my-index", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).field("a", "a").endObject()) .get(); - client().prepareIndex("my-index", PercolatorService.TYPE_NAME, "2") + client().prepareIndex("my-index", PercolatorFieldMapper.TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).field("a", "b").endObject()) .get(); refresh(); @@ -944,19 +880,19 @@ public class PercolatorIT extends ESIntegTestCase { client().prepareIndex("test", "type", "1").setSource("field1", "value").execute().actionGet(); logger.info("--> register a queries"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "2") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", 
matchQuery("field1", "c")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "3") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(matchQuery("field1", "b")) .must(matchQuery("field1", "c")) ).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "4") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); refresh(); @@ -1000,7 +936,8 @@ public class PercolatorIT extends ESIntegTestCase { .setGetRequest(Requests.getRequest("test").type("type").id("5")) .execute().actionGet(); fail("Exception should have been thrown"); - } catch (DocumentMissingException e) { + } catch (ResourceNotFoundException e) { + assertThat(e.getMessage(), equalTo("percolate document [test/type/5] doesn't exist")); } } @@ -1015,19 +952,19 @@ public class PercolatorIT extends ESIntegTestCase { client().prepareIndex("test", "type", "4").setSource("field1", "d").execute().actionGet(); logger.info("--> register a queries"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "2") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "3") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(matchQuery("field1", "b")) .must(matchQuery("field1", 
"c")) ).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "4") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); refresh(); @@ -1075,7 +1012,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> register {} queries", totalQueries); for (int level = 1; level <= numLevels; level++) { for (int query = 1; query <= numQueriesPerLevel; query++) { - client().prepareIndex("my-index", PercolatorService.TYPE_NAME, level + "-" + query) + client().prepareIndex("my-index", PercolatorFieldMapper.TYPE_NAME, level + "-" + query) .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).field("level", level).endObject()) .execute().actionGet(); } @@ -1169,7 +1106,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> register {} queries", numQueries); for (int i = 0; i < numQueries; i++) { int value = randomInt(10); - client().prepareIndex("my-index", PercolatorService.TYPE_NAME, Integer.toString(i)) + client().prepareIndex("my-index", PercolatorFieldMapper.TYPE_NAME, Integer.toString(i)) .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).field("level", i).field("field1", value).endObject()) .execute().actionGet(); if (!controlMap.containsKey(value)) { @@ -1250,10 +1187,10 @@ public class PercolatorIT extends ESIntegTestCase { createIndex("my-index"); ensureGreen(); - client().prepareIndex("my-index", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("my-index", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).field("level", 1).endObject()) .execute().actionGet(); - client().prepareIndex("my-index", PercolatorService.TYPE_NAME, "2") + client().prepareIndex("my-index", PercolatorFieldMapper.TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", 
matchAllQuery()).field("level", 2).endObject()) .execute().actionGet(); refresh(); @@ -1271,33 +1208,6 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(response.getMatches()[1].getScore(), equalTo(1f)); } - public void testPercolateSortingUnsupportedField() throws Exception { - client().admin().indices().prepareCreate("my-index") - .addMapping("my-type", "field", "type=text") - .addMapping(PercolatorService.TYPE_NAME, "level", "type=integer", "query", "type=object,enabled=false") - .get(); - ensureGreen(); - - client().prepareIndex("my-index", PercolatorService.TYPE_NAME, "1") - .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).field("level", 1).endObject()) - .get(); - client().prepareIndex("my-index", PercolatorService.TYPE_NAME, "2") - .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).field("level", 2).endObject()) - .get(); - refresh(); - - PercolateResponse response = client().preparePercolate().setIndices("my-index").setDocumentType("my-type") - .setSize(2) - .setPercolateDoc(docBuilder().setDoc("field", "value")) - .setPercolateQuery(QueryBuilders.functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("level"))) - .addSort(SortBuilders.fieldSort("level")) - .get(); - - assertThat(response.getShardFailures().length, equalTo(getNumShards("my-index").numPrimaries)); - assertThat(response.getShardFailures()[0].status(), equalTo(RestStatus.BAD_REQUEST)); - assertThat(response.getShardFailures()[0].reason(), containsString("Only _score desc is supported")); - } - public void testPercolateOnEmptyIndex() throws Exception { client().admin().indices().prepareCreate("my-index").execute().actionGet(); ensureGreen(); @@ -1306,7 +1216,7 @@ public class PercolatorIT extends ESIntegTestCase { .setSortByScore(true) .setSize(2) .setPercolateDoc(docBuilder().setDoc("field", "value")) - .setPercolateQuery(QueryBuilders.functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("level"))) + 
.setPercolateQuery(QueryBuilders.functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("level").missing(0.0))) .execute().actionGet(); assertMatchCount(response, 0L); } @@ -1322,19 +1232,19 @@ public class PercolatorIT extends ESIntegTestCase { assertAcked(prepareCreate("test").addMapping("type", "field1", fieldMapping.toString())); logger.info("--> register a queries"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "brown fox")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "2") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "lazy dog")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "3") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "jumps")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "4") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "dog")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "5") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "5") .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "fox")).endObject()) .execute().actionGet(); refresh(); @@ -1525,7 +1435,7 @@ public class PercolatorIT extends ESIntegTestCase { ensureGreen(); logger.info("--> register a query"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject() .field("query", 
QueryBuilders.constantScoreQuery(QueryBuilders.boolQuery() .must(QueryBuilders.queryStringQuery("root")) @@ -1579,10 +1489,10 @@ public class PercolatorIT extends ESIntegTestCase { client().admin().indices().prepareCreate("test").execute().actionGet(); ensureGreen(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .get(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .get(); refresh(); @@ -1602,7 +1512,7 @@ public class PercolatorIT extends ESIntegTestCase { ensureGreen(); try { - client().prepareIndex("test", PercolatorService.TYPE_NAME) + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME) .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "value")).endObject()) .get(); fail(); @@ -1611,7 +1521,7 @@ public class PercolatorIT extends ESIntegTestCase { } try { - client().prepareIndex("test", PercolatorService.TYPE_NAME) + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME) .setSource(jsonBuilder().startObject().field("query", rangeQuery("field1").from(0).to(1)).endObject()) .get(); fail(); @@ -1626,10 +1536,10 @@ public class PercolatorIT extends ESIntegTestCase { .get(); ensureGreen(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", rangeQuery("timestamp").from("now-1d").to("now")).endObject()) .get(); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "2") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", 
constantScoreQuery(rangeQuery("timestamp").from("now-1d").to("now"))).endObject()) .get(); refresh(); @@ -1654,7 +1564,7 @@ public class PercolatorIT extends ESIntegTestCase { assertAcked(client().admin().indices().prepareCreate("nestedindex").addMapping("company", mapping)); ensureGreen("nestedindex"); - client().prepareIndex("nestedindex", PercolatorService.TYPE_NAME, "Q").setSource(jsonBuilder().startObject() + client().prepareIndex("nestedindex", PercolatorFieldMapper.TYPE_NAME, "Q").setSource(jsonBuilder().startObject() .field("query", QueryBuilders.nestedQuery("employee", QueryBuilders.matchQuery("employee.name", "virginia potts").operator(Operator.AND)).scoreMode(ScoreMode.Avg)).endObject()).get(); refresh(); @@ -1783,12 +1693,12 @@ public class PercolatorIT extends ESIntegTestCase { "}"; assertAcked(client().admin().indices().prepareCreate("test").addMapping("doc", mapping)); ensureGreen("test"); - client().prepareIndex("test", PercolatorService.TYPE_NAME).setSource(q1).setId("q1").get(); - client().prepareIndex("test", PercolatorService.TYPE_NAME).setSource(q2).setId("q2").get(); - client().prepareIndex("test", PercolatorService.TYPE_NAME).setSource(q3).setId("q3").get(); - client().prepareIndex("test", PercolatorService.TYPE_NAME).setSource(q4).setId("q4").get(); - client().prepareIndex("test", PercolatorService.TYPE_NAME).setSource(q5).setId("q5").get(); - client().prepareIndex("test", PercolatorService.TYPE_NAME).setSource(q6).setId("q6").get(); + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME).setSource(q1).setId("q1").get(); + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME).setSource(q2).setId("q2").get(); + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME).setSource(q3).setId("q3").get(); + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME).setSource(q4).setId("q4").get(); + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME).setSource(q5).setId("q5").get(); + 
client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME).setSource(q6).setId("q6").get(); refresh(); PercolateResponse response = client().preparePercolate() .setIndices("test").setDocumentType("doc") @@ -1822,7 +1732,7 @@ public class PercolatorIT extends ESIntegTestCase { .put("index.percolator.map_unmapped_fields_as_string", true); assertAcked(prepareCreate("test") .setSettings(settings)); - client().prepareIndex("test", PercolatorService.TYPE_NAME) + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME) .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "value")).endObject()).get(); refresh(); logger.info("--> Percolate doc with field1=value"); @@ -1842,7 +1752,7 @@ public class PercolatorIT extends ESIntegTestCase { assertAcked(prepareCreate("test") .setSettings(settings) .addMapping("type", "location", "type=geo_shape")); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", geoShapeQuery("location", ShapeBuilders.newEnvelope(new Coordinate(0d, 50d), new Coordinate(2d, 40d)))).endObject()) .get(); refresh(); @@ -1878,7 +1788,7 @@ public class PercolatorIT extends ESIntegTestCase { assertAcked(prepareCreate("index").addMapping("mapping", mapping)); try { - client().prepareIndex("index", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("index", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", nestedQuery("nested", matchQuery("nested.name", "value")).innerHit(new QueryInnerHits())).endObject()) .execute().actionGet(); fail("Expected a parse error, because inner_hits isn't supported in the percolate api"); @@ -1893,14 +1803,14 @@ public class PercolatorIT extends ESIntegTestCase { // the percolate api assertAcked(prepareCreate("index").addMapping("child", "_parent", "type=parent").addMapping("parent")); - client().prepareIndex("index", 
PercolatorService.TYPE_NAME, "1") + client().prepareIndex("index", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", hasChildQuery("child", matchAllQuery())).endObject()) .execute().actionGet(); } public void testPercolateDocumentWithParentField() throws Exception { assertAcked(prepareCreate("index").addMapping("child", "_parent", "type=parent").addMapping("parent")); - client().prepareIndex("index", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("index", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); refresh(); @@ -1915,7 +1825,7 @@ public class PercolatorIT extends ESIntegTestCase { } public void testFilterByNow() throws Exception { - client().prepareIndex("index", PercolatorService.TYPE_NAME, "1") + client().prepareIndex("index", PercolatorFieldMapper.TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).field("created", "2015-07-10T14:41:54+0000").endObject()) .get(); refresh(); diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java deleted file mode 100644 index 05a4a156a015..000000000000 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.percolator; - -import org.apache.lucene.analysis.core.WhitespaceAnalyzer; -import org.apache.lucene.document.FieldType; -import org.apache.lucene.document.StoredField; -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.NoMergePolicy; -import org.apache.lucene.index.Term; -import org.apache.lucene.index.memory.MemoryIndex; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.TopDocs; -import org.apache.lucene.store.Directory; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; -import org.elasticsearch.action.percolate.PercolateShardResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.Uid; -import org.elasticsearch.index.mapper.internal.UidFieldMapper; -import org.elasticsearch.index.percolator.ExtractQueryTermsService; -import org.elasticsearch.index.percolator.PercolatorFieldMapper; -import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; -import 
org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.internal.ContextIndexSearcher; -import org.elasticsearch.test.ESTestCase; -import org.junit.After; -import org.junit.Before; - -import java.io.IOException; - -import static org.hamcrest.Matchers.equalTo; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class PercolatorServiceTests extends ESTestCase { - - private Directory directory; - private IndexWriter indexWriter; - private DirectoryReader directoryReader; - - @Before - public void init() throws Exception { - directory = newDirectory(); - IndexWriterConfig config = new IndexWriterConfig(new WhitespaceAnalyzer()); - config.setMergePolicy(NoMergePolicy.INSTANCE); - indexWriter = new IndexWriter(directory, config); - } - - @After - public void destroy() throws Exception { - directoryReader.close(); - directory.close(); - } - - public void testCount() throws Exception { - PercolateContext context = mock(PercolateContext.class); - when(context.shardTarget()).thenReturn(new SearchShardTarget("_id", new Index("_index", "_na_"), 0)); - when(context.percolatorTypeFilter()).thenReturn(new MatchAllDocsQuery()); - when(context.isOnlyCount()).thenReturn(true); - IndexShard shard = mock(IndexShard.class); - when(shard.shardId()).thenReturn(new ShardId("_index", "_na_", 0)); - when(context.indexShard()).thenReturn(shard); - - PercolatorQueriesRegistry registry = createRegistry(); - addPercolatorQuery("1", new TermQuery(new Term("field", "brown")), indexWriter, registry); - addPercolatorQuery("2", new TermQuery(new Term("field", "fox")), indexWriter, registry); - addPercolatorQuery("3", new TermQuery(new Term("field", "monkey")), indexWriter, registry); - - indexWriter.close(); - directoryReader = DirectoryReader.open(directory); - IndexSearcher shardSearcher = newSearcher(directoryReader); - 
when(context.searcher()).thenReturn(new ContextIndexSearcher(new Engine.Searcher("test", shardSearcher), shardSearcher.getQueryCache(), shardSearcher.getQueryCachingPolicy())); - - MemoryIndex memoryIndex = new MemoryIndex(); - memoryIndex.addField("field", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer()); - IndexSearcher percolateSearcher = memoryIndex.createSearcher(); - when(context.docSearcher()).thenReturn(percolateSearcher); - - PercolateShardResponse response = PercolatorService.doPercolate(context, registry, null, null, null); - assertThat(response.topDocs().totalHits, equalTo(2)); - } - - public void testTopMatching() throws Exception { - PercolateContext context = mock(PercolateContext.class); - when(context.shardTarget()).thenReturn(new SearchShardTarget("_id", new Index("_index", "_na_"), 0)); - when(context.percolatorTypeFilter()).thenReturn(new MatchAllDocsQuery()); - when(context.size()).thenReturn(10); - IndexShard shard = mock(IndexShard.class); - when(shard.shardId()).thenReturn(new ShardId("_index", "_na_", 0)); - when(context.indexShard()).thenReturn(shard); - - PercolatorQueriesRegistry registry = createRegistry(); - addPercolatorQuery("1", new TermQuery(new Term("field", "brown")), indexWriter, registry); - addPercolatorQuery("2", new TermQuery(new Term("field", "monkey")), indexWriter, registry); - addPercolatorQuery("3", new TermQuery(new Term("field", "fox")), indexWriter, registry); - - indexWriter.close(); - directoryReader = DirectoryReader.open(directory); - IndexSearcher shardSearcher = newSearcher(directoryReader); - when(context.searcher()).thenReturn(new ContextIndexSearcher(new Engine.Searcher("test", shardSearcher), shardSearcher.getQueryCache(), shardSearcher.getQueryCachingPolicy())); - - MemoryIndex memoryIndex = new MemoryIndex(); - memoryIndex.addField("field", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer()); - IndexSearcher percolateSearcher = memoryIndex.createSearcher(); - 
when(context.docSearcher()).thenReturn(percolateSearcher); - - PercolateShardResponse response = PercolatorService.doPercolate(context, registry, null, null, null); - TopDocs topDocs = response.topDocs(); - assertThat(topDocs.totalHits, equalTo(2)); - assertThat(topDocs.scoreDocs.length, equalTo(2)); - assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); - assertThat(topDocs.scoreDocs[1].doc, equalTo(2)); - } - - void addPercolatorQuery(String id, Query query, IndexWriter writer, PercolatorQueriesRegistry registry) throws IOException { - registry.getPercolateQueries().put(new BytesRef(id), query); - ParseContext.Document document = new ParseContext.Document(); - FieldType extractedQueryTermsFieldType = new FieldType(); - extractedQueryTermsFieldType.setTokenized(false); - extractedQueryTermsFieldType.setIndexOptions(IndexOptions.DOCS); - extractedQueryTermsFieldType.freeze(); - ExtractQueryTermsService.extractQueryTerms(query, document, PercolatorFieldMapper.EXTRACTED_TERMS_FULL_FIELD_NAME, PercolatorFieldMapper.UNKNOWN_QUERY_FULL_FIELD_NAME, extractedQueryTermsFieldType); - document.add(new StoredField(UidFieldMapper.NAME, Uid.createUid(PercolatorService.TYPE_NAME, id))); - writer.addDocument(document); - } - - PercolatorQueriesRegistry createRegistry() { - Index index = new Index("_index", "_na_"); - IndexSettings indexSettings = new IndexSettings(new IndexMetaData.Builder("_index").settings( - Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) - .build(), Settings.EMPTY); - return new PercolatorQueriesRegistry( - new ShardId(index, 0), - indexSettings, - null - ); - } - -} diff --git a/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java deleted file mode 100644 index f76a117ddb0d..000000000000 --- 
a/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.percolator; - -import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse; -import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder; -import org.elasticsearch.action.percolate.MultiPercolateResponse; -import org.elasticsearch.action.percolate.PercolateRequestBuilder; -import org.elasticsearch.action.percolate.PercolateResponse; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.Requests; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.Priority; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.elasticsearch.test.ESIntegTestCase.Scope; - -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; -import 
java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReentrantLock; -import java.util.function.Predicate; - -import static org.elasticsearch.action.percolate.PercolateSourceBuilder.docBuilder; -import static org.elasticsearch.client.Requests.clusterHealthRequest; -import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.percolator.PercolatorTestUtil.convertFromTextArray; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertMatchCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.arrayContainingInAnyOrder; -import static org.hamcrest.Matchers.arrayWithSize; -import static org.hamcrest.Matchers.emptyArray; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; - -@ClusterScope(scope = Scope.TEST, numDataNodes = 0, numClientNodes = 0, transportClientRatio = 0) -public class RecoveryPercolatorIT extends ESIntegTestCase { - @Override - protected int numberOfShards() { - return 1; - } - - public void testRestartNodePercolator1() throws Exception { - internalCluster().startNode(); - assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=text").addMapping(PercolatorService.TYPE_NAME, "color", "type=text")); - - logger.info("--> register a query"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "kuku") - .setSource(jsonBuilder().startObject() - .field("color", "blue") - .field("query", 
termQuery("field1", "value1")) - .endObject()) - .setRefresh(true) - .get(); - - PercolateResponse percolate = client().preparePercolate() - .setIndices("test").setDocumentType("type1") - .setSource(jsonBuilder().startObject().startObject("doc") - .field("field1", "value1") - .endObject().endObject()) - .get(); - assertThat(percolate.getMatches(), arrayWithSize(1)); - - internalCluster().rollingRestart(); - - logger.info("Running Cluster Health (wait for the shards to startup)"); - ensureYellow(); - - percolate = client().preparePercolate() - .setIndices("test").setDocumentType("type1") - .setSource(jsonBuilder().startObject().startObject("doc") - .field("field1", "value1") - .endObject().endObject()) - .get(); - assertMatchCount(percolate, 1L); - assertThat(percolate.getMatches(), arrayWithSize(1)); - } - - public void testRestartNodePercolator2() throws Exception { - internalCluster().startNode(); - assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=text").addMapping(PercolatorService.TYPE_NAME, "color", "type=text")); - - logger.info("--> register a query"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "kuku") - .setSource(jsonBuilder().startObject() - .field("color", "blue") - .field("query", termQuery("field1", "value1")) - .endObject()) - .setRefresh(true) - .get(); - - assertThat(client().prepareSearch().setSize(0).setTypes(PercolatorService.TYPE_NAME).setQuery(matchAllQuery()).get().getHits().totalHits(), equalTo(1L)); - - PercolateResponse percolate = client().preparePercolate() - .setIndices("test").setDocumentType("type1") - .setSource(jsonBuilder().startObject().startObject("doc") - .field("field1", "value1") - .endObject().endObject()) - .get(); - assertMatchCount(percolate, 1L); - assertThat(percolate.getMatches(), arrayWithSize(1)); - - internalCluster().rollingRestart(); - - logger.info("Running Cluster Health (wait for the shards to startup)"); - ClusterHealthResponse clusterHealth = 
client().admin().cluster().health(clusterHealthRequest().waitForYellowStatus().waitForActiveShards(1)).actionGet(); - logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); - assertThat(clusterHealth.isTimedOut(), equalTo(false)); - SearchResponse countResponse = client().prepareSearch().setSize(0).setTypes(PercolatorService.TYPE_NAME).setQuery(matchAllQuery()).get(); - assertHitCount(countResponse, 1L); - - DeleteIndexResponse actionGet = client().admin().indices().prepareDelete("test").get(); - assertThat(actionGet.isAcknowledged(), equalTo(true)); - assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=text").addMapping(PercolatorService.TYPE_NAME, "color", "type=text")); - clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForYellowStatus().waitForActiveShards(1)).actionGet(); - logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); - assertThat(clusterHealth.isTimedOut(), equalTo(false)); - assertThat(client().prepareSearch().setSize(0).setTypes(PercolatorService.TYPE_NAME).setQuery(matchAllQuery()).get().getHits().totalHits(), equalTo(0L)); - - percolate = client().preparePercolate() - .setIndices("test").setDocumentType("type1") - .setSource(jsonBuilder().startObject().startObject("doc") - .field("field1", "value1") - .endObject().endObject()) - .get(); - assertMatchCount(percolate, 0L); - assertThat(percolate.getMatches(), emptyArray()); - - logger.info("--> register a query"); - client().prepareIndex("test", PercolatorService.TYPE_NAME, "kuku") - .setSource(jsonBuilder().startObject() - .field("color", "blue") - .field("query", termQuery("field1", "value1")) - .endObject()) - .setRefresh(true) - .get(); - - assertThat(client().prepareSearch().setSize(0).setTypes(PercolatorService.TYPE_NAME).setQuery(matchAllQuery()).get().getHits().totalHits(), equalTo(1L)); - - percolate = client().preparePercolate() - .setIndices("test").setDocumentType("type1") - 
.setSource(jsonBuilder().startObject().startObject("doc") - .field("field1", "value1") - .endObject().endObject()) - .get(); - assertMatchCount(percolate, 1L); - assertThat(percolate.getMatches(), arrayWithSize(1)); - } - - public void testLoadingPercolateQueriesDuringCloseAndOpen() throws Exception { - internalCluster().startNode(); - internalCluster().startNode(); - - assertAcked(client().admin().indices().prepareCreate("test") - .setSettings(settingsBuilder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 2) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1))); - ensureGreen(); - - logger.info("--> Add dummy docs"); - client().prepareIndex("test", "type1", "1").setSource("field1", 0).get(); - client().prepareIndex("test", "type2", "1").setSource("field1", 1).get(); - - logger.info("--> register a queries"); - for (int i = 1; i <= 100; i++) { - client().prepareIndex("test", PercolatorService.TYPE_NAME, Integer.toString(i)) - .setSource(jsonBuilder().startObject() - .field("query", rangeQuery("field1").from(0).to(i)) - .endObject()) - .get(); - } - refresh(); - - logger.info("--> Percolate doc with field1=95"); - PercolateResponse response = client().preparePercolate() - .setIndices("test").setDocumentType("type1") - .setSource(jsonBuilder().startObject().startObject("doc").field("field1", 95).endObject().endObject()) - .get(); - assertMatchCount(response, 6L); - assertThat(response.getMatches(), arrayWithSize(6)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("95", "96", "97", "98", "99", "100")); - - logger.info("--> Close and open index to trigger percolate queries loading..."); - assertAcked(client().admin().indices().prepareClose("test")); - assertAcked(client().admin().indices().prepareOpen("test")); - ensureGreen(); - - logger.info("--> Percolate doc with field1=100"); - response = client().preparePercolate() - .setIndices("test").setDocumentType("type1") - 
.setSource(jsonBuilder().startObject().startObject("doc").field("field1", 100).endObject().endObject()).get(); - - assertMatchCount(response, 1L); - assertThat(response.getMatches(), arrayWithSize(1)); - assertThat(response.getMatches()[0].getId().string(), equalTo("100")); - } - - public void testPercolatorRecovery() throws Exception { - // 3 nodes, 2 primary + 2 replicas per primary, so each node should have a copy of the data. - // We only start and stop nodes 2 and 3, so all requests should succeed and never be partial. - internalCluster().startNode(settingsBuilder().put("node.stay", true)); - internalCluster().startNode(settingsBuilder().put("node.stay", false)); - internalCluster().startNode(settingsBuilder().put("node.stay", false)); - ensureGreen(); - client().admin().indices().prepareCreate("test") - .setSettings(settingsBuilder() - .put("index.number_of_shards", 2) - .put("index.number_of_replicas", 2) - ) - .get(); - ensureGreen(); - - final Client client = internalCluster().client(input -> input.getAsBoolean("node.stay", true)); - final int numQueries = randomIntBetween(50, 100); - logger.info("--> register a queries"); - for (int i = 0; i < numQueries; i++) { - client.prepareIndex("test", PercolatorService.TYPE_NAME, Integer.toString(i)) - .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) - .get(); - } - refresh(); - - final String document = "{\"field\" : \"a\"}"; - client.prepareIndex("test", "type", "1") - .setSource(document) - .get(); - - final Lock lock = new ReentrantLock(); - final AtomicBoolean run = new AtomicBoolean(true); - final AtomicReference error = new AtomicReference<>(); - Runnable r = () -> { - try { - while (run.get()) { - PercolateRequestBuilder percolateBuilder = client.preparePercolate() - .setIndices("test").setDocumentType("type").setSize(numQueries); - if (randomBoolean()) { - percolateBuilder.setPercolateDoc(docBuilder().setDoc(document)); - } else { - 
percolateBuilder.setGetRequest(Requests.getRequest("test").type("type").id("1")); - } - PercolateResponse response; - try { - lock.lock(); - response = percolateBuilder.get(); - } finally { - lock.unlock(); - } - assertNoFailures(response); - assertThat(response.getSuccessfulShards(), equalTo(response.getTotalShards())); - assertThat(response.getCount(), equalTo((long) numQueries)); - assertThat(response.getMatches().length, equalTo(numQueries)); - } - } catch (Throwable t) { - logger.info("Error in percolate thread...", t); - run.set(false); - error.set(t); - } - }; - Thread t = new Thread(r); - t.start(); - Predicate nodePredicate = input -> !input.getAsBoolean("node.stay", false); - try { - // 1 index, 2 primaries, 2 replicas per primary - for (int i = 0; i < 4; i++) { - try { - lock.lock(); - internalCluster().stopRandomNode(nodePredicate); - } finally { - lock.unlock(); - } - client.admin().cluster().prepareHealth("test") - .setWaitForEvents(Priority.LANGUID) - .setTimeout(TimeValue.timeValueMinutes(2)) - .setWaitForYellowStatus() - .setWaitForActiveShards(4) // 2 nodes, so 4 shards (2 primaries, 2 replicas) - .get(); - assertThat(error.get(), nullValue()); - try { - lock.lock(); - internalCluster().stopRandomNode(nodePredicate); - } finally { - lock.unlock(); - } - client.admin().cluster().prepareHealth("test") - .setWaitForEvents(Priority.LANGUID) - .setTimeout(TimeValue.timeValueMinutes(2)) - .setWaitForYellowStatus() - .setWaitForActiveShards(2) // 1 node, so 2 shards (2 primaries, 0 replicas) - .get(); - assertThat(error.get(), nullValue()); - internalCluster().startNode(settingsBuilder().put("node.stay", false)); - client.admin().cluster().prepareHealth("test") - .setWaitForEvents(Priority.LANGUID) - .setTimeout(TimeValue.timeValueMinutes(2)) - .setWaitForYellowStatus() - .setWaitForActiveShards(4) // 2 nodes, so 4 shards (2 primaries, 2 replicas) - .get(); - assertThat(error.get(), nullValue()); - 
internalCluster().startNode(settingsBuilder().put("node.stay", false)); - client.admin().cluster().prepareHealth("test") - .setWaitForEvents(Priority.LANGUID) - .setTimeout(TimeValue.timeValueMinutes(2)) - .setWaitForGreenStatus() // We're confirm the shard settings, so green instead of yellow - .setWaitForActiveShards(6) // 3 nodes, so 6 shards (2 primaries, 4 replicas) - .get(); - assertThat(error.get(), nullValue()); - } - } finally { - run.set(false); - } - t.join(); - assertThat(error.get(), nullValue()); - } - -} diff --git a/core/src/test/java/org/elasticsearch/percolator/TTLPercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/TTLPercolatorIT.java deleted file mode 100644 index f85b12d85ace..000000000000 --- a/core/src/test/java/org/elasticsearch/percolator/TTLPercolatorIT.java +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.percolator; - -import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.percolate.PercolateResponse; -import org.elasticsearch.client.Client; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.AlreadyExpiredException; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.hamcrest.Matchers; - -import java.io.IOException; -import java.util.concurrent.TimeUnit; - -import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.percolator.PercolatorTestUtil.convertFromTextArray; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertMatchCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.arrayContaining; -import static org.hamcrest.Matchers.emptyArray; -import static org.hamcrest.Matchers.equalTo; - -/** - */ -@ClusterScope(scope = ESIntegTestCase.Scope.TEST) -public class TTLPercolatorIT extends ESIntegTestCase { - private static final long PURGE_INTERVAL = 200; - - @Override - protected void beforeIndexDeletion() { - } - - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return settingsBuilder() - .put(super.nodeSettings(nodeOrdinal)) - .put("indices.ttl.interval", PURGE_INTERVAL, TimeUnit.MILLISECONDS) - .build(); - } - - public void testPercolatingWithTimeToLive() throws Exception { - final Client client = client(); - ensureGreen(); - - String percolatorMapping = XContentFactory.jsonBuilder().startObject().startObject(PercolatorService.TYPE_NAME) - 
.startObject("_ttl").field("enabled", true).endObject() - .startObject("_timestamp").field("enabled", true).endObject() - .endObject().endObject().string(); - - String typeMapping = XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("_ttl").field("enabled", true).endObject() - .startObject("_timestamp").field("enabled", true).endObject() - .startObject("properties").startObject("field1").field("type", "text").endObject().endObject() - .endObject().endObject().string(); - - client.admin().indices().prepareCreate("test") - .setSettings(settingsBuilder().put("index.number_of_shards", 2)) - .addMapping(PercolatorService.TYPE_NAME, percolatorMapping) - .addMapping("type1", typeMapping) - .execute().actionGet(); - ensureGreen(); - - final NumShards test = getNumShards("test"); - - long ttl = 1500; - long now = System.currentTimeMillis(); - client.prepareIndex("test", PercolatorService.TYPE_NAME, "kuku").setSource(jsonBuilder() - .startObject() - .startObject("query") - .startObject("term") - .field("field1", "value1") - .endObject() - .endObject() - .endObject() - ).setRefresh(true).setTTL(ttl).execute().actionGet(); - - IndicesStatsResponse response = client.admin().indices().prepareStats("test") - .clear().setIndexing(true) - .execute().actionGet(); - assertThat(response.getIndices().get("test").getTotal().getIndexing().getTotal().getIndexCount(), equalTo((long)test.dataCopies)); - - PercolateResponse percolateResponse = client.preparePercolate() - .setIndices("test").setDocumentType("type1") - .setSource(jsonBuilder() - .startObject() - .startObject("doc") - .field("field1", "value1") - .endObject() - .endObject() - ).execute().actionGet(); - assertNoFailures(percolateResponse); - if (percolateResponse.getMatches().length == 0) { - // OK, ttl + purgeInterval has passed (slow machine or many other tests were running at the same time - GetResponse getResponse = client.prepareGet("test", PercolatorService.TYPE_NAME, 
"kuku").execute().actionGet(); - assertThat(getResponse.isExists(), equalTo(false)); - response = client.admin().indices().prepareStats("test") - .clear().setIndexing(true) - .execute().actionGet(); - long currentDeleteCount = response.getIndices().get("test").getTotal().getIndexing().getTotal().getDeleteCount(); - assertThat(currentDeleteCount, equalTo((long)test.dataCopies)); - return; - } - - assertThat(convertFromTextArray(percolateResponse.getMatches(), "test"), arrayContaining("kuku")); - long timeSpent = System.currentTimeMillis() - now; - long waitTime = ttl + PURGE_INTERVAL - timeSpent; - if (waitTime >= 0) { - Thread.sleep(waitTime); // Doesn't make sense to check the deleteCount before ttl has expired - } - - // See comment in SimpleTTLTests - logger.info("Checking if the ttl purger has run"); - assertTrue(awaitBusy(() -> { - IndicesStatsResponse indicesStatsResponse = client.admin().indices().prepareStats("test").clear().setIndexing(true).get(); - // TTL deletes one doc, but it is indexed in the primary shard and replica shards - return indicesStatsResponse.getIndices().get("test").getTotal().getIndexing().getTotal().getDeleteCount() == test.dataCopies; - }, 5, TimeUnit.SECONDS)); - - percolateResponse = client.preparePercolate() - .setIndices("test").setDocumentType("type1") - .setSource(jsonBuilder() - .startObject() - .startObject("doc") - .field("field1", "value1") - .endObject() - .endObject() - ).execute().actionGet(); - assertMatchCount(percolateResponse, 0L); - assertThat(percolateResponse.getMatches(), emptyArray()); - } - - public void testEnsureTTLDoesNotCreateIndex() throws IOException, InterruptedException { - ensureGreen(); - client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder() - .put("indices.ttl.interval", 60, TimeUnit.SECONDS) // 60 sec - .build()).get(); - - String typeMapping = XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("_ttl").field("enabled", true).endObject() 
- .endObject().endObject().string(); - - client().admin().indices().prepareCreate("test") - .setSettings(settingsBuilder().put("index.number_of_shards", 1)) - .addMapping("type1", typeMapping) - .execute().actionGet(); - ensureGreen(); - client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder() - .put("indices.ttl.interval", 1, TimeUnit.SECONDS) - .build()).get(); - - for (int i = 0; i < 100; i++) { - logger.debug("index doc {} ", i); - try { - client().prepareIndex("test", "type1", "" + i).setSource(jsonBuilder() - .startObject() - .startObject("query") - .startObject("term") - .field("field1", "value1") - .endObject() - .endObject() - .endObject() - ).setTTL(randomIntBetween(1, 500)).setRefresh(true).execute().actionGet(); - } catch (MapperParsingException e) { - logger.info("failed indexing {}", e, i); - // if we are unlucky the TTL is so small that we see the expiry date is already in the past when - // we parse the doc ignore those... - assertThat(e.getCause(), Matchers.instanceOf(AlreadyExpiredException.class)); - } - - } - refresh(); - assertTrue(awaitBusy(() -> { - IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats("test").clear().setIndexing(true).get(); - logger.debug("delete count [{}]", indicesStatsResponse.getIndices().get("test").getTotal().getIndexing().getTotal().getDeleteCount()); - // TTL deletes one doc, but it is indexed in the primary shard and replica shards - return indicesStatsResponse.getIndices().get("test").getTotal().getIndexing().getTotal().getDeleteCount() != 0; - }, 5, TimeUnit.SECONDS)); - internalCluster().wipeIndices("test"); - client().admin().indices().prepareCreate("test") - .addMapping("type1", typeMapping) - .execute().actionGet(); - } -} diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java index 84b3b5a36a26..bc2c38ef6014 100644 --- 
a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java @@ -275,7 +275,7 @@ public class HighlightBuilderTests extends ESTestCase { Index index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_"); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings); // shard context will only need indicesQueriesRegistry for building Query objects nested in highlighter - QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, indicesQueriesRegistry) { + QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, indicesQueriesRegistry, null) { @Override public MappedFieldType fieldMapper(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name); diff --git a/core/src/test/java/org/elasticsearch/search/percolator/PercolatorQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/percolator/PercolatorQuerySearchIT.java new file mode 100644 index 000000000000..55f2ab801213 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/percolator/PercolatorQuerySearchIT.java @@ -0,0 +1,131 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.percolator; + +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.index.percolator.PercolatorFieldMapper; +import org.elasticsearch.search.highlight.HighlightBuilder; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESSingleNodeTestCase; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchQuery; +import static org.elasticsearch.index.query.QueryBuilders.percolatorQuery; +import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.hamcrest.Matchers.equalTo; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; + +public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { + + public void testPercolatorQuery() throws Exception { + createIndex("test", client().admin().indices().prepareCreate("test") + .addMapping("type", "field1", "type=keyword", "field2", "type=keyword") + ); + + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) + .get(); + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") + .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "value")).endObject()) + .get(); + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") + .setSource(jsonBuilder().startObject().field("query", boolQuery() + .must(matchQuery("field1", "value")) + .must(matchQuery("field2", "value")) + ).endObject()).get(); + 
client().admin().indices().prepareRefresh().get(); + + BytesReference source = jsonBuilder().startObject().endObject().bytes(); + logger.info("percolating empty doc"); + SearchResponse response = client().prepareSearch() + .setQuery(percolatorQuery("type", source)) + .get(); + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + + source = jsonBuilder().startObject().field("field1", "value").endObject().bytes(); + logger.info("percolating doc with 1 field"); + response = client().prepareSearch() + .setQuery(percolatorQuery("type", source)) + .addSort("_uid", SortOrder.ASC) + .get(); + assertHitCount(response, 2); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + + source = jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject().bytes(); + logger.info("percolating doc with 2 fields"); + response = client().prepareSearch() + .setQuery(percolatorQuery("type", source)) + .addSort("_uid", SortOrder.ASC) + .get(); + assertHitCount(response, 3); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + assertThat(response.getHits().getAt(2).getId(), equalTo("3")); + } + + public void testPercolatorQueryWithHighlighting() throws Exception { + createIndex("test", client().admin().indices().prepareCreate("test") + .addMapping("type", "field1", "type=text") + ); + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "brown fox")).endObject()) + .execute().actionGet(); + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") + .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "lazy dog")).endObject()) + .execute().actionGet(); + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") + 
.setSource(jsonBuilder().startObject().field("query", termQuery("field1", "jumps")).endObject()) + .execute().actionGet(); + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") + .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "dog")).endObject()) + .execute().actionGet(); + client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "5") + .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "fox")).endObject()) + .execute().actionGet(); + client().admin().indices().prepareRefresh().get(); + + BytesReference document = jsonBuilder().startObject() + .field("field1", "The quick brown fox jumps over the lazy dog") + .endObject().bytes(); + SearchResponse searchResponse = client().prepareSearch() + .setQuery(percolatorQuery("type", document)) + .highlighter(new HighlightBuilder().field("field1")) + .addSort("_uid", SortOrder.ASC) + .get(); + assertHitCount(searchResponse, 5); + + assertThat(searchResponse.getHits().getAt(0).getHighlightFields().get("field1").fragments()[0].string(), + equalTo("The quick brown fox jumps over the lazy dog")); + assertThat(searchResponse.getHits().getAt(1).getHighlightFields().get("field1").fragments()[0].string(), + equalTo("The quick brown fox jumps over the lazy dog")); + assertThat(searchResponse.getHits().getAt(2).getHighlightFields().get("field1").fragments()[0].string(), + equalTo("The quick brown fox jumps over the lazy dog")); + assertThat(searchResponse.getHits().getAt(3).getHighlightFields().get("field1").fragments()[0].string(), + equalTo("The quick brown fox jumps over the lazy dog")); + assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("field1").fragments()[0].string(), + equalTo("The quick brown fox jumps over the lazy dog"));; + } + +} diff --git a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java index 
ef14df83cf42..e0673a64ee17 100644 --- a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java @@ -162,7 +162,8 @@ public class QueryRescoreBuilderTests extends ESTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), indexSettings); // shard context will only need indicesQueriesRegistry for building Query objects nested in query rescorer - QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, indicesQueriesRegistry) { + QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, indicesQueriesRegistry, + null) { @Override public MappedFieldType fieldMapper(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 04bcd7e71b4b..efc7521c05e0 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -38,7 +38,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.percolator.PercolatorService; +import org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.sort.FieldSortBuilder; @@ -347,7 +347,7 @@ public class CompletionSuggestSearchIT 
extends ESIntegTestCase { .setSource(source).execute().actionGet(); } - client().prepareIndex(INDEX, PercolatorService.TYPE_NAME, "4") + client().prepareIndex(INDEX, PercolatorFieldMapper.TYPE_NAME, "4") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolStatsTests.java b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolStatsTests.java index b18be91f575d..25b0e06c4f1f 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolStatsTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolStatsTests.java @@ -70,7 +70,6 @@ public class ThreadPoolStatsTests extends ESTestCase { stats.add(new ThreadPoolStats.Stats(ThreadPool.Names.WARMER, -1, 0, 0, 0, 0, 0L)); stats.add(new ThreadPoolStats.Stats(ThreadPool.Names.GENERIC, -1, 0, 0, 0, 0, 0L)); stats.add(new ThreadPoolStats.Stats(ThreadPool.Names.FORCE_MERGE, -1, 0, 0, 0, 0, 0L)); - stats.add(new ThreadPoolStats.Stats(ThreadPool.Names.PERCOLATE, -1, 0, 0, 0, 0, 0L)); stats.add(new ThreadPoolStats.Stats(ThreadPool.Names.SAME, -1, 0, 0, 0, 0, 0L)); @@ -103,7 +102,6 @@ public class ThreadPoolStatsTests extends ESTestCase { } assertThat(names, contains(ThreadPool.Names.FORCE_MERGE, ThreadPool.Names.GENERIC, - ThreadPool.Names.PERCOLATE, ThreadPool.Names.SAME, ThreadPool.Names.SEARCH, ThreadPool.Names.SUGGEST, diff --git a/docs/java-api/index.asciidoc b/docs/java-api/index.asciidoc index 012633f1e4b9..fbd3fe331656 100644 --- a/docs/java-api/index.asciidoc +++ b/docs/java-api/index.asciidoc @@ -142,8 +142,6 @@ include::search.asciidoc[] include::aggs.asciidoc[] -include::percolate.asciidoc[] - include::query-dsl.asciidoc[] include::indexed-scripts.asciidoc[] diff --git a/docs/java-api/percolate.asciidoc b/docs/java-api/query-dsl/percolator-query.asciidoc similarity index 63% rename from docs/java-api/percolate.asciidoc rename to 
docs/java-api/query-dsl/percolator-query.asciidoc index a08e09b2afe2..874c46c79860 100644 --- a/docs/java-api/percolate.asciidoc +++ b/docs/java-api/query-dsl/percolator-query.asciidoc @@ -1,12 +1,8 @@ -[[percolate]] -== Percolate API +[[java-query-percolator-query]] +==== Percolator query -The percolator allows one to register queries against an index, and then -send `percolate` requests which include a doc, getting back the -queries that match on that doc out of the set of registered queries. - -Read the main {ref}/search-percolate.html[percolate] -documentation before reading this guide. +See: + * {ref}/query-percolator-query.html[Percolator Query] [source,java] -------------------------------------------------- @@ -37,14 +33,12 @@ docBuilder.field("doc").startObject(); //This is needed to designate the documen docBuilder.field("content", "This is amazing!"); docBuilder.endObject(); //End of the doc field docBuilder.endObject(); //End of the JSON root object -//Percolate -PercolateResponse response = client.preparePercolate() - .setIndices("myIndexName") - .setDocumentType("myDocumentType") - .setSource(docBuilder).execute().actionGet(); +// Percolate, by executing the percolator query in the query dsl: +SearchResponse response = client().prepareSearch("myIndexName") + .setQuery(QueryBuilders.percolatorQuery("myDocumentType", docBuilder.bytes())) + .get(); //Iterate over the results -for(PercolateResponse.Match match : response) { - //Handle the result which is the name of - //the query in the percolator +for(SearchHit hit : response.getHits()) { + // Percolator queries as hit } -------------------------------------------------- diff --git a/docs/java-api/query-dsl/special-queries.asciidoc b/docs/java-api/query-dsl/special-queries.asciidoc index 0df5af890ec3..d5c9db073917 100644 --- a/docs/java-api/query-dsl/special-queries.asciidoc +++ b/docs/java-api/query-dsl/special-queries.asciidoc @@ -27,3 +27,5 @@ include::template-query.asciidoc[] 
include::script-query.asciidoc[] +include::percolator-query.asciidoc[] + diff --git a/docs/reference/migration/migrate_5_0/percolator.asciidoc b/docs/reference/migration/migrate_5_0/percolator.asciidoc index 3c560182c87b..73f262afb700 100644 --- a/docs/reference/migration/migrate_5_0/percolator.asciidoc +++ b/docs/reference/migration/migrate_5_0/percolator.asciidoc @@ -10,12 +10,17 @@ indices created from 5.0 onwards, the terms used in a percolator query are automatically indexed to allow for more efficient query selection during percolation. +==== Percolate and multi percolator APIs + +Percolator and multi percolate APIs have been deprecated and will be removed in the next major release. These APIs have +been replaced by the `percolator` query that can be used in the search and multi search APIs. + ==== Percolator mapping -The percolate API can no longer accept documents that reference fields that -don't already exist in the mapping. +The `percolator` query can no longer accept documents that reference fields +that don't already exist in the mapping. Before the percolate API allowed this. -The percolate API no longer modifies the mappings. Before the percolate API +The `percolator` query no longer modifies the mappings. Before the percolate API could be used to dynamically introduce new fields to the mappings based on the fields in the document being percolated. This no longer works, because these unmapped fields are not persisted in the mapping. @@ -27,15 +32,12 @@ response, unless the `.percolate` type was specified explicitly in the search request. Now, percolator documents are treated in the same way as any other document and are returned by search requests. -==== Percolator `size` default - -The percolator by default sets the `size` option to `10` whereas before this -was unlimited. - -==== Percolate API - -When percolating an existing document then specifying a document in the source -of the percolate request is not allowed any more. 
+==== Percolating existing document +When percolating an existing document then also specifying a document as source in the +`percolator` query is not allowed any more. Before the percolate API allowed and ignored +the existing document. +==== Percolate Stats +Percolate stats have been replaced with percolator query cache stats in nodes stats and cluster stats APIs. \ No newline at end of file diff --git a/docs/reference/query-dsl/percolator-query.asciidoc b/docs/reference/query-dsl/percolator-query.asciidoc new file mode 100644 index 000000000000..f2b1e2b98056 --- /dev/null +++ b/docs/reference/query-dsl/percolator-query.asciidoc @@ -0,0 +1,424 @@ +[[query-dsl-percolator-query]] +=== Percolator Query + +Traditionally you design documents based on your data, store them into an index, and then define queries via the search API +in order to retrieve these documents. The percolator works in the opposite direction. First you store queries into an +index and then you use the `percolator` query to search for the queries which match a specified document (or documents). + +The reason that queries can be stored comes from the fact that in Elasticsearch both documents and queries are defined in +JSON. This allows you to embed queries into documents via the index API. Elasticsearch can extract the query from a +document and make it available for search via the `percolator` query. Since documents are also defined as JSON, +you can define a document in the `percolator` query. + +[IMPORTANT] +===================================== + +Fields referred to in a percolator query must *already* exist in the mapping +associated with the index used for percolation. In order to make sure these fields exist, +add or update a mapping via the <> or <> APIs. 
+ +===================================== + +[float] +=== Sample Usage + +Create an index with a mapping for the field `message`: + +[source,js] +-------------------------------------------------- +curl -XPUT 'localhost:9200/my-index' -d '{ + "mappings": { + "my-type": { + "properties": { + "message": { + "type": "string" + } + } + } + } +}' +-------------------------------------------------- + +Register a query in the percolator: + +[source,js] +-------------------------------------------------- +curl -XPUT 'localhost:9200/my-index/.percolator/1' -d '{ + "query" : { + "match" : { + "message" : "bonsai tree" + } + } +}' +-------------------------------------------------- + +Match a document to the registered percolator queries: + +[source,js] +-------------------------------------------------- +curl -XGET 'localhost:9200/my-index/_search' -d '{ + "query" : { + "percolator" : { + "document_type" : "my-type", + "document" : { + "message" : "A new bonsai tree in the office" + } + } + } +}' +-------------------------------------------------- + +The above request will yield the following response: + +[source,js] +-------------------------------------------------- +{ + "took": 5, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 5, + "failed": 0 + }, + "hits": { + "total": 1, + "max_score": 0, + "hits": [ + { <1> + "_index": "my-index", + "_type": ".percolator", + "_id": "1", + "_score": 0, + "_source": { + "query": { + "match": { + "message": "bonsai tree" + } + } + } + } + ] + } +} +-------------------------------------------------- + +<1> The percolate query with id `1` matches our document. + +[float] +=== Indexing Percolator Queries + +Percolate queries are stored as documents in a specific format and in an arbitrary index under a reserved type with the +name `.percolator`. The query itself is placed as is in a JSON object under the top level field `query`. 
+ +[source,js] +-------------------------------------------------- +{ + "query" : { + "match" : { + "field" : "value" + } + } +} +-------------------------------------------------- + +Since this is just an ordinary document, any field can be added to this document. This can be useful later on to only +percolate documents by specific queries. + +[source,js] +-------------------------------------------------- +{ + "query" : { + "match" : { + "field" : "value" + } + }, + "priority" : "high" +} +-------------------------------------------------- + +Just as with any other type, the `.percolator` type has a mapping, which you can configure via the mappings APIs. +The default percolate mapping doesn't index the query field, only stores it. + +Because `.percolate` is a type it also has a mapping. By default the following mapping is active: + +[source,js] +-------------------------------------------------- +{ + ".percolator" : { + "properties" : { + "query" : { + "type" : "percolator" + } + } + } +} +-------------------------------------------------- + +If needed, this mapping can be modified with the update mapping API. + +In order to un-register a percolate query the delete API can be used. So if the previously added query needs to be deleted +the following delete request needs to be executed: + +[source,js] +-------------------------------------------------- +curl -XDELETE localhost:9200/my-index/.percolator/1 +-------------------------------------------------- + +[float] +==== Parameters + +The following parameters are required when percolating a document: + +[horizontal] +`document_type`:: The type / mapping of the document being percolated. This parameter is always required. +`document`:: The source of the document being percolated. + +Instead of specifying the source of the document being percolated, the source can also be retrieved from an already +stored document. The `percolator` query will then internally execute a get request to fetch that document. 
+ +In that case the `document` parameter can be substituted with the following parameters: + +[horizontal] +`index`:: The index the document resides in. This is a required parameter. +`type`:: The type of the document to fetch. This is a required parameter. +`id`:: The id of the document to fetch. This is a required parameter. +`routing`:: Optionally, routing to be used to fetch document to percolate. +`preference`:: Optionally, preference to be used to fetch document to percolate. +`version`:: Optionally, the expected version of the document to be fetched. + +[float] +==== Dedicated Percolator Index + +Percolate queries can be added to any index. Instead of adding percolate queries to the index the data resides in, +these queries can also be added to a dedicated index. The advantage of this is that this dedicated percolator index +can have its own index settings (For example the number of primary and replica shards). If you choose to have a dedicated +percolate index, you need to make sure that the mappings from the normal index are also available on the percolate index. +Otherwise percolate queries can be parsed incorrectly. + +[float] +==== Percolating an Existing Document + +In order to percolate a newly indexed document, the `percolator` query can be used. Based on the response +from an index request, the `_id` and other meta information can be used to immediately percolate the newly added +document. + +[float] +===== Example + +Based on the previous example. 
+ +Index the document we want to percolate: + +[source,js] +-------------------------------------------------- +curl -XPUT "http://localhost:9200/my-index/message/1" -d' +{ + "message" : "A new bonsai tree in the office" +}' +-------------------------------------------------- + +Index response: + +[source,js] +-------------------------------------------------- +{ + "_index": "my-index", + "_type": "message", + "_id": "1", + "_version": 1, + "_shards": { + "total": 2, + "successful": 1, + "failed": 0 + }, + "created": true +} +-------------------------------------------------- + +Percolating an existing document, using the index response as a basis to build the new search request: + +[source,js] +-------------------------------------------------- +curl -XGET "http://localhost:9200/my-index/_search" -d' +{ + "query" : { + "percolator" : { + "document_type" : "my-type", + "index" : "my-index", + "type" : "message", + "id" : "1", + "version" : 1 <1> + } + } +}' +-------------------------------------------------- + +<1> The version is optional, but useful in certain cases. We can then ensure that we are trying to percolate +the document we just have indexed. A change may be made after we have indexed, and if that is the +case then the search request would fail with a version conflict error. + +The search response returned is identical as in the previous example. + +[float] +==== Percolator and highlighting + +The percolator query is handled in a special way when it comes to highlighting. The percolator queries hits are used +to highlight the document that is provided in the `percolator` query. Whereas with regular highlighting the query in +the search request is used to highlight the hits. + +[float] +===== Example + +This example is based on the mapping of the first example. 
+ +Add a percolator query: + +[source,js] +-------------------------------------------------- +curl -XPUT "http://localhost:9200/my-index/.percolator/1" -d' +{ + "query" : { + "match" : { + "message" : "brown fox" + } + } +}' +-------------------------------------------------- + +Add another percolator query: + +[source,js] +-------------------------------------------------- +curl -XPUT "http://localhost:9200/my-index/.percolator/2" -d' +{ + "query" : { + "match" : { + "message" : "lazy dog" + } + } +}' +-------------------------------------------------- + +Execute a search request with `percolator` and highlighting enabled: + +[source,js] +-------------------------------------------------- +curl -XGET "http://localhost:9200/my-index/_search" -d' +{ + "query" : { + "percolator" : { + "document_type" : "my-type", + "document" : { + "message" : "The quick brown fox jumps over the lazy dog" + } + } + }, + "highlight": { + "fields": { + "message": {} + } + } +}' +-------------------------------------------------- + +This will yield the following response. 
+ +[source,js] +-------------------------------------------------- +{ + "took": 14, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 5, + "failed": 0 + }, + "hits": { + "total": 2, + "max_score": 0, + "hits": [ + { + "_index": "my-index", + "_type": ".percolator", + "_id": "2", + "_score": 0, + "_source": { + "query": { + "match": { + "message": "lazy dog" + } + } + }, + "highlight": { + "message": [ + "The quick brown fox jumps over the lazy dog" <1> + ] + } + }, + { + "_index": "my-index", + "_type": ".percolator", + "_id": "1", + "_score": 0, + "_source": { + "query": { + "match": { + "message": "brown fox" + } + } + }, + "highlight": { + "message": [ + "The quick brown fox jumps over the lazy dog" <1> + ] + } + } + ] + } +} +-------------------------------------------------- + +<1> Instead of the query in the search request highlighting the percolator hits, the percolator queries are highlighting + the document defined in the `percolator` query. + +[float] +==== How it Works Under the Hood + +When indexing a document that contains a query in an index and the `.percolator` type, the query part of the documents gets +parsed into a Lucene query and is kept in memory until that percolator document is removed or the index containing the +`.percolator` type gets removed. So, all the active percolator queries are kept in memory. + +At search time, the document specified in the request gets parsed into a Lucene document and is stored in an in-memory +Lucene index. This in-memory index can just hold this one document and it is optimized for that. Then all the queries +that are registered to the index that the search request is targeted for, are going to be executed on this single document +in-memory index. This happens on each shard the search request needs to execute. + +By using `routing` or additional queries the amount of percolator queries that need to be executed can be reduced and thus +the time the search API needs to run can be decreased. 
+ +[float] +==== Important Notes + +Because the percolator query is processing one document at a time, it doesn't support queries and filters that run +against child documents such as `has_child` and `has_parent`. + +The percolator doesn't work with queries like `template` and `geo_shape` queries when these queries fetch documents +to substitute parts of the query. The reason is that the percolator stores the query terms during indexing in order to +speed up percolating in certain cases and this doesn't work if part of the query is defined in another document. +There is no way for the percolator to know if an external document has changed and even if this was the case the +percolator query has to be reindexed. + +The `wildcard` and `regexp` query natively use a lot of memory and because the percolator keeps the queries in memory +this can easily take up the available memory in the heap space. If possible try to use a `prefix` query or ngramming to +achieve the same result (with way less memory being used). + +[float] +==== Forcing Unmapped Fields to be Handled as Strings + +In certain cases it is unknown what kind of percolator queries do get registered, and if no field mapping exists for fields +that are referred by percolator queries then adding a percolator query fails. This means the mapping needs to be updated +to have the field with the appropriate settings, and then the percolator query can be added. But sometimes it is sufficient +if all unmapped fields are handled as if these were default string fields. In those cases one can configure the +`index.percolator.map_unmapped_fields_as_string` setting to `true` (defaults to `false`) and then if a field referred in +a percolator query does not exist, it will be handled as a default string field so that adding the percolator query doesn't +fail. 
\ No newline at end of file diff --git a/docs/reference/query-dsl/special-queries.asciidoc b/docs/reference/query-dsl/special-queries.asciidoc index 1a2d63d22658..16d0020bf10c 100644 --- a/docs/reference/query-dsl/special-queries.asciidoc +++ b/docs/reference/query-dsl/special-queries.asciidoc @@ -20,6 +20,10 @@ final query to execute. This query allows a script to act as a filter. Also see the <>. +<>:: + +This query finds queries that are stored as documents that match with +the specified document. include::mlt-query.asciidoc[] @@ -27,3 +31,5 @@ include::template-query.asciidoc[] include::script-query.asciidoc[] +include::percolator-query.asciidoc[] + diff --git a/docs/reference/search/percolate.asciidoc b/docs/reference/search/percolate.asciidoc index bb4040770ebc..44400f5b8161 100644 --- a/docs/reference/search/percolate.asciidoc +++ b/docs/reference/search/percolate.asciidoc @@ -3,511 +3,6 @@ added[5.0.0,Percolator queries modifications aren't visible immediately and a refresh is required] -added[5.0.0,Percolate api by defaults limits the number of matches to `10` whereas before this wasn't set] +added[5.0.0,Percolate and multi percolate APIs have been deprecated and has been replaced by <>] added[5.0.0,For indices created on or after version 5.0.0 the percolator automatically indexes the query terms with the percolator queries this allows the percolator to percolate documents quicker. It is advisable to reindex any pre 5.0.0 indices to take advantage of this new optimization] - -Traditionally you design documents based on your data, store them into an index, and then define queries via the search API -in order to retrieve these documents. The percolator works in the opposite direction. First you store queries into an -index and then, via the percolate API, you define documents in order to retrieve these queries. - -The reason that queries can be stored comes from the fact that in Elasticsearch both documents and queries are defined in -JSON. 
This allows you to embed queries into documents via the index API. Elasticsearch can extract the query from a -document and make it available to the percolate API. Since documents are also defined as JSON, you can define a document -in a request to the percolate API. - -[IMPORTANT] -===================================== - -Fields referred to in a percolator query must *already* exist in the mapping -associated with the index used for percolation. In order to make sure these fields exist, -add or update a mapping via the <> or <> APIs. - -===================================== - -[float] -=== Sample Usage - -Create an index with a mapping for the field `message`: - -[source,js] --------------------------------------------------- -curl -XPUT 'localhost:9200/my-index' -d '{ - "mappings": { - "my-type": { - "properties": { - "message": { - "type": "string" - } - } - } - } -}' --------------------------------------------------- - -Register a query in the percolator: - -[source,js] --------------------------------------------------- -curl -XPUT 'localhost:9200/my-index/.percolator/1' -d '{ - "query" : { - "match" : { - "message" : "bonsai tree" - } - } -}' --------------------------------------------------- - -Match a document to the registered percolator queries: - -[source,js] --------------------------------------------------- -curl -XGET 'localhost:9200/my-index/my-type/_percolate' -d '{ - "doc" : { - "message" : "A new bonsai tree in the office" - } -}' --------------------------------------------------- - -The above request will yield the following response: - -[source,js] --------------------------------------------------- -{ - "took" : 19, - "_shards" : { - "total" : 5, - "successful" : 5, - "failed" : 0 - }, - "total" : 1, - "matches" : [ <1> - { - "_index" : "my-index", - "_id" : "1" - } - ] -} --------------------------------------------------- - -<1> The percolate query with id `1` matches our document. 
- -[float] -=== Indexing Percolator Queries - -Percolate queries are stored as documents in a specific format and in an arbitrary index under a reserved type with the -name `.percolator`. The query itself is placed as is in a JSON object under the top level field `query`. - -[source,js] --------------------------------------------------- -{ - "query" : { - "match" : { - "field" : "value" - } - } -} --------------------------------------------------- - -Since this is just an ordinary document, any field can be added to this document. This can be useful later on to only -percolate documents by specific queries. - -[source,js] --------------------------------------------------- -{ - "query" : { - "match" : { - "field" : "value" - } - }, - "priority" : "high" -} --------------------------------------------------- - -On top of this, also a mapping type can be associated with this query. This allows to control how certain queries -like range queries, shape filters, and other query & filters that rely on mapping settings get constructed. This is -important since the percolate queries are indexed into the `.percolator` type, and the queries / filters that rely on -mapping settings would yield unexpected behaviour. Note: By default, field names do get resolved in a smart manner, -but in certain cases with multiple types this can lead to unexpected behavior, so being explicit about it will help. - -[source,js] --------------------------------------------------- -{ - "query" : { - "range" : { - "created_at" : { - "gte" : "2010-01-01T00:00:00", - "lte" : "2011-01-01T00:00:00" - } - } - }, - "type" : "tweet", - "priority" : "high" -} --------------------------------------------------- - -In the above example the range query really gets parsed into a Lucene numeric range query, based on the settings for -the field `created_at` in the type `tweet`. - -Just as with any other type, the `.percolator` type has a mapping, which you can configure via the mappings APIs. 
-The default percolate mapping doesn't index the query field, only stores it. - -Because `.percolate` is a type it also has a mapping. By default the following mapping is active: - -[source,js] --------------------------------------------------- -{ - ".percolator" : { - "properties" : { - "query" : { - "type" : "object", - "enabled" : false - } - } - } -} --------------------------------------------------- - -If needed, this mapping can be modified with the update mapping API. - -In order to un-register a percolate query the delete API can be used. So if the previous added query needs to be deleted -the following delete requests needs to be executed: - -[source,js] --------------------------------------------------- -curl -XDELETE localhost:9200/my-index/.percolator/1 --------------------------------------------------- - -[float] -=== Percolate API - -The percolate API executes in a distributed manner, meaning it executes on all shards an index points to. - -.Required options -* `index` - The index that contains the `.percolator` type. This can also be an alias. -* `type` - The type of the document to be percolated. The mapping of that type is used to parse document. -* `doc` - The actual document to percolate. Unlike the other two options this needs to be specified in the request body. Note: This isn't required when percolating an existing document. - -[source,js] --------------------------------------------------- -curl -XGET 'localhost:9200/twitter/tweet/_percolate' -d '{ - "doc" : { - "created_at" : "2010-10-10T00:00:00", - "message" : "some text" - } -}' --------------------------------------------------- - -.Additional supported query string options -* `routing` - In case the percolate queries are partitioned by a custom routing value, that routing option makes sure -that the percolate request only gets executed on the shard where the routing value is partitioned to. This means that -the percolate request only gets executed on one shard instead of all shards. 
Multiple values can be specified as a -comma separated string, in that case the request can be be executed on more than one shard. -* `preference` - Controls which shard replicas are preferred to execute the request on. Works the same as in the search API. -* `ignore_unavailable` - Controls if missing concrete indices should silently be ignored. Same as is in the search API. -* `percolate_format` - If `ids` is specified then the matches array in the percolate response will contain a string -array of the matching ids instead of an array of objects. This can be useful to reduce the amount of data being send -back to the client. Obviously if there are two percolator queries with same id from different indices there is no way -to find out which percolator query belongs to what index. Any other value to `percolate_format` will be ignored. - -.Additional request body options -* `filter` - Reduces the number queries to execute during percolating. Only the percolator queries that match with the -filter will be included in the percolate execution. The filter option works in near realtime, so a refresh needs to have -occurred for the filter to included the latest percolate queries. -* `query` - Same as the `filter` option, but also the score is computed. The computed scores can then be used by the -`track_scores` and `sort` option. -* `size` - Defines to maximum number of matches (percolate queries) to be returned. Defaults to 10. -* `track_scores` - Whether the `_score` is included for each match. The `_score` is based on the query and represents -how the query matched the *percolate query's metadata*, *not* how the document (that is being percolated) matched -the query. The `query` option is required for this option. Defaults to `false`. -* `sort` - Define a sort specification like in the search API. Currently only sorting `_score` reverse (default relevancy) -is supported. Other sort fields will throw an exception. 
The `size` and `query` option are required for this setting. Like -`track_score` the score is based on the query and represents how the query matched to the percolate query's metadata -and *not* how the document being percolated matched to the query. -* `aggs` - Allows aggregation definitions to be included. The aggregations are based on the matching percolator queries, -look at the aggregation documentation on how to define aggregations. -* `highlight` - Allows highlight definitions to be included. The document being percolated is being highlight for each -matching query. This allows you to see how each match is highlighting the document being percolated. See highlight -documentation on how to define highlights. The `size` option is required for highlighting, the performance of highlighting - in the percolate API depends of how many matches are being highlighted. - -[float] -=== Dedicated Percolator Index - -Percolate queries can be added to any index. Instead of adding percolate queries to the index the data resides in, -these queries can also be added to a dedicated index. The advantage of this is that this dedicated percolator index -can have its own index settings (For example the number of primary and replica shards). If you choose to have a dedicated -percolate index, you need to make sure that the mappings from the normal index are also available on the percolate index. -Otherwise percolate queries can be parsed incorrectly. - -[float] -=== Filtering Executed Queries - -Filtering allows to reduce the number of queries, any filter that the search API supports, (except the ones mentioned in important notes) -can also be used in the percolate API. The filter only works on the metadata fields. The `query` field isn't indexed by -default. 
Based on the query we indexed before, the following filter can be defined: - -[source,js] --------------------------------------------------- -curl -XGET localhost:9200/test/type1/_percolate -d '{ - "doc" : { - "field" : "value" - }, - "filter" : { - "term" : { - "priority" : "high" - } - } -}' --------------------------------------------------- - -[float] -=== Percolator Count API - -The count percolate API, only keeps track of the number of matches and doesn't keep track of the actual matches -Example: - -[source,js] --------------------------------------------------- -curl -XGET 'localhost:9200/my-index/my-type/_percolate/count' -d '{ - "doc" : { - "message" : "some message" - } -}' --------------------------------------------------- - -Response: - -[source,js] --------------------------------------------------- -{ - ... // header - "total" : 3 -} --------------------------------------------------- - - -[float] -=== Percolating an Existing Document - -In order to percolate a newly indexed document, the percolate existing document can be used. Based on the response -from an index request, the `_id` and other meta information can be used to immediately percolate the newly added -document. - -.Supported options for percolating an existing document on top of existing percolator options: -* `id` - The id of the document to retrieve the source for. -* `percolate_index` - The index containing the percolate queries. Defaults to the `index` defined in the url. -* `percolate_type` - The percolate type (used for parsing the document). Default to `type` defined in the url. -* `routing` - The routing value to use when retrieving the document to percolate. -* `preference` - Which shard to prefer when retrieving the existing document. -* `percolate_routing` - The routing value to use when percolating the existing document. -* `percolate_preference` - Which shard to prefer when executing the percolate request. -* `version` - Enables a version check. 
If the fetched document's version isn't equal to the specified version then the request fails with a version conflict and the percolation request is aborted. - -Internally the percolate API will issue a GET request for fetching the `_source` of the document to percolate. -For this feature to work, the `_source` for documents to be percolated needs to be stored. - -If percolating an existing document and the a document is also specified in the source of the percolate request then -an error is thrown. Either the document to percolate should be specified in the source or be defined by specifying the -index, type and id. - -[float] -==== Example - -Index response: - -[source,js] --------------------------------------------------- -{ - "_index" : "my-index", - "_type" : "message", - "_id" : "1", - "_version" : 1, - "created" : true -} --------------------------------------------------- - -Percolating an Existing Document: - -[source,js] --------------------------------------------------- -curl -XGET 'localhost:9200/my-index1/message/1/_percolate' --------------------------------------------------- - -The response is the same as with the regular percolate API. - -[float] -=== Multi Percolate API - -The multi percolate API allows to bundle multiple percolate requests into a single request, similar to what the multi -search API does to search requests. The request body format is line based. Each percolate request item takes two lines, -the first line is the header and the second line is the body. - -The header can contain any parameter that normally would be set via the request path or query string parameters. -There are several percolate actions, because there are multiple types of percolate requests. - -.Supported actions: -* `percolate` - Action for defining a regular percolate request. -* `count` - Action for defining a count percolate request. - -Depending on the percolate action different parameters can be specified. 
For example the percolate and percolate existing -document actions support different parameters. - -.The following endpoints are supported -* `GET|POST /[index]/[type]/_mpercolate` -* `GET|POST /[index]/_mpercolate` -* `GET|POST /_mpercolate` - -The `index` and `type` defined in the url path are the default index and type. - -[float] -==== Example - -Request: - -[source,js] --------------------------------------------------- -curl -XGET 'localhost:9200/twitter/tweet/_mpercolate' --data-binary "@requests.txt"; echo --------------------------------------------------- - -The index `twitter` is the default index, and the type `tweet` is the default type and will be used in the case a header -doesn't specify an index or type. - -requests.txt: - -[source,js] --------------------------------------------------- -{"percolate" : {"index" : "twitter", "type" : "tweet"}} -{"doc" : {"message" : "some text"}} -{"percolate" : {"index" : "twitter", "type" : "tweet", "id" : "1"}} -{"percolate" : {"index" : "users", "type" : "user", "id" : "3", "percolate_index" : "users_2012" }} -{"size" : 10} -{"count" : {"index" : "twitter", "type" : "tweet"}} -{"doc" : {"message" : "some other text"}} -{"count" : {"index" : "twitter", "type" : "tweet", "id" : "1"}} --------------------------------------------------- - -For a percolate existing document item (headers with the `id` field), the response can be an empty JSON object. -All the required options are set in the header. 
- -Response: - -[source,js] --------------------------------------------------- -{ - "responses" : [ - { - "took" : 24, - "_shards" : { - "total" : 5, - "successful" : 5, - "failed" : 0, - }, - "total" : 3, - "matches" : [ - { - "_index": "twitter", - "_id": "1" - }, - { - "_index": "twitter", - "_id": "2" - }, - { - "_index": "twitter", - "_id": "3" - } - ] - }, - { - "took" : 12, - "_shards" : { - "total" : 5, - "successful" : 5, - "failed" : 0, - }, - "total" : 3, - "matches" : [ - { - "_index": "twitter", - "_id": "4" - }, - { - "_index": "twitter", - "_id": "5" - }, - { - "_index": "twitter", - "_id": "6" - } - ] - }, - { - "error" : "DocumentMissingException[[_na][_na] [user][3]: document missing]" - }, - { - "took" : 12, - "_shards" : { - "total" : 5, - "successful" : 5, - "failed" : 0, - }, - "total" : 3 - }, - { - "took" : 14, - "_shards" : { - "total" : 5, - "successful" : 5, - "failed" : 0, - }, - "total" : 3 - } - ] -} - --------------------------------------------------- - -Each item represents a percolate response, the order of the items maps to the order in which the percolate requests -were specified. In case a percolate request failed, the item response is substituted with an error message. - -[float] -=== How it Works Under the Hood - -When indexing a document that contains a query in an index and the `.percolator` type, the query part of the documents gets -parsed into a Lucene query and is kept in memory until that percolator document is removed or the index containing the -`.percolator` type gets removed. So, all the active percolator queries are kept in memory. - -At percolate time, the document specified in the request gets parsed into a Lucene document and is stored in a in-memory -Lucene index. This in-memory index can just hold this one document and it is optimized for that. Then all the queries -that are registered to the index that the percolate request is targeted for, are going to be executed on this single document -in-memory index. 
This happens on each shard the percolate request needs to execute. - -By using `routing`, `filter` or `query` features the amount of queries that need to be executed can be reduced and thus -the time the percolate API needs to run can be decreased. - -[float] -=== Important Notes - -Because the percolator API is processing one document at a time, it doesn't support queries and filters that run -against child documents such as `has_child` and `has_parent`. - -The `inner_hits` feature on the `nested` query isn't supported in the percolate api. - -The `wildcard` and `regexp` query natively use a lot of memory and because the percolator keeps the queries into memory -this can easily take up the available memory in the heap space. If possible try to use a `prefix` query or ngramming to -achieve the same result (with way less memory being used). - -The `delete-by-query` plugin doesn't work to unregister a query, it only deletes the percolate documents from disk. In order -to update the registered queries in memory the index needs be closed and opened. - -[float] -=== Forcing Unmapped Fields to be Handled as Strings - -In certain cases it is unknown what kind of percolator queries do get registered, and if no field mapping exists for fields -that are referred by percolator queries then adding a percolator query fails. This means the mapping needs to be updated -to have the field with the appropriate settings, and then the percolator query can be added. But sometimes it is sufficient -if all unmapped fields are handled as if these were default string fields. In those cases one can configure the -`index.percolator.map_unmapped_fields_as_string` setting to `true` (default to `false`) and then if a field referred in -a percolator query does not exist, it will be handled as a default string field so that adding the percolator query doesn't -fail. 
diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java index 7c95b8abf841..f21dd83e4260 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java @@ -67,10 +67,6 @@ import org.elasticsearch.action.get.MultiGetAction; import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.percolate.MultiPercolateAction; -import org.elasticsearch.action.percolate.MultiPercolateRequest; -import org.elasticsearch.action.percolate.PercolateAction; -import org.elasticsearch.action.percolate.PercolateRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; @@ -85,7 +81,6 @@ import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilders; @@ -464,51 +459,6 @@ public class IndicesRequestTests extends ESIntegTestCase { assertSameIndices(validateQueryRequest, validateQueryShardAction); } - public void testPercolate() { - String percolateShardAction = PercolateAction.NAME + "[s]"; - interceptTransportActions(percolateShardAction); - - client().prepareIndex("test-get", "type", "1").setSource("field","value").get(); - - PercolateRequest percolateRequest = new 
PercolateRequest().indices(randomIndicesOrAliases()).documentType("type"); - if (randomBoolean()) { - percolateRequest.getRequest(new GetRequest("test-get", "type", "1")); - } else { - percolateRequest.source("\"field\":\"value\""); - } - internalCluster().clientNodeClient().percolate(percolateRequest).actionGet(); - - clearInterceptedActions(); - assertSameIndices(percolateRequest, percolateShardAction); - } - - public void testMultiPercolate() { - String multiPercolateShardAction = MultiPercolateAction.NAME + "[shard][s]"; - interceptTransportActions(multiPercolateShardAction); - - client().prepareIndex("test-get", "type", "1").setSource("field", "value").get(); - - MultiPercolateRequest multiPercolateRequest = new MultiPercolateRequest(); - List indices = new ArrayList<>(); - int numRequests = iterations(1, 30); - for (int i = 0; i < numRequests; i++) { - String[] indicesOrAliases = randomIndicesOrAliases(); - Collections.addAll(indices, indicesOrAliases); - PercolateRequest percolateRequest = new PercolateRequest().indices(indicesOrAliases).documentType("type"); - if (randomBoolean()) { - percolateRequest.getRequest(new GetRequest("test-get", "type", "1")); - } else { - percolateRequest.source("\"field\":\"value\""); - } - multiPercolateRequest.add(percolateRequest); - } - - internalCluster().clientNodeClient().multiPercolate(multiPercolateRequest).actionGet(); - - clearInterceptedActions(); - assertIndicesSubset(indices, multiPercolateShardAction); - } - public void testOpenIndex() { interceptTransportActions(OpenIndexAction.NAME); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java index 78c400885756..f0f34493444a 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java +++ 
b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java @@ -153,7 +153,7 @@ public class TemplateQueryParserTests extends ESTestCase { } }); IndicesQueriesRegistry indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class); - context = new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry); + context = new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry, null); } @Override diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml index f264928c21b5..dfafd8335097 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml @@ -48,11 +48,7 @@ merges.total_docs .+ \n merges.total_size .+ \n merges.total_time .+ \n - percolate.current .+ \n - percolate.memory_size .+ \n percolate.queries .+ \n - percolate.time .+ \n - percolate.total .+ \n refresh.total .+ \n refresh.time .+ \n search.fetch_current .+ \n diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yaml index 8d59e7c139c8..780edacd7b61 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yaml @@ -29,14 +29,14 @@ - do: cat.thread_pool: - h: id,ba,fa,gea,ga,ia,maa,ma,fma,pa + h: id,ba,fa,gea,ga,ia,maa,ma,fma v: true full_id: true - match: $body: | - /^ id \s+ ba \s+ fa \s+ gea \s+ ga \s+ ia \s+ maa \s+ fma \s+ pa \n - (\S+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \n)+ $/ + /^ id \s+ 
ba \s+ fa \s+ gea \s+ ga \s+ ia \s+ maa \s+ fma \n + (\S+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \n)+ $/ - do: cat.thread_pool: @@ -108,16 +108,6 @@ /^ id \s+ force_merge.type \s+ force_merge.active \s+ force_merge.size \s+ force_merge.queue \s+ force_merge.queueSize \s+ force_merge.rejected \s+ force_merge.largest \s+ force_merge.completed \s+ force_merge.min \s+ force_merge.max \s+ force_merge.keepAlive \n (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/ - - do: - cat.thread_pool: - h: id,percolate.type,percolate.active,percolate.size,percolate.queue,percolate.queueSize,percolate.rejected,percolate.largest,percolate.completed,percolate.min,percolate.max,percolate.keepAlive - v: true - - - match: - $body: | - /^ id \s+ percolate.type \s+ percolate.active \s+ percolate.size \s+ percolate.queue \s+ percolate.queueSize \s+ percolate.rejected \s+ percolate.largest \s+ percolate.completed \s+ percolate.min \s+ percolate.max \s+ percolate.keepAlive \n - (\S+ \s+ (cached|fixed|scaling)? 
\s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/ - - do: cat.thread_pool: h: id,refresh.type,refresh.active,refresh.size,refresh.queue,refresh.queueSize,refresh.rejected,refresh.largest,refresh.completed,refresh.min,refresh.max,refresh.keepAlive diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/11_metric.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/11_metric.yaml index ed676924f990..19598c7363ef 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/11_metric.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/11_metric.yaml @@ -31,7 +31,6 @@ setup: - is_true: _all.total.warmer - is_true: _all.total.query_cache - is_true: _all.total.fielddata - - is_true: _all.total.percolate - is_true: _all.total.completion - is_true: _all.total.segments - is_true: _all.total.translog @@ -54,7 +53,6 @@ setup: - is_true: _all.total.warmer - is_true: _all.total.query_cache - is_true: _all.total.fielddata - - is_true: _all.total.percolate - is_true: _all.total.completion - is_true: _all.total.segments - is_true: _all.total.translog @@ -77,7 +75,6 @@ setup: - is_false: _all.total.warmer - is_false: _all.total.query_cache - is_false: _all.total.fielddata - - is_false: _all.total.percolate - is_false: _all.total.completion - is_false: _all.total.segments - is_false: _all.total.translog @@ -100,7 +97,6 @@ setup: - is_false: _all.total.warmer - is_false: _all.total.query_cache - is_false: _all.total.fielddata - - is_false: _all.total.percolate - is_false: _all.total.completion - is_false: _all.total.segments - is_false: _all.total.translog @@ -124,7 +120,6 @@ setup: - is_false: _all.total.warmer - is_false: _all.total.query_cache - is_false: _all.total.fielddata - - is_false: _all.total.percolate - is_false: _all.total.completion - is_false: _all.total.segments - is_false: _all.total.translog diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/16_existing_doc.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/16_existing_doc.yaml index 2430fe63a024..c6f12131f356 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/16_existing_doc.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/16_existing_doc.yaml @@ -108,5 +108,5 @@ term: tag: tag1 - - match: {'matches': [{_index: percolator_index, _id: test_percolator}]} + - match: {'matches': [{_index: percolator_index, _id: test_percolator, _score: 1.0}]} diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 2c8a4c7e4581..891353ef589a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -41,6 +41,7 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.object.ObjectMapper; +import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexShard; @@ -78,6 +79,7 @@ public class TestSearchContext extends SearchContext { final IndexService indexService; final IndexFieldDataService indexFieldDataService; final BitsetFilterCache fixedBitSetFilterCache; + final PercolatorQueryCache percolatorQueryCache; final ThreadPool threadPool; final Map, Collector> queryCollectors = new HashMap<>(); final IndexShard indexShard; @@ -105,6 +107,7 @@ public class TestSearchContext extends SearchContext { this.indexService = indexService; this.indexFieldDataService = indexService.fieldData(); this.fixedBitSetFilterCache = 
indexService.cache().bitsetFilterCache(); + this.percolatorQueryCache = indexService.cache().getPercolatorQueryCache(); this.threadPool = threadPool; this.indexShard = indexService.getShardOrNull(0); this.scriptService = scriptService; @@ -119,6 +122,7 @@ public class TestSearchContext extends SearchContext { this.indexFieldDataService = null; this.threadPool = null; this.fixedBitSetFilterCache = null; + this.percolatorQueryCache = null; this.indexShard = null; scriptService = null; this.queryShardContext = queryShardContext; @@ -330,6 +334,11 @@ public class TestSearchContext extends SearchContext { return indexFieldDataService; } + @Override + public PercolatorQueryCache percolatorQueryCache() { + return percolatorQueryCache; + } + @Override public long timeoutInMillis() { return 0; From 4335997017d3a7862757bc19dd817f86f5b34d9b Mon Sep 17 00:00:00 2001 From: Colin Goodheart-Smithe Date: Mon, 21 Mar 2016 11:04:31 +0000 Subject: [PATCH 317/320] Aggregations: Fixes the defaults for `keyed` in the percentiles aggregations During the aggregation refactoring the default value for `keyed` in the `percentiles` and `percentile_ranks` aggregation was inadvertently changed from `true` to `false`. 
This change reverts the defaults to the old (correct) value --- .../metrics/percentiles/PercentileRanksAggregatorBuilder.java | 2 +- .../metrics/percentiles/PercentilesAggregatorBuilder.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregatorBuilder.java index cd43777959ec..de66f68103fe 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregatorBuilder.java @@ -48,7 +48,7 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly Date: Thu, 17 Mar 2016 22:42:20 +0100 Subject: [PATCH 318/320] Remove ClusterService interface, in favor of its only production instance #17183 We currently have a ClusterService interface, implemented by InternalClusterService and a couple of test classes. Since the decoupling of the transport service and the cluster service, one can construct a ClusterService fairly easily, so we don't need this extra indirection.
Closes #17183 --- .../resources/checkstyle_suppressions.xml | 1 - .../health/TransportClusterHealthAction.java | 2 +- .../TransportNodesHotThreadsAction.java | 2 +- .../node/info/TransportNodesInfoAction.java | 2 +- .../liveness/TransportLivenessAction.java | 2 +- .../node/stats/TransportNodesStatsAction.java | 2 +- .../cancel/TransportCancelTasksAction.java | 2 +- .../tasks/list/TransportListTasksAction.java | 2 +- .../TransportDeleteRepositoryAction.java | 2 +- .../get/TransportGetRepositoriesAction.java | 2 +- .../put/TransportPutRepositoryAction.java | 2 +- .../TransportVerifyRepositoryAction.java | 2 +- .../TransportClusterRerouteAction.java | 2 +- .../TransportClusterUpdateSettingsAction.java | 2 +- .../TransportClusterSearchShardsAction.java | 2 +- .../create/TransportCreateSnapshotAction.java | 2 +- .../delete/TransportDeleteSnapshotAction.java | 2 +- .../get/TransportGetSnapshotsAction.java | 2 +- .../TransportRestoreSnapshotAction.java | 2 +- .../status/TransportNodesSnapshotsStatus.java | 2 +- .../TransportSnapshotsStatusAction.java | 2 +- .../state/TransportClusterStateAction.java | 2 +- .../stats/TransportClusterStatsAction.java | 2 +- .../TransportPendingClusterTasksAction.java | 2 +- .../alias/TransportIndicesAliasesAction.java | 2 +- .../exists/TransportAliasesExistAction.java | 2 +- .../alias/get/TransportGetAliasesAction.java | 2 +- .../analyze/TransportAnalyzeAction.java | 2 +- .../TransportClearIndicesCacheAction.java | 2 +- .../close/TransportCloseIndexAction.java | 2 +- .../create/TransportCreateIndexAction.java | 2 +- .../delete/TransportDeleteIndexAction.java | 2 +- .../indices/TransportIndicesExistsAction.java | 2 +- .../types/TransportTypesExistsAction.java | 2 +- .../indices/flush/TransportFlushAction.java | 2 +- .../flush/TransportShardFlushAction.java | 3 +- .../forcemerge/TransportForceMergeAction.java | 2 +- .../indices/get/TransportGetIndexAction.java | 2 +- .../get/TransportGetFieldMappingsAction.java | 2 +- 
.../TransportGetFieldMappingsIndexAction.java | 2 +- .../get/TransportGetMappingsAction.java | 2 +- .../put/TransportPutMappingAction.java | 2 +- .../open/TransportOpenIndexAction.java | 2 +- .../recovery/TransportRecoveryAction.java | 2 +- .../refresh/TransportRefreshAction.java | 2 +- .../refresh/TransportShardRefreshAction.java | 3 +- .../TransportIndicesSegmentsAction.java | 2 +- .../get/TransportGetSettingsAction.java | 2 +- .../put/TransportUpdateSettingsAction.java | 2 +- .../TransportIndicesShardStoresAction.java | 2 +- .../stats/TransportIndicesStatsAction.java | 2 +- .../TransportDeleteIndexTemplateAction.java | 2 +- .../get/TransportGetIndexTemplatesAction.java | 2 +- .../put/TransportPutIndexTemplateAction.java | 2 +- .../get/TransportUpgradeStatusAction.java | 2 +- .../upgrade/post/TransportUpgradeAction.java | 2 +- .../post/TransportUpgradeSettingsAction.java | 2 +- .../query/TransportValidateQueryAction.java | 2 +- .../action/bulk/TransportBulkAction.java | 2 +- .../action/bulk/TransportShardBulkAction.java | 2 +- .../action/delete/TransportDeleteAction.java | 3 +- .../explain/TransportExplainAction.java | 2 +- .../TransportFieldStatsTransportAction.java | 2 +- .../action/get/TransportGetAction.java | 2 +- .../action/get/TransportMultiGetAction.java | 2 +- .../get/TransportShardMultiGetAction.java | 2 +- .../action/index/TransportIndexAction.java | 2 +- .../ingest/DeletePipelineTransportAction.java | 2 +- .../ingest/GetPipelineTransportAction.java | 2 +- .../ingest/IngestProxyActionFilter.java | 2 +- .../ingest/PutPipelineTransportAction.java | 2 +- .../search/AbstractSearchAsyncAction.java | 2 +- .../SearchDfsQueryAndFetchAsyncAction.java | 2 +- .../SearchDfsQueryThenFetchAsyncAction.java | 2 +- .../SearchQueryAndFetchAsyncAction.java | 2 +- .../SearchQueryThenFetchAsyncAction.java | 2 +- .../SearchScrollQueryAndFetchAsyncAction.java | 2 +- ...SearchScrollQueryThenFetchAsyncAction.java | 2 +- .../search/TransportClearScrollAction.java | 2 +- 
.../search/TransportMultiSearchAction.java | 2 +- .../action/search/TransportSearchAction.java | 2 +- .../search/TransportSearchScrollAction.java | 2 +- .../suggest/TransportSuggestAction.java | 2 +- .../broadcast/TransportBroadcastAction.java | 2 +- .../node/TransportBroadcastByNodeAction.java | 2 +- .../master/TransportMasterNodeAction.java | 2 +- .../master/TransportMasterNodeReadAction.java | 2 +- .../info/TransportClusterInfoAction.java | 2 +- .../support/nodes/TransportNodesAction.java | 2 +- .../TransportBroadcastReplicationAction.java | 3 +- .../TransportReplicationAction.java | 3 +- ...ransportInstanceSingleOperationAction.java | 2 +- .../shard/TransportSingleShardAction.java | 2 +- .../support/tasks/TransportTasksAction.java | 2 +- .../TransportMultiTermVectorsAction.java | 3 +- .../TransportShardMultiTermsVectorAction.java | 2 +- .../TransportTermVectorsAction.java | 2 +- .../action/update/TransportUpdateAction.java | 2 +- .../elasticsearch/cluster/ClusterModule.java | 4 +- .../elasticsearch/cluster/ClusterService.java | 156 --------- .../elasticsearch/cluster/ClusterState.java | 4 +- .../cluster/ClusterStateObserver.java | 3 +- .../cluster/InternalClusterInfoService.java | 1 + .../action/shard/ShardStateAction.java | 3 +- .../cluster/metadata/MetaData.java | 4 +- .../metadata/MetaDataCreateIndexService.java | 2 +- .../metadata/MetaDataDeleteIndexService.java | 5 +- .../metadata/MetaDataIndexAliasesService.java | 2 +- .../metadata/MetaDataIndexStateService.java | 3 +- .../MetaDataIndexTemplateService.java | 2 +- .../metadata/MetaDataMappingService.java | 3 +- .../MetaDataUpdateSettingsService.java | 2 +- .../cluster/routing/RoutingService.java | 2 +- ...lusterService.java => ClusterService.java} | 185 +++++++---- .../common/settings/ClusterSettings.java | 4 +- .../discovery/local/LocalDiscovery.java | 2 +- .../discovery/zen/NodeJoinController.java | 5 +- .../discovery/zen/ZenDiscovery.java | 9 +- .../zen/fd/MasterFaultDetection.java | 2 +- 
.../zen/membership/MembershipAction.java | 2 +- .../org/elasticsearch/gateway/Gateway.java | 2 +- .../gateway/GatewayAllocator.java | 2 +- .../elasticsearch/gateway/GatewayService.java | 2 +- .../gateway/LocalAllocateDangledIndices.java | 2 +- .../TransportNodesListGatewayMetaState.java | 2 +- ...ransportNodesListGatewayStartedShards.java | 2 +- .../BlobStoreIndexShardRepository.java | 2 +- .../elasticsearch/indices/IndicesService.java | 3 +- .../cluster/IndicesClusterStateService.java | 2 +- .../indices/flush/SyncedFlushService.java | 2 +- .../indices/recovery/RecoverySource.java | 2 +- .../recovery/RecoveryTargetService.java | 2 +- .../indices/store/IndicesStore.java | 2 +- .../TransportNodesListShardStoreMetaData.java | 2 +- .../indices/ttl/IndicesTTLService.java | 2 +- .../elasticsearch/ingest/PipelineStore.java | 4 +- .../java/org/elasticsearch/node/Node.java | 7 +- .../node/service/NodeService.java | 2 +- .../repositories/RepositoriesService.java | 2 +- .../VerifyNodeRepositoryAction.java | 2 +- .../uri/URLIndexShardRepository.java | 2 +- .../rest/action/main/RestMainAction.java | 2 +- .../elasticsearch/search/SearchService.java | 3 +- .../snapshots/RestoreService.java | 2 +- .../snapshots/SnapshotShardsService.java | 2 +- .../snapshots/SnapshotsService.java | 2 +- .../org/elasticsearch/tribe/TribeService.java | 3 +- .../node/tasks/CancellableTasksTests.java | 5 +- .../node/tasks/TaskManagerTestCase.java | 27 +- .../admin/cluster/node/tasks/TasksIT.java | 4 +- .../cluster/node/tasks/TestTaskPlugin.java | 2 +- .../node/tasks/TransportTasksActionTests.java | 3 +- .../bulk/TransportBulkActionTookTests.java | 14 +- .../ingest/IngestProxyActionFilterTests.java | 2 +- .../TransportBroadcastByNodeActionTests.java | 31 +- .../TransportMasterNodeActionTests.java | 43 ++- .../nodes/TransportNodesActionTests.java | 20 +- .../BroadcastReplicationTests.java | 34 +- .../TransportReplicationActionTests.java | 89 +++--- ...ortInstanceSingleOperationActionTests.java | 34 +- 
.../cluster/ClusterInfoServiceIT.java | 2 +- .../cluster/MinimumMasterNodesIT.java | 1 + .../action/shard/ShardStateActionTests.java | 47 +-- .../cluster/routing/RoutingServiceTests.java | 16 +- .../{ => service}/ClusterServiceIT.java | 25 +- .../cluster/service/ClusterServiceTests.java | 56 ++-- .../cluster/service/ClusterServiceUtils.java | 102 ++++++ .../DiscoveryWithServiceDisruptionsIT.java | 2 +- .../discovery/ZenFaultDetectionTests.java | 22 +- .../zen/NodeJoinControllerTests.java | 72 ++++- .../discovery/zen/ZenDiscoveryIT.java | 2 +- .../gateway/GatewayServiceTests.java | 10 +- .../gateway/MetaDataWriteDataNodesIT.java | 2 - .../mapper/DynamicMappingDisabledTests.java | 31 +- .../index/query/AbstractQueryTestCase.java | 69 ++-- .../index/shard/IndexShardTests.java | 4 +- .../indices/IndicesServiceTests.java | 3 +- .../flush/SyncedFlushSingleNodeTests.java | 2 +- .../indices/recovery/IndexRecoveryIT.java | 2 +- .../indices/state/RareClusterStateIT.java | 2 +- .../store/IndicesStoreIntegrationIT.java | 2 +- .../indices/store/IndicesStoreTests.java | 34 +- .../nodesinfo/SimpleNodesInfoIT.java | 2 +- .../elasticsearch/recovery/RelocationIT.java | 2 +- .../aggregations/AggregatorParsingTests.java | 187 +++++------ .../aggregations/BaseAggregationTestCase.java | 40 +-- .../BasePipelineAggregationTestCase.java | 38 ++- .../builder/SearchSourceBuilderTests.java | 102 +++--- .../AbstractSnapshotIntegTestCase.java | 2 +- .../DedicatedClusterSnapshotRestoreIT.java | 2 +- .../SharedClusterSnapshotRestoreIT.java | 2 +- .../snapshots/mockstore/MockRepository.java | 2 +- .../messy/tests/TemplateQueryParserTests.java | 2 +- .../AbstractBaseReindexRestHandler.java | 6 +- .../index/reindex/RestReindexAction.java | 18 +- .../reindex/RestUpdateByQueryAction.java | 14 +- .../index/reindex/TransportReindexAction.java | 2 +- .../MockInternalClusterInfoService.java | 1 + .../search/MockSearchService.java | 2 +- .../elasticsearch/test/ESIntegTestCase.java | 12 +- 
.../test/InternalTestCluster.java | 3 +- .../test/cluster/NoopClusterService.java | 184 ----------- .../test/cluster/TestClusterService.java | 301 ------------------ .../BlockClusterStateProcessing.java | 2 +- .../SlowClusterStateProcessing.java | 2 +- 205 files changed, 1045 insertions(+), 1369 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/cluster/ClusterService.java rename core/src/main/java/org/elasticsearch/cluster/service/{InternalClusterService.java => ClusterService.java} (85%) rename core/src/test/java/org/elasticsearch/cluster/{ => service}/ClusterServiceIT.java (97%) create mode 100644 core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceUtils.java delete mode 100644 test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java delete mode 100644 test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 287e14b347be..46ddcb0ad0e0 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -273,7 +273,6 @@ - diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java index 39d6a8daeb04..069f0ebe1b8e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import 
org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.ClusterStateUpdateTask; @@ -32,6 +31,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java index c743a1d2a91f..d53f651da453 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java @@ -24,8 +24,8 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java index b14450f9eb19..f52729faa4f2 100644 --- 
a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java @@ -23,8 +23,8 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java index 65913bc4b287..8b29b9379ac5 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.cluster.node.liveness; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java index a85540904730..8ba3d00558b3 100644 --- 
a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java @@ -23,8 +23,8 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java index 874f230587d8..336f4c845961 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java @@ -26,10 +26,10 @@ import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff 
--git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java index d867d9d85557..d5175e263e74 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java @@ -26,8 +26,8 @@ import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/TransportDeleteRepositoryAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/TransportDeleteRepositoryAction.java index 79e51f9a46ef..a17d2aac8929 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/TransportDeleteRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/TransportDeleteRepositoryAction.java @@ -22,12 +22,12 @@ package org.elasticsearch.action.admin.cluster.repositories.delete; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.repositories.RepositoriesService; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java index 39d9cacbda39..490d20f086cc 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java @@ -22,7 +22,6 @@ package org.elasticsearch.action.admin.cluster.repositories.get; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -30,6 +29,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoriesMetaData; import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; diff --git 
a/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java index efc45f16cbd9..d1639001352f 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java @@ -22,12 +22,12 @@ package org.elasticsearch.action.admin.cluster.repositories.put; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.repositories.RepositoriesService; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/TransportVerifyRepositoryAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/TransportVerifyRepositoryAction.java index 8b1d9816004f..2c75335dcaa2 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/TransportVerifyRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/TransportVerifyRepositoryAction.java @@ -23,11 +23,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import 
org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.repositories.RepositoriesService; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java index d7ec84fb7a5e..e6116dbfbc41 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -31,6 +30,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.RoutingExplanations; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java index 75f94921e61b..60a0e7a8046c 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -33,6 +32,7 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.inject.Inject; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java index f8868e94bf0d..8b26fd6c04f5 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java +++ 
b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java @@ -22,7 +22,6 @@ package org.elasticsearch.action.admin.cluster.shards; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -31,6 +30,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java index 3bee1a74e1d0..2654ac0c2691 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java @@ -22,12 +22,12 @@ package org.elasticsearch.action.admin.cluster.snapshots.create; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import 
org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.SnapshotId; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.snapshots.SnapshotInfo; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java index 4e3c777b7d10..423e38cd2588 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java @@ -22,12 +22,12 @@ package org.elasticsearch.action.admin.cluster.snapshots.delete; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.SnapshotId; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.snapshots.SnapshotsService; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index 478146de357b..0198102a200b 100644 --- 
a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -22,12 +22,12 @@ package org.elasticsearch.action.admin.cluster.snapshots.get; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.SnapshotId; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java index ae802342824a..cbbc195370c4 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java @@ -22,12 +22,12 @@ package org.elasticsearch.action.admin.cluster.snapshots.restore; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import 
org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.SnapshotId; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.snapshots.RestoreInfo; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java index 45c3f89919ea..0bc13db9644b 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java @@ -28,10 +28,10 @@ import org.elasticsearch.action.support.nodes.BaseNodesRequest; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java index fc19dd986de5..efa156eaa0cb 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java +++ 
b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java @@ -24,13 +24,13 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.SnapshotId; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java index 1dd88033f827..4bd826237c5d 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -32,6 +31,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import 
org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaData.Custom; import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java index 0055d31748b6..6d1614eb485a 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -28,10 +28,10 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.health.ClusterStateHealth; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java index 105d596bad8d..370b668f6595 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java +++ 
b/core/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java @@ -22,11 +22,11 @@ package org.elasticsearch.action.admin.cluster.tasks; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.PendingClusterTask; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java index 7cb7b225895e..218b84e68aec 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -31,6 +30,7 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.AliasAction; import 
org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaDataIndexAliasesService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.action.admin.indices.alias.delete.AliasesNotFoundException; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/TransportAliasesExistAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/TransportAliasesExistAction.java index ed14c51d442c..8ca09dbb67ed 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/TransportAliasesExistAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/TransportAliasesExistAction.java @@ -22,11 +22,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java index ae9916810d37..061f916c2e05 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java +++ 
b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java @@ -21,12 +21,12 @@ package org.elasticsearch.action.admin.indices.alias.get; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index 0541ac315058..0edae5eb1bcf 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -32,11 +32,11 @@ import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; 
import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java index 7bc9f50252ae..59cd95044ccd 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java @@ -22,13 +22,13 @@ package org.elasticsearch.action.admin.indices.cache.clear; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java index 7f77424e84c2..4fbfc7e72abb 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java @@ -23,13 +23,13 @@ import 
org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java index 98a002cc2fb5..7b47a46a236c 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java @@ -22,13 +22,13 @@ package org.elasticsearch.action.admin.indices.create; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; 
+import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.indices.IndexAlreadyExistsException; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java index c5e504b744f0..489001d9b89f 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java @@ -23,12 +23,12 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaDataDeleteIndexService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/TransportIndicesExistsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/TransportIndicesExistsAction.java index 08edd16e671d..c451e50b77cf 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/TransportIndicesExistsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/TransportIndicesExistsAction.java @@ -23,11 +23,11 @@ 
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TransportTypesExistsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TransportTypesExistsAction.java index f76b3eb213a9..e1cf5be1acaf 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TransportTypesExistsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TransportTypesExistsAction.java @@ -21,12 +21,12 @@ package org.elasticsearch.action.admin.indices.exists.types; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java index d2a8f1abcbf9..8bb124d8fc49 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java @@ -23,8 +23,8 @@ import org.elasticsearch.action.ReplicationResponse; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.replication.TransportBroadcastReplicationAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.ShardId; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java index 5df7e9ad69b8..3c22209813f6 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java @@ -22,12 +22,11 @@ package org.elasticsearch.action.admin.indices.flush; import org.elasticsearch.action.ReplicationResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.replication.TransportReplicationAction; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.action.shard.ShardStateAction; import 
org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java index 0119b1693aaf..18ac88e1b305 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java @@ -22,13 +22,13 @@ package org.elasticsearch.action.admin.indices.forcemerge; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java index eb4dafd7fbc6..097444bcb68a 100644 --- 
a/core/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.info.TransportClusterInfoAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -32,6 +31,7 @@ import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java index c726090e4680..d9031807ae8e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java @@ -22,9 +22,9 @@ package org.elasticsearch.action.admin.indices.mapping.get; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java index 3d11df97dee2..e886af25fbba 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java @@ -23,12 +23,12 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.regex.Regex; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java index cec337dd54d6..293f5a0e6771 100644 --- 
a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java @@ -22,12 +22,12 @@ package org.elasticsearch.action.admin.indices.mapping.get; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.info.TransportClusterInfoAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java index 0d8accd77556..465353501543 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java @@ -22,13 +22,13 @@ package org.elasticsearch.action.admin.indices.mapping.put; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlockException; import 
org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaDataMappingService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java index b354c8bbfce9..50e79036694a 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java @@ -23,13 +23,13 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java index 8590fc210a06..01f37527374f 100644 --- 
a/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java @@ -22,13 +22,13 @@ package org.elasticsearch.action.admin.indices.recovery; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java index bd879e0eaa98..34bf39daabd1 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java @@ -24,8 +24,8 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.replication.BasicReplicationRequest; import org.elasticsearch.action.support.replication.TransportBroadcastReplicationAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import 
org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.ShardId; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java index f5149ed8b23c..e3155614337a 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java @@ -23,12 +23,11 @@ import org.elasticsearch.action.ReplicationResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.replication.BasicReplicationRequest; import org.elasticsearch.action.support.replication.TransportReplicationAction; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java index f700a198e2c4..8df46719c7b7 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java @@ -22,13 
+22,13 @@ package org.elasticsearch.action.admin.indices.segments; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java index 5f670dcd4fb7..f09d3fb559cb 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java @@ -22,12 +22,12 @@ package org.elasticsearch.action.admin.indices.settings.get; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import 
org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.regex.Regex; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java index dd7462ec20d2..36fa1895af45 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java @@ -22,7 +22,6 @@ package org.elasticsearch.action.admin.indices.settings.put; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -30,6 +29,7 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaDataUpdateSettingsService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java index c040085a89e3..bf4dbd3359ed 100644 --- 
a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -38,6 +37,7 @@ import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java index 6ab9bc074b32..8c12dfa9fda0 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java @@ -22,13 +22,13 @@ package org.elasticsearch.action.admin.indices.stats; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java index 9eab0f80e50b..0763f2327112 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java @@ -21,12 +21,12 @@ package org.elasticsearch.action.admin.indices.template.delete; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java 
b/core/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java index a43397e48dc4..672ca1a90803 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java @@ -22,12 +22,12 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java index c5fed57d0136..02aad2f7ff49 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java @@ -21,13 +21,13 @@ package org.elasticsearch.action.admin.indices.template.put; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; 
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java index 6b37f56ed4a7..cf288e0cc6fe 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java @@ -23,13 +23,13 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; diff --git 
a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java index f3cf2da9fdd8..cdf6f585e531 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.PrimaryMissingActionException; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -35,6 +34,7 @@ import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeSettingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeSettingsAction.java index 527adeaa3e5e..403456cb903d 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeSettingsAction.java @@ -22,13 +22,13 @@ package org.elasticsearch.action.admin.indices.upgrade.post; import 
org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaDataUpdateSettingsService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java index fe02a1541a09..320f06966059 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java @@ -29,13 +29,13 @@ import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cache.recycler.PageCacheRecycler; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.GroupShardsIterator; import 
org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.inject.Inject; diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 135147d824c4..9d9b36ba072f 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -37,13 +37,13 @@ import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.update.TransportUpdateAction; import org.elasticsearch.action.update.UpdateRequest; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 463f4ac23ac4..76402df8aa47 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -35,12 +35,12 @@ import org.elasticsearch.action.support.replication.TransportReplicationAction; import 
org.elasticsearch.action.update.UpdateHelper; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; diff --git a/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java b/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java index ec7a04ccc3f2..783fab08bae3 100644 --- a/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java +++ b/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java @@ -28,13 +28,12 @@ import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.support.replication.TransportReplicationAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/core/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java index 5969e20065ef..284e31406e25 100644 --- a/core/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java +++ b/core/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java @@ -27,10 +27,10 @@ import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cache.recycler.PageCacheRecycler; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.ShardIterator; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; diff --git a/core/src/main/java/org/elasticsearch/action/fieldstats/TransportFieldStatsTransportAction.java b/core/src/main/java/org/elasticsearch/action/fieldstats/TransportFieldStatsTransportAction.java index 42360c5e0eb5..de56a0f5c2ec 100644 --- a/core/src/main/java/org/elasticsearch/action/fieldstats/TransportFieldStatsTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/fieldstats/TransportFieldStatsTransportAction.java @@ -28,13 +28,13 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; 
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; diff --git a/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index 2d6bafc96230..b84493c4dcaa 100644 --- a/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -22,12 +22,12 @@ package org.elasticsearch.action.get; import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.Preference; import org.elasticsearch.cluster.routing.ShardIterator; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; diff --git a/core/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java b/core/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java index 62d21c283b8a..1858ac8ba717 100644 --- a/core/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java +++ 
b/core/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java @@ -22,10 +22,10 @@ package org.elasticsearch.action.get; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; diff --git a/core/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/core/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java index 1f07a5e79e66..05b672105f8f 100644 --- a/core/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java +++ b/core/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java @@ -23,10 +23,10 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.ShardIterator; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; diff --git a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java 
index 0d2e7c2e0743..9be8e4cef895 100644 --- a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.support.replication.TransportReplicationAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.action.shard.ShardStateAction; @@ -36,6 +35,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java index 6378eb5757b8..74ce894b0532 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java @@ -22,11 +22,11 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import 
org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.PipelineStore; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java index e762d0b8d332..8bac5c7b8043 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java @@ -22,11 +22,11 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.PipelineStore; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java index 39a4b1fa4e8c..62716c6dc0da 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java @@ -29,9 +29,9 @@ import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; import 
org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilterChain; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java index aafd9ee75a43..e1a34413e2cf 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java @@ -26,12 +26,12 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.PipelineStore; diff --git a/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index e928e681bbf9..732e9098ee7e 100644 --- 
a/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -25,7 +25,6 @@ import org.apache.lucene.search.TopDocs; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.support.TransportActions; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -34,6 +33,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.util.concurrent.AtomicArray; diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryAndFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryAndFetchAsyncAction.java index 56d0fedd40cf..f7cb72b22e96 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryAndFetchAsyncAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryAndFetchAsyncAction.java @@ -21,9 +21,9 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.ESLogger; import 
org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.action.SearchTransportService; diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java index f2dcefa7554c..c5f320f1b333 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java @@ -23,9 +23,9 @@ import com.carrotsearch.hppc.IntArrayList; import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.SearchShardTarget; diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchQueryAndFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchQueryAndFetchAsyncAction.java index dcbf9b5091f1..1b3388477627 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchQueryAndFetchAsyncAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchQueryAndFetchAsyncAction.java @@ -21,9 +21,9 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.ESLogger; import 
org.elasticsearch.search.action.SearchTransportService; import org.elasticsearch.search.controller.SearchPhaseController; diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java index e15b9da8acbb..3feb40411f93 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java @@ -23,9 +23,9 @@ import com.carrotsearch.hppc.IntArrayList; import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.SearchShardTarget; diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java index b5b95dc5cbed..4e0ee3ff5e58 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java @@ -21,9 +21,9 @@ package org.elasticsearch.action.search; import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.ESLogger; import 
org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.action.SearchTransportService; diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java index 864f17eee2ca..8e822302d2fa 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java @@ -22,9 +22,9 @@ package org.elasticsearch.action.search; import com.carrotsearch.hppc.IntArrayList; import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.action.SearchTransportService; diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java index 95f0796ba4fc..fb021fdd9f95 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java @@ -22,11 +22,11 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import 
org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.CountDown; diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java index 437544275989..c0428cd531fd 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java @@ -22,10 +22,10 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 0b53008ddf54..8fe99c1e5901 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -22,9 +22,9 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.ClusterService; import 
org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java index 03009cc01028..3bcadda1725c 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java @@ -22,8 +22,8 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.action.SearchTransportService; diff --git a/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java b/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java index af6890e045ec..95bf111ed718 100644 --- a/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java +++ b/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java @@ -25,13 +25,13 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; 
-import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java index f2d7e306c387..182d922fc399 100644 --- a/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.TransportActions; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -33,6 +32,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; diff --git 
a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java index 37b101e19352..3ed3e8ffb7a9 100644 --- a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java @@ -33,7 +33,6 @@ import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -41,6 +40,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; diff --git a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index 4b19c3e6ebac..1a7092c3a97a 100644 --- a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.ActionRunnable; import 
org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.ThreadedActionListener; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.MasterNodeChangePredicate; @@ -34,6 +33,7 @@ import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.Discovery; diff --git a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java index f53355f24e34..5a6ddcfb34ef 100644 --- a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java @@ -21,8 +21,8 @@ package org.elasticsearch.action.support.master; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java 
b/core/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java index 829f90bfc511..66b9fce5d711 100644 --- a/core/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java @@ -22,9 +22,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; diff --git a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index 7e2702afd8a7..9c021efbe405 100644 --- a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -26,11 +26,11 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ChildTaskRequest; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import 
org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java index 8ace072fa88a..25de821e2271 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.support.replication; import com.carrotsearch.hppc.cursors.IntObjectCursor; - import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ReplicationResponse; @@ -32,11 +31,11 @@ import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.index.shard.ShardId; diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 28d21c6dbd9c..1ddddbf88886 100644 --- 
a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -28,10 +28,8 @@ import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.support.TransportActions; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; -import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -43,6 +41,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java b/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java index dd1b4260a3c0..fba7d80299bf 100644 --- a/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.support.ActionFilters; 
import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -34,6 +33,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; diff --git a/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java index 649ef1e85755..0c7f0627c662 100644 --- a/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.support.TransportActions; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -34,6 +33,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import 
org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java index 53c0d8519977..a14c6e00e14e 100644 --- a/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java @@ -28,11 +28,11 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ChildTaskRequest; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java b/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java index 4f617876e202..d71958cefde4 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java @@ -20,13 +20,12 @@ package org.elasticsearch.action.termvectors; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.DocumentRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import 
org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java b/core/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java index 94b0e745a8e8..197ab4260b2d 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java @@ -23,10 +23,10 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.ShardIterator; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java b/core/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java index 5b0b9fd27262..da8754542ab9 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java +++ 
b/core/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java @@ -23,10 +23,10 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.ShardIterator; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; diff --git a/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java index 75feeb8fbca4..0363ef8fe431 100644 --- a/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java +++ b/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java @@ -35,13 +35,13 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.single.instance.TransportInstanceSingleOperationAction; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.PlainShardIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import 
org.elasticsearch.common.inject.Inject; diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index 42290e71779e..47dd2ce9ae6c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -53,7 +53,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationD import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; -import org.elasticsearch.cluster.service.InternalClusterService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; @@ -137,7 +137,7 @@ public class ClusterModule extends AbstractModule { bind(GatewayAllocator.class).asEagerSingleton(); bind(AllocationService.class).asEagerSingleton(); bind(DiscoveryNodeService.class).asEagerSingleton(); - bind(ClusterService.class).to(InternalClusterService.class).asEagerSingleton(); + bind(ClusterService.class).asEagerSingleton(); bind(NodeConnectionsService.class).asEagerSingleton(); bind(OperationRouting.class).asEagerSingleton(); bind(MetaDataCreateIndexService.class).asEagerSingleton(); diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterService.java b/core/src/main/java/org/elasticsearch/cluster/ClusterService.java deleted file mode 100644 index 10d547afc5c5..000000000000 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterService.java +++ /dev/null @@ -1,156 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.cluster; - -import org.elasticsearch.cluster.block.ClusterBlock; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.OperationRouting; -import org.elasticsearch.cluster.service.PendingClusterTask; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.component.LifecycleComponent; -import org.elasticsearch.common.unit.TimeValue; - -import java.util.List; - -/** - * The cluster service allowing to both register for cluster state events ({@link ClusterStateListener}) - * and submit state update tasks ({@link ClusterStateUpdateTask}. - */ -public interface ClusterService extends LifecycleComponent { - - /** - * The local node. - */ - DiscoveryNode localNode(); - - /** - * The current state. - */ - ClusterState state(); - - /** - * Adds an initial block to be set on the first cluster state created. - */ - void addInitialStateBlock(ClusterBlock block) throws IllegalStateException; - - /** - * Remove an initial block to be set on the first cluster state created. - */ - void removeInitialStateBlock(ClusterBlock block) throws IllegalStateException; - - /** - * Remove an initial block to be set on the first cluster state created. 
- */ - void removeInitialStateBlock(int blockId) throws IllegalStateException; - - /** - * The operation routing. - */ - OperationRouting operationRouting(); - - /** - * Adds a priority listener for updated cluster states. - */ - void addFirst(ClusterStateListener listener); - - /** - * Adds last listener. - */ - void addLast(ClusterStateListener listener); - - /** - * Adds a listener for updated cluster states. - */ - void add(ClusterStateListener listener); - - /** - * Removes a listener for updated cluster states. - */ - void remove(ClusterStateListener listener); - - /** - * Add a listener for on/off local node master events - */ - void add(LocalNodeMasterListener listener); - - /** - * Remove the given listener for on/off local master events - */ - void remove(LocalNodeMasterListener listener); - - /** - * Adds a cluster state listener that will timeout after the provided timeout, - * and is executed after the clusterstate has been successfully applied ie. is - * in state {@link org.elasticsearch.cluster.ClusterState.ClusterStateStatus#APPLIED} - * NOTE: a {@code null} timeout means that the listener will never be removed - * automatically - */ - void add(@Nullable TimeValue timeout, TimeoutClusterStateListener listener); - - /** - * Submits a cluster state update task; submitted updates will be - * batched across the same instance of executor. 
The exact batching - * semantics depend on the underlying implementation but a rough - * guideline is that if the update task is submitted while there - * are pending update tasks for the same executor, these update - * tasks will all be executed on the executor in a single batch - * - * @param source the source of the cluster state update task - * @param task the state needed for the cluster state update task - * @param config the cluster state update task configuration - * @param executor the cluster state update task executor; tasks - * that share the same executor will be executed - * batches on this executor - * @param listener callback after the cluster state update task - * completes - * @param the type of the cluster state update task state - */ - void submitStateUpdateTask(final String source, final T task, - final ClusterStateTaskConfig config, - final ClusterStateTaskExecutor executor, - final ClusterStateTaskListener listener); - - /** - * Submits a cluster state update task; unlike {@link #submitStateUpdateTask(String, Object, ClusterStateTaskConfig, ClusterStateTaskExecutor, ClusterStateTaskListener)}, - * submitted updates will not be batched. - * - * @param source the source of the cluster state update task - * @param updateTask the full context for the cluster state update - * task - */ - void submitStateUpdateTask(final String source, final ClusterStateUpdateTask updateTask); - - /** - * Returns the tasks that are pending. - */ - List pendingTasks(); - - /** - * Returns the number of currently pending tasks. 
- */ - int numberOfPendingTasks(); - - /** - * Returns the maximum wait time for tasks in the queue - * - * @return A zero time value if the queue is empty, otherwise the time value oldest task waiting in the queue - */ - TimeValue getMaxTaskWaitTime(); -} diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java index 1b3ddcfebf9d..e6cc335a4780 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -37,7 +37,7 @@ import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.service.InternalClusterService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; @@ -68,7 +68,7 @@ import java.util.Set; * exception of the {@link RoutingNodes} structure, which is built on demand from the {@link RoutingTable}, * and cluster state {@link #status}, which is updated during cluster state publishing and applying * processing. The cluster state can be updated only on the master node. All updates are performed by on a - * single thread and controlled by the {@link InternalClusterService}. After every update the + * single thread and controlled by the {@link ClusterService}. After every update the * {@link Discovery#publish} method publishes new version of the cluster state to all other nodes in the * cluster. The actual publishing mechanism is delegated to the {@link Discovery#publish} method and depends on * the type of discovery. 
For example, for local discovery it is implemented by the {@link LocalDiscovery#publish} diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java b/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java index dd30a7116883..d79a00dc3fec 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java @@ -20,6 +20,7 @@ package org.elasticsearch.cluster; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.unit.TimeValue; @@ -44,7 +45,7 @@ public class ClusterStateObserver { } }; - private final ClusterService clusterService; + private final ClusterService clusterService; private final ThreadContext contextHolder; volatile TimeValue timeOutValue; diff --git a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index 896793f1bf35..9a9ee06ce19b 100644 --- a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -33,6 +33,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java 
b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 3baf91a9dc83..68926368ddb6 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.cluster.action.shard; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.ClusterStateTaskConfig; @@ -30,7 +29,6 @@ import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.MasterNodeChangePredicate; import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingService; @@ -38,6 +36,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractComponent; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index c19346cf74f6..db6871b06454 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -30,7 +30,7 @@ 
import org.elasticsearch.cluster.InternalClusterInfoService; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; -import org.elasticsearch.cluster.service.InternalClusterService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; @@ -738,7 +738,7 @@ public class MetaData implements Iterable, Diffable, Fr InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING.getKey(), InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING.getKey(), DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), - InternalClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.getKey())); + ClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.getKey())); /** As of 2.0 we require units for time and byte-sized settings. This methods adds default units to any cluster settings that don't * specify a unit. 
*/ diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 177c46e5537f..e0db19cb5169 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -28,7 +28,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlock; @@ -39,6 +38,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.ValidationException; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java index 5492325b6510..5e6d35aacfe1 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.action.support.master.MasterNodeRequest; -import org.elasticsearch.cluster.ClusterService; import 
org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.action.index.NodeIndexDeletedAction; @@ -28,19 +27,17 @@ import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.FutureUtils; -import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.threadpool.ThreadPool; -import java.util.HashSet; import java.util.Set; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.atomic.AtomicBoolean; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java index 1f0eaf0cda08..e39b86a16113 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java @@ -23,9 +23,9 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesClusterStateUpdateRequest; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java index e68b0be36b6f..71962d6356a3 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.close.CloseIndexClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexClusterStateUpdateRequest; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlock; @@ -32,12 +31,12 @@ import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.snapshots.RestoreService; import org.elasticsearch.snapshots.SnapshotsService; diff --git 
a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java index da2fc064dc47..1206185a6091 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java @@ -21,9 +21,9 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.support.master.MasterNodeRequest; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.ValidationException; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 4857fcc27b9d..cafdc4581a18 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -23,12 +23,12 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingClusterStateUpdateRequest; import org.elasticsearch.cluster.AckedClusterStateTaskListener; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskConfig; import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.node.DiscoveryNode; 
+import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.collect.Tuple; @@ -38,7 +38,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.NodeServicesProvider; import org.elasticsearch.index.mapper.DocumentMapper; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index b27647344691..1c57f446074f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsCluster import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeSettingsClusterStateUpdateRequest; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; @@ -34,6 +33,7 @@ import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.collect.Tuple; import 
org.elasticsearch.common.component.AbstractComponent; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java index c683f0200dcc..90565a6569df 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java @@ -20,12 +20,12 @@ package org.elasticsearch.cluster.routing; import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/ClusterService.java similarity index 85% rename from core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java rename to core/src/main/java/org/elasticsearch/cluster/service/ClusterService.java index c57cfd5a57cc..fa9b3492685c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/ClusterService.java @@ -22,7 +22,6 @@ package org.elasticsearch.cluster.service; import org.elasticsearch.cluster.AckedClusterStateTaskListener; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.ClusterState.Builder; import org.elasticsearch.cluster.ClusterStateListener; @@ -88,11 +87,11 @@ import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadF /** * */ -public class InternalClusterService extends AbstractLifecycleComponent implements ClusterService { +public class ClusterService extends AbstractLifecycleComponent { public static final Setting CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING = - Setting.positiveTimeSetting("cluster.service.slow_task_logging_threshold", TimeValue.timeValueSeconds(30), - Property.Dynamic, Property.NodeScope); + Setting.positiveTimeSetting("cluster.service.slow_task_logging_threshold", TimeValue.timeValueSeconds(30), + Property.Dynamic, Property.NodeScope); public static final String UPDATE_THREAD_NAME = "clusterService#updateTask"; private final ThreadPool threadPool; @@ -116,7 +115,8 @@ public class InternalClusterService extends AbstractLifecycleComponent> updateTasksPerExecutor = new HashMap<>(); // TODO this is rather frequently changing I guess a Synced Set would be better here and a dedicated remove API private final Collection postAppliedListeners = new CopyOnWriteArrayList<>(); - private final Iterable preAppliedListeners = Iterables.concat(priorityClusterStateListeners, clusterStateListeners, lastClusterStateListeners); + private final Iterable preAppliedListeners = Iterables.concat(priorityClusterStateListeners, + clusterStateListeners, lastClusterStateListeners); private final LocalNodeMasterListeners localNodeMasterListeners; @@ -129,8 +129,8 @@ public class InternalClusterService extends AbstractLifecycleComponent the type of the cluster state update task state + */ public void submitStateUpdateTask(final String source, final T task, final ClusterStateTaskConfig config, final ClusterStateTaskExecutor executor, @@ -317,9 +370,9 @@ public class InternalClusterService extends AbstractLifecycleComponent void innerSubmitStateUpdateTask(final String source, final 
T task, - final ClusterStateTaskConfig config, - final ClusterStateTaskExecutor executor, - final SafeClusterStateTaskListener listener) { + final ClusterStateTaskConfig config, + final ClusterStateTaskExecutor executor, + final SafeClusterStateTaskListener listener) { if (!lifecycle.started()) { return; } @@ -335,7 +388,8 @@ public class InternalClusterService extends AbstractLifecycleComponent pendingTasks() { PrioritizedEsThreadPoolExecutor.Pending[] pendings = updateTasksExecutor.getPending(); List pendingClusterTasks = new ArrayList<>(pendings.length); @@ -369,24 +425,32 @@ public class InternalClusterService extends AbstractLifecycleComponentbuilder().failures(toExecute.stream().map(updateTask -> updateTask.task)::iterator, e).build(previousClusterState); + batchResult = ClusterStateTaskExecutor.BatchResult.builder() + .failures(toExecute.stream().map(updateTask -> updateTask.task)::iterator, e) + .build(previousClusterState); } assert batchResult.executionResults != null; assert batchResult.executionResults.size() == toExecute.size() - : String.format(Locale.ROOT, "expected [%d] task result%s but was [%d]", toExecute.size(), toExecute.size() == 1 ? "" : "s", batchResult.executionResults.size()); + : String.format(Locale.ROOT, "expected [%d] task result%s but was [%d]", toExecute.size(), + toExecute.size() == 1 ? 
"" : "s", batchResult.executionResults.size()); boolean assertsEnabled = false; assert (assertsEnabled = true); if (assertsEnabled) { @@ -469,11 +537,11 @@ public class InternalClusterService extends AbstractLifecycleComponent proccessedListeners.add(updateTask), - ex -> { - logger.debug("cluster state update task [{}] failed", ex, updateTask.source); - updateTask.listener.onFailure(updateTask.source, ex); - } + () -> proccessedListeners.add(updateTask), + ex -> { + logger.debug("cluster state update task [{}] failed", ex, updateTask.source); + updateTask.listener.onFailure(updateTask.source, ex); + } ); } @@ -497,7 +565,8 @@ public class InternalClusterService extends AbstractLifecycleComponent executor, ClusterStateTaskListener listener) { + UpdateTask(String source, T task, ClusterStateTaskConfig config, ClusterStateTaskExecutor executor, + ClusterStateTaskListener listener) { super(config.priority(), source); this.task = task; this.config = config; @@ -738,7 +811,8 @@ public class InternalClusterService extends AbstractLifecycleComponent slowTaskLoggingThreshold.getMillis()) { - logger.warn("cluster state update task [{}] took [{}] above the warn threshold of {}", source, executionTime, slowTaskLoggingThreshold); + logger.warn("cluster state update task [{}] took [{}] above the warn threshold of {}", source, executionTime, + slowTaskLoggingThreshold); } } @@ -873,7 +947,8 @@ public class InternalClusterService extends AbstractLifecycleComponent ackTimeoutCallback; private Throwable lastFailure; - AckCountDownListener(AckedClusterStateTaskListener ackedTaskListener, long clusterStateVersion, DiscoveryNodes nodes, ThreadPool threadPool) { + AckCountDownListener(AckedClusterStateTaskListener ackedTaskListener, long clusterStateVersion, DiscoveryNodes nodes, + ThreadPool threadPool) { this.ackedTaskListener = ackedTaskListener; this.clusterStateVersion = clusterStateVersion; this.nodes = nodes; diff --git 
a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index af3504cd8aa2..14e795866177 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -43,7 +43,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDeci import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; -import org.elasticsearch.cluster.service.InternalClusterService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; @@ -255,7 +255,7 @@ public final class ClusterSettings extends AbstractScopedSettings { HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, - InternalClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, SearchService.DEFAULT_SEARCH_TIMEOUT_SETTING, ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING, TransportService.TRACE_LOG_EXCLUDE_SETTING, diff --git a/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java index 0462d6a8d8dd..cf697871d357 100644 --- a/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java @@ -21,7 +21,6 
@@ package org.elasticsearch.discovery.local; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.Diff; @@ -31,6 +30,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.BytesStreamOutput; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java b/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java index 19a2cf06bf4c..0edbf8841ad3 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java @@ -19,7 +19,6 @@ package org.elasticsearch.discovery.zen; import org.elasticsearch.ElasticsearchTimeoutException; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.NotMasterException; @@ -28,7 +27,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.service.InternalClusterService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractComponent; 
import org.elasticsearch.common.settings.Settings; @@ -346,7 +345,7 @@ public class NodeJoinController extends AbstractComponent { } private void assertClusterStateThread() { - assert clusterService instanceof InternalClusterService == false || ((InternalClusterService) clusterService).assertClusterStateThread(); + assert clusterService instanceof ClusterService == false || ((ClusterService) clusterService).assertClusterStateThread(); } } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index a19f4fa4af16..07cd3853cb6d 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -24,7 +24,6 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.NotMasterException; @@ -35,7 +34,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.service.InternalClusterService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.Lifecycle; @@ -929,7 +928,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen protected ClusterState rejoin(ClusterState clusterState, String reason) { // *** called from within an cluster state update task *** // - assert 
Thread.currentThread().getName().contains(InternalClusterService.UPDATE_THREAD_NAME); + assert Thread.currentThread().getName().contains(ClusterService.UPDATE_THREAD_NAME); logger.warn("{}, current nodes: {}", reason, clusterState.nodes()); nodesFD.stop(); @@ -959,7 +958,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen private ClusterState handleAnotherMaster(ClusterState localClusterState, final DiscoveryNode otherMaster, long otherClusterStateVersion, String reason) { assert localClusterState.nodes().localNodeMaster() : "handleAnotherMaster called but current node is not a master"; - assert Thread.currentThread().getName().contains(InternalClusterService.UPDATE_THREAD_NAME) : "not called from the cluster state update thread"; + assert Thread.currentThread().getName().contains(ClusterService.UPDATE_THREAD_NAME) : "not called from the cluster state update thread"; if (otherClusterStateVersion > localClusterState.version()) { return rejoin(localClusterState, "zen-disco-discovered another master with a new cluster_state [" + otherMaster + "][" + reason + "]"); @@ -1197,7 +1196,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen } private void assertClusterStateThread() { - assert clusterService instanceof InternalClusterService == false || ((InternalClusterService) clusterService).assertClusterStateThread(); + assert clusterService instanceof ClusterService == false || ((ClusterService) clusterService).assertClusterStateThread(); } } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java b/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java index 73be1d3bb286..96ed7f76419c 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java @@ -21,12 +21,12 @@ package org.elasticsearch.discovery.zen.fd; import 
org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java b/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java index 04af8207c37b..de4caf664eaf 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java @@ -19,9 +19,9 @@ package org.elasticsearch.discovery.zen.membership; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/core/src/main/java/org/elasticsearch/gateway/Gateway.java b/core/src/main/java/org/elasticsearch/gateway/Gateway.java index 2d1d48cbd83a..f5d38112c4fd 100644 --- a/core/src/main/java/org/elasticsearch/gateway/Gateway.java +++ b/core/src/main/java/org/elasticsearch/gateway/Gateway.java @@ -25,11 +25,11 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.cluster.ClusterChangedEvent; -import 
org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.Discovery; diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java b/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java index acd650bc6f72..0059a0ef61bf 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java @@ -22,7 +22,6 @@ package org.elasticsearch.gateway; import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingNodes; @@ -31,6 +30,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.StartedRerouteAllocation; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lease.Releasables; diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java index 6d85fb2f41db..16d67a84c4a3 100644 --- 
a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java @@ -21,7 +21,6 @@ package org.elasticsearch.gateway; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ClusterStateUpdateTask; @@ -34,6 +33,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; diff --git a/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java b/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java index 041b8cafeccc..b14dcc6d1a4e 100644 --- a/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java +++ b/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java @@ -19,7 +19,6 @@ package org.elasticsearch.gateway; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlocks; @@ -30,6 +29,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.service.ClusterService; import 
org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java index fb174f4bd454..0fd1fd35809f 100644 --- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java +++ b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java @@ -29,10 +29,10 @@ import org.elasticsearch.action.support.nodes.BaseNodesRequest; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java index d1aa2a8b3834..7a0902088181 100644 --- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java @@ -29,11 +29,11 @@ import org.elasticsearch.action.support.nodes.BaseNodesRequest; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import 
org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java index 330787a68a35..c15d2cfcdbe5 100644 --- a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java +++ b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java @@ -32,9 +32,9 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index 58f128242dce..b43d33b1bd96 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -30,10 +30,10 @@ import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; import org.elasticsearch.action.admin.indices.stats.IndexShardStats; import 
org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.bytes.BytesReference; @@ -93,7 +93,6 @@ import java.nio.file.Files; import java.util.ArrayList; import java.util.EnumSet; import java.util.HashMap; -import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; diff --git a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index 5d501b65686f..46ead3fbf369 100644 --- a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -22,7 +22,6 @@ package org.elasticsearch.indices.cluster; import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.action.index.NodeIndexDeletedAction; @@ -38,6 +37,7 @@ import org.elasticsearch.cluster.routing.RestoreSource; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import 
org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.compress.CompressedXContent; diff --git a/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java b/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java index 6943ecd75040..b1d7af7ff9c4 100644 --- a/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java +++ b/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -31,6 +30,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java index 934730c7c930..aaf351f60561 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java @@ -20,9 +20,9 @@ package org.elasticsearch.indices.recovery; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.cluster.ClusterService; import 
org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetService.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetService.java index ab8c87cd636b..d57cfbb98c8b 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetService.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetService.java @@ -24,10 +24,10 @@ import org.apache.lucene.store.RateLimiter; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; diff --git a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index 5dc8af41e80b..d2db41a7a0cc 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -21,7 +21,6 @@ package org.elasticsearch.indices.store; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import 
org.elasticsearch.cluster.ClusterStateObserver; @@ -31,6 +30,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; diff --git a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index 0422c0944e24..35a34ebea1b2 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -29,11 +29,11 @@ import org.elasticsearch.action.support.nodes.BaseNodesRequest; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java index 422a08042ce8..a30ae49ff7b5 100644 --- a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java +++ 
b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java @@ -30,9 +30,9 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.delete.DeleteRequest; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.uid.Versions; diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java index ac2df419f55a..7e0dc1b4ffa9 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java @@ -28,11 +28,11 @@ import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.ingest.WritePipelineResponse; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; @@ -40,7 +40,6 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.ingest.core.IngestInfo; import org.elasticsearch.ingest.core.Pipeline; import org.elasticsearch.ingest.core.Processor; -import 
org.elasticsearch.ingest.core.ProcessorInfo; import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.script.ScriptService; @@ -51,7 +50,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Objects; public class PipelineStore extends AbstractComponent implements Closeable, ClusterStateListener { diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index 7052ca63189c..6e270ffc3ff5 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -30,7 +30,6 @@ import org.elasticsearch.client.Client; import org.elasticsearch.client.node.NodeClientModule; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.ClusterNameModule; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.MasterNodeChangePredicate; @@ -39,7 +38,7 @@ import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeService; import org.elasticsearch.cluster.routing.RoutingService; -import org.elasticsearch.cluster.service.InternalClusterService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.StopWatch; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleComponent; @@ -296,9 +295,7 @@ public class Node implements Closeable { injector.getInstance(MonitorService.class).start(); injector.getInstance(RestController.class).start(); - assert injector.getInstance(ClusterService.class) instanceof InternalClusterService : - "node cluster service implementation must inherit from InternalClusterService"; - final InternalClusterService 
clusterService = (InternalClusterService) injector.getInstance(ClusterService.class); + final ClusterService clusterService = injector.getInstance(ClusterService.class); final NodeConnectionsService nodeConnectionsService = injector.getInstance(NodeConnectionsService.class); nodeConnectionsService.start(); diff --git a/core/src/main/java/org/elasticsearch/node/service/NodeService.java b/core/src/main/java/org/elasticsearch/node/service/NodeService.java index 7096b7cc56cb..cb11fc02443f 100644 --- a/core/src/main/java/org/elasticsearch/node/service/NodeService.java +++ b/core/src/main/java/org/elasticsearch/node/service/NodeService.java @@ -24,7 +24,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; -import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; diff --git a/core/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/core/src/main/java/org/elasticsearch/repositories/RepositoriesService.java index 6eb32cfb06f2..da2d96880958 100644 --- a/core/src/main/java/org/elasticsearch/repositories/RepositoriesService.java +++ b/core/src/main/java/org/elasticsearch/repositories/RepositoriesService.java @@ -22,7 +22,6 @@ package org.elasticsearch.repositories; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ack.ClusterStateUpdateRequest; @@ -31,6 +30,7 @@ import 
org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoriesMetaData; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Injector; diff --git a/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java b/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java index 91600488332e..48ffbd5c1cbf 100644 --- a/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java @@ -22,9 +22,9 @@ package org.elasticsearch.repositories; import com.carrotsearch.hppc.ObjectContainer; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/core/src/main/java/org/elasticsearch/repositories/uri/URLIndexShardRepository.java b/core/src/main/java/org/elasticsearch/repositories/uri/URLIndexShardRepository.java index ab9ec72463a5..616a36d50666 100644 --- a/core/src/main/java/org/elasticsearch/repositories/uri/URLIndexShardRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/uri/URLIndexShardRepository.java @@ -19,7 +19,7 @@ package org.elasticsearch.repositories.uri; -import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.service.ClusterService; import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; diff --git a/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java b/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java index bf3f0a3e5df8..205bea92f961 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 1694533f99af..be2e52b5aa31 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -20,13 +20,12 @@ package org.elasticsearch.search; import com.carrotsearch.hppc.ObjectFloatHashMap; - import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.TopDocs; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.cache.recycler.PageCacheRecycler; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesReference; diff --git a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java 
b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java index acbc15be72c0..be2d6ccfea19 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -26,7 +26,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ClusterStateUpdateTask; @@ -49,6 +48,7 @@ import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index 949d4607b638..4a15dbdac2e9 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -23,13 +23,13 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.apache.lucene.index.IndexCommit; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.SnapshotsInProgress; import 
org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index fb878d6cb38a..9305d5f941f7 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -26,7 +26,6 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ClusterStateUpdateTask; @@ -44,6 +43,7 @@ import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.Tuple; diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index 5846b1d5b0ff..f46c6034626a 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -23,7 +23,6 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import 
org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ClusterStateTaskConfig; @@ -37,6 +36,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; @@ -55,7 +55,6 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.node.Node; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportSettings; import java.util.Arrays; diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java index 586f178d12d6..379cb5942a2e 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java @@ -29,8 +29,8 @@ import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.BaseNodesRequest; import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.node.DiscoveryNode; +import 
org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; @@ -49,6 +49,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicReference; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -324,7 +325,7 @@ public class CancellableTasksTests extends TaskManagerTestCase { DiscoveryNode master = discoveryNodes[0]; for (int i = 1; i < testNodes.length; i++) { // Notify only nodes that should remain in the cluster - testNodes[i].clusterService.setState(ClusterStateCreationUtils.state(testNodes[i].discoveryNode, master, discoveryNodes)); + setState(testNodes[i].clusterService, ClusterStateCreationUtils.state(testNodes[i].discoveryNode, master, discoveryNodes)); } if (simulateBanBeforeLeaving) { diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index bc92635c9480..48d9f8fed40c 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -30,9 +30,9 @@ import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; +import 
org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -40,7 +40,6 @@ import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.test.tasks.MockTaskManager; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -58,6 +57,9 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReferenceArray; import java.util.function.Supplier; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; + /** * The test case for unit testing task manager and related transport actions */ @@ -137,14 +139,14 @@ public abstract class TaskManagerTestCase extends ESTestCase { * Simulates node-based task that can be used to block node tasks so they are guaranteed to be registered by task manager */ abstract class AbstractTestNodesAction, NodeRequest extends BaseNodeRequest> - extends TransportNodesAction { + extends TransportNodesAction { AbstractTestNodesAction(Settings settings, String actionName, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, Supplier request, Supplier nodeRequest) { super(settings, actionName, clusterName, threadPool, clusterService, transportService, - new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), - request, nodeRequest, ThreadPool.Names.GENERIC); + new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), + request, nodeRequest, ThreadPool.Names.GENERIC); } @Override @@ 
-182,8 +184,8 @@ public abstract class TaskManagerTestCase extends ESTestCase { public static class TestNode implements Releasable { public TestNode(String name, ThreadPool threadPool, Settings settings) { transportService = new TransportService(settings, - new LocalTransport(settings, threadPool, Version.CURRENT, new NamedWriteableRegistry()), - threadPool) { + new LocalTransport(settings, threadPool, Version.CURRENT, new NamedWriteableRegistry()), + threadPool) { @Override protected TaskManager createTaskManager() { if (MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.get(settings)) { @@ -194,19 +196,19 @@ public abstract class TaskManagerTestCase extends ESTestCase { } }; transportService.start(); - clusterService = new TestClusterService(threadPool); + clusterService = createClusterService(threadPool); clusterService.add(transportService.getTaskManager()); discoveryNode = new DiscoveryNode(name, transportService.boundAddress().publishAddress(), Version.CURRENT); IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(settings); ActionFilters actionFilters = new ActionFilters(Collections.emptySet()); transportListTasksAction = new TransportListTasksAction(settings, clusterName, threadPool, clusterService, transportService, - actionFilters, indexNameExpressionResolver); + actionFilters, indexNameExpressionResolver); transportCancelTasksAction = new TransportCancelTasksAction(settings, clusterName, threadPool, clusterService, transportService, - actionFilters, indexNameExpressionResolver); + actionFilters, indexNameExpressionResolver); transportService.acceptIncomingRequests(); } - public final TestClusterService clusterService; + public final ClusterService clusterService; public final TransportService transportService; public final DiscoveryNode discoveryNode; public final TransportListTasksAction transportListTasksAction; @@ -214,6 +216,7 @@ public abstract class TaskManagerTestCase extends ESTestCase { @Override public void 
close() { + clusterService.close(); transportService.close(); } } @@ -225,7 +228,7 @@ public abstract class TaskManagerTestCase extends ESTestCase { } DiscoveryNode master = discoveryNodes[0]; for (TestNode node : nodes) { - node.clusterService.setState(ClusterStateCreationUtils.state(node.discoveryNode, master, discoveryNodes)); + setState(node.clusterService, ClusterStateCreationUtils.state(node.discoveryNode, master, discoveryNodes)); } for (TestNode nodeA : nodes) { for (TestNode nodeB : nodes) { diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java index 3aa27b2d1756..b22d93ef6b22 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -31,10 +31,8 @@ import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeAction; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; import org.elasticsearch.action.fieldstats.FieldStatsAction; import org.elasticsearch.action.index.IndexAction; -import org.elasticsearch.action.percolate.PercolateAction; -import org.elasticsearch.action.percolate.PercolateSourceBuilder; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java index e8dcd228e50d..72ea730f881b 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java +++ 
b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java @@ -35,10 +35,10 @@ import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index 2fe79d25ebb7..64d69a4864fe 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -35,9 +35,9 @@ import org.elasticsearch.action.support.tasks.BaseTasksRequest; import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -48,7 +48,6 @@ import org.elasticsearch.test.tasks.MockTaskManager; import 
org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; - import java.io.IOException; import java.util.ArrayList; import java.util.Collections; diff --git a/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java index d0a0e094dcb7..e36b4e4f0287 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -28,19 +28,19 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.AutoCreateIndex; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.junit.After; import org.junit.Before; import java.nio.charset.StandardCharsets; @@ -49,6 +49,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.function.LongSupplier; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; import static 
org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -57,16 +58,23 @@ import static org.mockito.Mockito.mock; public class TransportBulkActionTookTests extends ESTestCase { private ThreadPool threadPool; + private ClusterService clusterService; @Before public void setUp() throws Exception { super.setUp(); threadPool = mock(ThreadPool.class); + clusterService = createClusterService(threadPool); + } + + @After + public void tearDown() throws Exception { + super.tearDown(); + clusterService.close(); } private TransportBulkAction createAction(boolean controlled, AtomicLong expected) { CapturingTransport capturingTransport = new CapturingTransport(); - ClusterService clusterService = new TestClusterService(threadPool); TransportService transportService = new TransportService(capturingTransport, threadPool); transportService.start(); transportService.acceptIncomingRequests(); diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java index fa9728c4cd1f..c027e4ecd18a 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java @@ -29,10 +29,10 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.transport.DummyTransportAddress; import 
org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; diff --git a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index 925d4a929015..b166f5f45c39 100644 --- a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -44,6 +44,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; @@ -51,13 +52,13 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportResponseOptions; import org.elasticsearch.transport.TransportService; +import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -73,6 +74,8 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static 
org.elasticsearch.cluster.service.ClusterServiceUtils.setState; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.object.HasToString.hasToString; @@ -83,7 +86,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { private static final String TEST_CLUSTER = "test-cluster"; private static ThreadPool THREAD_POOL; - private TestClusterService clusterService; + private ClusterService clusterService; private CapturingTransport transport; private TestTransportBroadcastByNodeAction action; @@ -182,7 +185,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { public void setUp() throws Exception { super.setUp(); transport = new CapturingTransport(); - clusterService = new TestClusterService(THREAD_POOL); + clusterService = createClusterService(THREAD_POOL); final TransportService transportService = new TransportService(transport, THREAD_POOL); transportService.start(); transportService.acceptIncomingRequests(); @@ -197,10 +200,16 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { ); } - void setClusterState(TestClusterService clusterService, String index) { + @After + public void tearDown() throws Exception { + super.tearDown(); + clusterService.close(); + } + + void setClusterState(ClusterService clusterService, String index) { int numberOfNodes = randomIntBetween(3, 5); DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); - IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(new Index(index,"_na_")); + IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(new Index(index, "_na_")); int shardIndex = -1; for (int i = 0; i < numberOfNodes; i++) { @@ -221,7 +230,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { stateBuilder.nodes(discoBuilder); stateBuilder.routingTable(RoutingTable.builder().add(indexRoutingTable.build()).build()); ClusterState clusterState = 
stateBuilder.build(); - clusterService.setState(clusterState); + setState(clusterService, clusterState); } static DiscoveryNode newNode(int nodeId) { @@ -241,7 +250,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { ClusterBlocks.Builder block = ClusterBlocks.builder() .addGlobalBlock(new ClusterBlock(1, "test-block", false, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL)); - clusterService.setState(ClusterState.builder(clusterService.state()).blocks(block)); + setState(clusterService, ClusterState.builder(clusterService.state()).blocks(block)); try { action.new AsyncAction(null, request, listener).start(); fail("expected ClusterBlockException"); @@ -256,7 +265,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { ClusterBlocks.Builder block = ClusterBlocks.builder() .addIndexBlock(TEST_INDEX, new ClusterBlock(1, "test-block", false, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL)); - clusterService.setState(ClusterState.builder(clusterService.state()).blocks(block)); + setState(clusterService, ClusterState.builder(clusterService.state()).blocks(block)); try { action.new AsyncAction(null, request, listener).start(); fail("expected ClusterBlockException"); @@ -301,7 +310,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { DiscoveryNodes.Builder builder = DiscoveryNodes.builder(clusterService.state().getNodes()); builder.remove(masterNode.id()); - clusterService.setState(ClusterState.builder(clusterService.state()).nodes(builder)); + setState(clusterService, ClusterState.builder(clusterService.state()).nodes(builder)); action.new AsyncAction(null, request, listener).start(); @@ -348,7 +357,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { TransportResponse response = channel.getCapturedResponse(); assertTrue(response instanceof TransportBroadcastByNodeAction.NodeResponse); - TransportBroadcastByNodeAction.NodeResponse nodeResponse = 
(TransportBroadcastByNodeAction.NodeResponse)response; + TransportBroadcastByNodeAction.NodeResponse nodeResponse = (TransportBroadcastByNodeAction.NodeResponse) response; // check the operation was executed on the correct node assertEquals("node id", nodeId, nodeResponse.getNodeId()); @@ -387,7 +396,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { builder.remove(failedMasterNode.id()); builder.masterNodeId(null); - clusterService.setState(ClusterState.builder(clusterService.state()).nodes(builder)); + setState(clusterService, ClusterState.builder(clusterService.state()).nodes(builder)); } action.new AsyncAction(null, request, listener).start(); diff --git a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java index 860f95ace55c..226099e32f79 100644 --- a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.ThreadedActionListener; import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.block.ClusterBlock; @@ -36,6 +35,7 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.unit.TimeValue; @@ -44,11 +44,11 @@ import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.TransportService; +import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -58,13 +58,15 @@ import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; public class TransportMasterNodeActionTests extends ESTestCase { private static ThreadPool threadPool; - private TestClusterService clusterService; + private ClusterService clusterService; private TransportService transportService; private CapturingTransport transport; private DiscoveryNode localNode; @@ -81,13 +83,20 @@ public class TransportMasterNodeActionTests extends ESTestCase { public void setUp() throws Exception { super.setUp(); transport = new CapturingTransport(); - clusterService = new TestClusterService(threadPool); + clusterService = createClusterService(threadPool); transportService = new TransportService(transport, threadPool); transportService.start(); transportService.acceptIncomingRequests(); localNode = new DiscoveryNode("local_node", DummyTransportAddress.INSTANCE, Version.CURRENT); remoteNode = new DiscoveryNode("remote_node", DummyTransportAddress.INSTANCE, 
Version.CURRENT); - allNodes = new DiscoveryNode[] { localNode, remoteNode }; + allNodes = new DiscoveryNode[]{localNode, remoteNode}; + } + + @After + public void tearDown() throws Exception { + super.tearDown(); + clusterService.close(); + transportService.close(); } @AfterClass @@ -157,7 +166,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { final Throwable exception = new Throwable(); final Response response = new Response(); - clusterService.setState(ClusterStateCreationUtils.state(localNode, localNode, allNodes)); + setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, allNodes)); new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) { @Override @@ -194,7 +203,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { randomFrom(RestStatus.values()), ClusterBlockLevel.ALL); ClusterState stateWithBlock = ClusterState.builder(ClusterStateCreationUtils.state(localNode, localNode, allNodes)) .blocks(ClusterBlocks.builder().addGlobalBlock(block)).build(); - clusterService.setState(stateWithBlock); + setState(clusterService, stateWithBlock); new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) { @Override @@ -206,7 +215,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { if (retryableBlock && unblockBeforeTimeout) { assertFalse(listener.isDone()); - clusterService.setState(ClusterState.builder(ClusterStateCreationUtils.state(localNode, localNode, allNodes)) + setState(clusterService, ClusterState.builder(ClusterStateCreationUtils.state(localNode, localNode, allNodes)) .blocks(ClusterBlocks.EMPTY_CLUSTER_BLOCK).build()); assertTrue(listener.isDone()); listener.get(); @@ -231,7 +240,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { Request request = new Request(); PlainActionFuture listener = new PlainActionFuture<>(); - clusterService.setState(ClusterStateCreationUtils.state(localNode, randomFrom(null, 
localNode, remoteNode), allNodes)); + setState(clusterService, ClusterStateCreationUtils.state(localNode, randomFrom(null, localNode, remoteNode), allNodes)); new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) { @Override @@ -246,7 +255,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { public void testMasterNotAvailable() throws ExecutionException, InterruptedException { Request request = new Request().masterNodeTimeout(TimeValue.timeValueSeconds(0)); - clusterService.setState(ClusterStateCreationUtils.state(localNode, null, allNodes)); + setState(clusterService, ClusterStateCreationUtils.state(localNode, null, allNodes)); PlainActionFuture listener = new PlainActionFuture<>(); new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool).execute(request, listener); assertTrue(listener.isDone()); @@ -255,18 +264,18 @@ public class TransportMasterNodeActionTests extends ESTestCase { public void testMasterBecomesAvailable() throws ExecutionException, InterruptedException { Request request = new Request(); - clusterService.setState(ClusterStateCreationUtils.state(localNode, null, allNodes)); + setState(clusterService, ClusterStateCreationUtils.state(localNode, null, allNodes)); PlainActionFuture listener = new PlainActionFuture<>(); new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool).execute(request, listener); assertFalse(listener.isDone()); - clusterService.setState(ClusterStateCreationUtils.state(localNode, localNode, allNodes)); + setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, allNodes)); assertTrue(listener.isDone()); listener.get(); } public void testDelegateToMaster() throws ExecutionException, InterruptedException { Request request = new Request(); - clusterService.setState(ClusterStateCreationUtils.state(localNode, remoteNode, allNodes)); + setState(clusterService, ClusterStateCreationUtils.state(localNode, 
remoteNode, allNodes)); PlainActionFuture listener = new PlainActionFuture<>(); new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool).execute(request, listener); @@ -286,7 +295,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { public void testDelegateToFailingMaster() throws ExecutionException, InterruptedException { boolean failsWithConnectTransportException = randomBoolean(); Request request = new Request().masterNodeTimeout(TimeValue.timeValueSeconds(failsWithConnectTransportException ? 60 : 0)); - clusterService.setState(ClusterStateCreationUtils.state(localNode, remoteNode, allNodes)); + setState(clusterService, ClusterStateCreationUtils.state(localNode, remoteNode, allNodes)); PlainActionFuture listener = new PlainActionFuture<>(); new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool).execute(request, listener); @@ -300,7 +309,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { if (failsWithConnectTransportException) { transport.handleRemoteError(capturedRequest.requestId, new ConnectTransportException(remoteNode, "Fake error")); assertFalse(listener.isDone()); - clusterService.setState(ClusterStateCreationUtils.state(localNode, localNode, allNodes)); + setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, allNodes)); assertTrue(listener.isDone()); listener.get(); } else { @@ -322,13 +331,13 @@ public class TransportMasterNodeActionTests extends ESTestCase { final Response response = new Response(); - clusterService.setState(ClusterStateCreationUtils.state(localNode, localNode, allNodes)); + setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, allNodes)); new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) { @Override protected void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception { // The other node has become master, simulate 
failures of this node while publishing cluster state through ZenDiscovery - TransportMasterNodeActionTests.this.clusterService.setState(ClusterStateCreationUtils.state(localNode, remoteNode, allNodes)); + setState(clusterService, ClusterStateCreationUtils.state(localNode, remoteNode, allNodes)); Throwable failure = randomBoolean() ? new Discovery.FailedToCommitClusterStateException("Fake error") : new NotMasterException("Fake error"); diff --git a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java index 0a6f94366f9c..f41b540822e2 100644 --- a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java @@ -24,17 +24,17 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeActionTests; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -50,12 +50,15 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReferenceArray; 
import java.util.function.Supplier; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; + public class TransportNodesActionTests extends ESTestCase { private static ThreadPool THREAD_POOL; private static ClusterName CLUSTER_NAME = new ClusterName("test-cluster"); - private TestClusterService clusterService; + private ClusterService clusterService; private CapturingTransport transport; private TestTransportNodesAction action; @@ -114,7 +117,7 @@ public class TransportNodesActionTests extends ESTestCase { public void setUp() throws Exception { super.setUp(); transport = new CapturingTransport(); - clusterService = new TestClusterService(THREAD_POOL); + clusterService = createClusterService(THREAD_POOL); final TransportService transportService = new TransportService(transport, THREAD_POOL); transportService.start(); transportService.acceptIncomingRequests(); @@ -142,7 +145,7 @@ public class TransportNodesActionTests extends ESTestCase { ClusterState.Builder stateBuilder = ClusterState.builder(CLUSTER_NAME); stateBuilder.nodes(discoBuilder); ClusterState clusterState = stateBuilder.build(); - clusterService.setState(clusterState); + setState(clusterService, clusterState); action = new TestTransportNodesAction( Settings.EMPTY, THREAD_POOL, @@ -155,6 +158,13 @@ public class TransportNodesActionTests extends ESTestCase { ); } + @After + public void tearDown() throws Exception { + super.tearDown(); + clusterService.close(); + transport.close(); + } + private static DiscoveryNode newNode(int nodeId, Map attributes) { String node = "node_" + nodeId; return new DiscoveryNode(node, node, DummyTransportAddress.INSTANCE, attributes, Version.CURRENT); diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index 
03869974444c..4125f02b956e 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -31,10 +31,10 @@ import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.action.support.broadcast.BroadcastResponse; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; @@ -43,10 +43,10 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.local.LocalTransport; +import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -63,6 +63,8 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.state; import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithAssignedPrimariesAndOneReplica; import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithNoShard; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static 
org.elasticsearch.cluster.service.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -70,9 +72,8 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; public class BroadcastReplicationTests extends ESTestCase { private static ThreadPool threadPool; - private TestClusterService clusterService; + private ClusterService clusterService; private TransportService transportService; - private LocalTransport transport; private TestBroadcastReplicationAction broadcastReplicationAction; @BeforeClass @@ -84,14 +85,21 @@ public class BroadcastReplicationTests extends ESTestCase { @Before public void setUp() throws Exception { super.setUp(); - transport = new LocalTransport(Settings.EMPTY, threadPool, Version.CURRENT, new NamedWriteableRegistry()); - clusterService = new TestClusterService(threadPool); + LocalTransport transport = new LocalTransport(Settings.EMPTY, threadPool, Version.CURRENT, new NamedWriteableRegistry()); + clusterService = createClusterService(threadPool); transportService = new TransportService(transport, threadPool); transportService.start(); transportService.acceptIncomingRequests(); broadcastReplicationAction = new TestBroadcastReplicationAction(Settings.EMPTY, threadPool, clusterService, transportService, new ActionFilters(new HashSet()), new IndexNameExpressionResolver(Settings.EMPTY), null); } + @After + public void tearDown() throws Exception { + super.tearDown(); + clusterService.close(); + transportService.close(); + } + @AfterClass public static void afterClass() { ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); @@ -100,7 +108,7 @@ public class BroadcastReplicationTests extends ESTestCase { public void testNotStartedPrimary() throws InterruptedException, ExecutionException, IOException { final String index = "test"; - clusterService.setState(state(index, randomBoolean(), + setState(clusterService, 
state(index, randomBoolean(), randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED, ShardRoutingState.UNASSIGNED)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); Future response = (broadcastReplicationAction.execute(new DummyBroadcastRequest().indices(index))); @@ -119,7 +127,7 @@ public class BroadcastReplicationTests extends ESTestCase { public void testStartedPrimary() throws InterruptedException, ExecutionException, IOException { final String index = "test"; - clusterService.setState(state(index, randomBoolean(), + setState(clusterService, state(index, randomBoolean(), ShardRoutingState.STARTED)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); Future response = (broadcastReplicationAction.execute(new DummyBroadcastRequest().indices(index))); @@ -135,7 +143,7 @@ public class BroadcastReplicationTests extends ESTestCase { public void testResultCombine() throws InterruptedException, ExecutionException, IOException { final String index = "test"; int numShards = randomInt(3); - clusterService.setState(stateWithAssignedPrimariesAndOneReplica(index, numShards)); + setState(clusterService, stateWithAssignedPrimariesAndOneReplica(index, numShards)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); Future response = (broadcastReplicationAction.execute(new DummyBroadcastRequest().indices(index))); int succeeded = 0; @@ -165,7 +173,7 @@ public class BroadcastReplicationTests extends ESTestCase { } public void testNoShards() throws InterruptedException, ExecutionException, IOException { - clusterService.setState(stateWithNoShard()); + setState(clusterService, stateWithNoShard()); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); BroadcastResponse response = executeAndAssertImmediateResponse(broadcastReplicationAction, new DummyBroadcastRequest()); assertBroadcastResponse(0, 0, 0, response, null); @@ -186,8 
+194,8 @@ public class BroadcastReplicationTests extends ESTestCase { protected final Set>> capturedShardRequests = ConcurrentCollections.newConcurrentSet(); public TestBroadcastReplicationAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, - TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - TransportReplicationAction replicatedBroadcastShardAction) { + TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + TransportReplicationAction replicatedBroadcastShardAction) { super("test-broadcast-replication-action", DummyBroadcastRequest::new, settings, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, replicatedBroadcastShardAction); } @@ -204,7 +212,7 @@ public class BroadcastReplicationTests extends ESTestCase { @Override protected BroadcastResponse newResponse(int successfulShards, int failedShards, int totalNumCopies, - List shardFailures) { + List shardFailures) { return new BroadcastResponse(totalNumCopies, successfulShards, failedShards, shardFailures); } diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index 631dad5d66dc..1fc94dcb533e 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.action.shard.ShardStateAction; @@ -44,6 +43,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; @@ -57,7 +57,6 @@ import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESAllocationTestCase; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; @@ -65,6 +64,7 @@ import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportResponseOptions; import org.elasticsearch.transport.TransportService; import org.hamcrest.Matcher; +import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -86,6 +86,8 @@ import java.util.function.Consumer; import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.state; import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithActivePrimary; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.Matchers.arrayWithSize; @@ -102,7 +104,7 @@ public class TransportReplicationActionTests extends 
ESTestCase { private static ThreadPool threadPool; - private TestClusterService clusterService; + private ClusterService clusterService; private TransportService transportService; private CapturingTransport transport; private Action action; @@ -121,7 +123,7 @@ public class TransportReplicationActionTests extends ESTestCase { public void setUp() throws Exception { super.setUp(); transport = new CapturingTransport(); - clusterService = new TestClusterService(threadPool); + clusterService = createClusterService(threadPool); transportService = new TransportService(transport, threadPool); transportService.start(); transportService.acceptIncomingRequests(); @@ -129,6 +131,12 @@ public class TransportReplicationActionTests extends ESTestCase { count.set(1); } + @After + public void tearDown() throws Exception { + super.tearDown(); + clusterService.close(); + } + @AfterClass public static void afterClass() { ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); @@ -151,7 +159,7 @@ public class TransportReplicationActionTests extends ESTestCase { ClusterBlocks.Builder block = ClusterBlocks.builder() .addGlobalBlock(new ClusterBlock(1, "non retryable", false, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL)); - clusterService.setState(ClusterState.builder(clusterService.state()).blocks(block)); + setState(clusterService, ClusterState.builder(clusterService.state()).blocks(block)); TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(task, request, listener); reroutePhase.run(); assertListenerThrows("primary phase should fail operation", listener, ClusterBlockException.class); @@ -159,7 +167,7 @@ public class TransportReplicationActionTests extends ESTestCase { block = ClusterBlocks.builder() .addGlobalBlock(new ClusterBlock(1, "retryable", true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL)); - clusterService.setState(ClusterState.builder(clusterService.state()).blocks(block)); + setState(clusterService, 
ClusterState.builder(clusterService.state()).blocks(block)); listener = new PlainActionFuture<>(); reroutePhase = action.new ReroutePhase(task, new Request().timeout("5ms"), listener); reroutePhase.run(); @@ -174,7 +182,7 @@ public class TransportReplicationActionTests extends ESTestCase { block = ClusterBlocks.builder() .addGlobalBlock(new ClusterBlock(1, "non retryable", false, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL)); - clusterService.setState(ClusterState.builder(clusterService.state()).blocks(block)); + setState(clusterService, ClusterState.builder(clusterService.state()).blocks(block)); assertListenerThrows("primary phase should fail operation when moving from a retryable block to a non-retryable one", listener, ClusterBlockException.class); assertIndexShardUninitialized(); } @@ -187,7 +195,7 @@ public class TransportReplicationActionTests extends ESTestCase { final String index = "test"; final ShardId shardId = new ShardId(index, "_na_", 0); // no replicas in oder to skip the replication part - clusterService.setState(state(index, true, + setState(clusterService, state(index, true, randomBoolean() ? 
ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED)); ReplicationTask task = maybeTask(); @@ -207,13 +215,13 @@ public class TransportReplicationActionTests extends ESTestCase { assertFalse("unassigned primary didn't cause a retry", listener.isDone()); assertPhase(task, "waiting_for_retry"); - clusterService.setState(state(index, true, ShardRoutingState.STARTED)); + setState(clusterService, state(index, true, ShardRoutingState.STARTED)); logger.debug("--> primary assigned state:\n{}", clusterService.state().prettyPrint()); final IndexShardRoutingTable shardRoutingTable = clusterService.state().routingTable().index(index).shard(shardId.id()); final String primaryNodeId = shardRoutingTable.primaryShard().currentNodeId(); final List capturedRequests = - transport.getCapturedRequestsByTargetNodeAndClear().get(primaryNodeId); + transport.getCapturedRequestsByTargetNodeAndClear().get(primaryNodeId); assertThat(capturedRequests, notNullValue()); assertThat(capturedRequests.size(), equalTo(1)); assertThat(capturedRequests.get(0).action, equalTo("testAction[p]")); @@ -236,7 +244,7 @@ public class TransportReplicationActionTests extends ESTestCase { ClusterState state = state(index, true, ShardRoutingState.RELOCATING); String relocationTargetNode = state.getRoutingTable().shardRoutingTable(shardId).primaryShard().relocatingNodeId(); state = ClusterState.builder(state).nodes(DiscoveryNodes.builder(state.nodes()).localNodeId(relocationTargetNode)).build(); - clusterService.setState(state); + setState(clusterService, state); logger.debug("--> relocation ongoing state:\n{}", clusterService.state().prettyPrint()); Request request = new Request(shardId).timeout("1ms").routedBasedOnClusterVersion(clusterService.state().version() + 1); @@ -257,13 +265,13 @@ public class TransportReplicationActionTests extends ESTestCase { RoutingAllocation.Result result = allocationService.applyStartedShards(state, Arrays.asList(relocationTarget)); ClusterState updatedState = 
ClusterState.builder(clusterService.state()).routingResult(result).build(); - clusterService.setState(updatedState); + setState(clusterService, updatedState); logger.debug("--> relocation complete state:\n{}", clusterService.state().prettyPrint()); IndexShardRoutingTable shardRoutingTable = clusterService.state().routingTable().index(index).shard(shardId.id()); final String primaryNodeId = shardRoutingTable.primaryShard().currentNodeId(); final List capturedRequests = - transport.getCapturedRequestsByTargetNodeAndClear().get(primaryNodeId); + transport.getCapturedRequestsByTargetNodeAndClear().get(primaryNodeId); assertThat(capturedRequests, notNullValue()); assertThat(capturedRequests.size(), equalTo(1)); assertThat(capturedRequests.get(0).action, equalTo("testAction[p]")); @@ -273,7 +281,7 @@ public class TransportReplicationActionTests extends ESTestCase { public void testUnknownIndexOrShardOnReroute() throws InterruptedException { final String index = "test"; // no replicas in oder to skip the replication part - clusterService.setState(state(index, true, + setState(clusterService, state(index, true, randomBoolean() ? 
ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); Request request = new Request(new ShardId("unknown_index", "_na_", 0)).timeout("1ms"); @@ -296,7 +304,7 @@ public class TransportReplicationActionTests extends ESTestCase { final ShardId shardId = new ShardId(index, "_na_", 0); ReplicationTask task = maybeTask(); - clusterService.setState(stateWithActivePrimary(index, randomBoolean(), 3)); + setState(clusterService, stateWithActivePrimary(index, randomBoolean(), 3)); logger.debug("using state: \n{}", clusterService.state().prettyPrint()); final IndexShardRoutingTable shardRoutingTable = clusterService.state().routingTable().index(index).shard(shardId.id()); @@ -325,7 +333,7 @@ public class TransportReplicationActionTests extends ESTestCase { final String index = "test"; final ShardId shardId = new ShardId(index, "_na_", 0); ClusterState state = stateWithActivePrimary(index, true, randomInt(5)); - clusterService.setState(state); + setState(clusterService, state); Request request = new Request(shardId).timeout("1ms"); PlainActionFuture listener = new PlainActionFuture<>(); ReplicationTask task = maybeTask(); @@ -365,7 +373,7 @@ public class TransportReplicationActionTests extends ESTestCase { String primaryTargetNodeId = state.getRoutingTable().shardRoutingTable(shardId).primaryShard().relocatingNodeId(); // simulate execution of the primary phase on the relocation target node state = ClusterState.builder(state).nodes(DiscoveryNodes.builder(state.nodes()).localNodeId(primaryTargetNodeId)).build(); - clusterService.setState(state); + setState(clusterService, state); Request request = new Request(shardId).timeout("1ms"); PlainActionFuture listener = new PlainActionFuture<>(); ReplicationTask task = maybeTask(); @@ -387,7 +395,7 @@ public class TransportReplicationActionTests extends ESTestCase { final String index = "test"; final ShardId shardId = new ShardId(index, 
"_na_", 0); // start with no replicas - clusterService.setState(stateWithActivePrimary(index, true, 0)); + setState(clusterService, stateWithActivePrimary(index, true, 0)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); final ClusterState stateWithAddedReplicas = state(index, true, ShardRoutingState.STARTED, randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.STARTED); ReplicationTask task = maybeTask(); @@ -397,7 +405,7 @@ public class TransportReplicationActionTests extends ESTestCase { protected Tuple shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Exception { final Tuple operationOnPrimary = super.shardOperationOnPrimary(metaData, shardRequest); // add replicas after primary operation - ((TestClusterService) clusterService).setState(stateWithAddedReplicas); + setState(clusterService, stateWithAddedReplicas); logger.debug("--> state after primary operation:\n{}", clusterService.state().prettyPrint()); return operationOnPrimary; } @@ -422,7 +430,7 @@ public class TransportReplicationActionTests extends ESTestCase { final String index = "test"; final ShardId shardId = new ShardId(index, "_na_", 0); // start with a replica - clusterService.setState(state(index, true, ShardRoutingState.STARTED, randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.STARTED)); + setState(clusterService, state(index, true, ShardRoutingState.STARTED, randomBoolean() ? 
ShardRoutingState.INITIALIZING : ShardRoutingState.STARTED)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); final ClusterState stateWithRelocatingReplica = state(index, true, ShardRoutingState.STARTED, ShardRoutingState.RELOCATING); @@ -431,7 +439,7 @@ public class TransportReplicationActionTests extends ESTestCase { protected Tuple shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Exception { final Tuple operationOnPrimary = super.shardOperationOnPrimary(metaData, shardRequest); // set replica to relocating - ((TestClusterService) clusterService).setState(stateWithRelocatingReplica); + setState(clusterService, stateWithRelocatingReplica); logger.debug("--> state after primary operation:\n{}", clusterService.state().prettyPrint()); return operationOnPrimary; } @@ -447,7 +455,7 @@ public class TransportReplicationActionTests extends ESTestCase { ShardRouting relocatingReplicaShard = stateWithRelocatingReplica.getRoutingTable().shardRoutingTable(index, shardId.id()).replicaShards().get(0); Map> capturedRequestsByTargetNode = transport.getCapturedRequestsByTargetNodeAndClear(); assertPhase(task, "replicating"); - for (String node : new String[] {relocatingReplicaShard.currentNodeId(), relocatingReplicaShard.relocatingNodeId()}) { + for (String node : new String[]{relocatingReplicaShard.currentNodeId(), relocatingReplicaShard.relocatingNodeId()}) { List requests = capturedRequestsByTargetNode.get(node); assertThat(requests, notNullValue()); assertThat(requests.size(), equalTo(1)); @@ -458,7 +466,7 @@ public class TransportReplicationActionTests extends ESTestCase { public void testIndexDeletedAfterPrimaryOperation() { final String index = "test"; final ShardId shardId = new ShardId(index, "_na_", 0); - clusterService.setState(state(index, true, ShardRoutingState.STARTED, ShardRoutingState.STARTED)); + setState(clusterService, state(index, true, ShardRoutingState.STARTED, ShardRoutingState.STARTED)); 
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); final ClusterState stateWithDeletedIndex = state(index + "_new", true, ShardRoutingState.STARTED, ShardRoutingState.RELOCATING); @@ -467,7 +475,7 @@ public class TransportReplicationActionTests extends ESTestCase { protected Tuple shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Exception { final Tuple operationOnPrimary = super.shardOperationOnPrimary(metaData, shardRequest); // delete index after primary op - ((TestClusterService) clusterService).setState(stateWithDeletedIndex); + setState(clusterService, stateWithDeletedIndex); logger.debug("--> state after primary operation:\n{}", clusterService.state().prettyPrint()); return operationOnPrimary; } @@ -519,7 +527,7 @@ public class TransportReplicationActionTests extends ESTestCase { replicaStates[i] = ShardRoutingState.UNASSIGNED; } - clusterService.setState(state(index, true, ShardRoutingState.STARTED, replicaStates)); + setState(clusterService, state(index, true, ShardRoutingState.STARTED, replicaStates)); logger.debug("using consistency level of [{}], assigned shards [{}], total shards [{}]. expecting op to [{}]. using state: \n{}", request.consistencyLevel(), 1 + assignedReplicas, 1 + assignedReplicas + unassignedReplicas, passesWriteConsistency ? 
"succeed" : "retry", clusterService.state().prettyPrint()); @@ -547,7 +555,7 @@ public class TransportReplicationActionTests extends ESTestCase { for (int i = 0; i < replicaStates.length; i++) { replicaStates[i] = ShardRoutingState.STARTED; } - clusterService.setState(state(index, true, ShardRoutingState.STARTED, replicaStates)); + setState(clusterService, state(index, true, ShardRoutingState.STARTED, replicaStates)); listener = new PlainActionFuture<>(); primaryPhase = action.new PrimaryPhase(task, request, createTransportChannel(listener)); primaryPhase.run(); @@ -567,7 +575,7 @@ public class TransportReplicationActionTests extends ESTestCase { // simulate execution of the replication phase on the relocation target node after relocation source was marked as relocated state = ClusterState.builder(state).nodes(DiscoveryNodes.builder(state.nodes()).localNodeId(primaryShard.relocatingNodeId())).build(); } - clusterService.setState(state); + setState(clusterService, state); final IndexShardRoutingTable shardRoutingTable = clusterService.state().routingTable().index(index).shard(shardId.id()); int assignedReplicas = 0; @@ -602,7 +610,7 @@ public class TransportReplicationActionTests extends ESTestCase { // simulate execution of the primary phase on the relocation target node state = ClusterState.builder(state).nodes(DiscoveryNodes.builder(state.nodes()).localNodeId(primaryShard.relocatingNodeId())).build(); } - clusterService.setState(state); + setState(clusterService, state); final IndexShardRoutingTable shardRoutingTable = clusterService.state().routingTable().index(index).shard(shardId.id()); int assignedReplicas = 0; @@ -646,7 +654,7 @@ public class TransportReplicationActionTests extends ESTestCase { HashMap nodesSentTo = new HashMap<>(); boolean executeOnReplica = - action.shouldExecuteReplication(clusterService.state().getMetaData().index(shardId.getIndex()).getSettings()); + 
action.shouldExecuteReplication(clusterService.state().getMetaData().index(shardId.getIndex()).getSettings()); for (CapturingTransport.CapturedRequest capturedRequest : capturedRequests) { // no duplicate requests Request replicationRequest = (Request) capturedRequest.request; @@ -703,7 +711,7 @@ public class TransportReplicationActionTests extends ESTestCase { // get the shard the request was sent to ShardRouting routing = clusterService.state().getRoutingNodes().node(capturedRequest.node.id()).get(request.shardId.id()); // and the shard that was requested to be failed - ShardStateAction.ShardRoutingEntry shardRoutingEntry = (ShardStateAction.ShardRoutingEntry)shardFailedRequest.request; + ShardStateAction.ShardRoutingEntry shardRoutingEntry = (ShardStateAction.ShardRoutingEntry) shardFailedRequest.request; // the shard the request was sent to and the shard to be failed should be the same assertEquals(shardRoutingEntry.getShardRouting(), routing); failures.add(shardFailedRequest); @@ -714,7 +722,7 @@ public class TransportReplicationActionTests extends ESTestCase { CapturingTransport.CapturedRequest currentRequest = shardFailedRequest; for (int retryNumber = 0; retryNumber < numberOfRetries; retryNumber++) { // force a new cluster state to simulate a new master having been elected - clusterService.setState(ClusterState.builder(clusterService.state())); + setState(clusterService, ClusterState.builder(clusterService.state())); transport.handleRemoteError(currentRequest.requestId, new NotMasterException("shard-failed-test")); CapturingTransport.CapturedRequest[] retryRequests = transport.getCapturedRequestsAndClear(); assertEquals(1, retryRequests.length); @@ -765,7 +773,7 @@ public class TransportReplicationActionTests extends ESTestCase { final String index = "test"; final ShardId shardId = new ShardId(index, "_na_", 0); // no replica, we only want to test on primary - clusterService.setState(state(index, true, ShardRoutingState.STARTED)); + 
setState(clusterService, state(index, true, ShardRoutingState.STARTED)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); Request request = new Request(shardId).timeout("100ms"); PlainActionFuture listener = new PlainActionFuture<>(); @@ -805,7 +813,7 @@ public class TransportReplicationActionTests extends ESTestCase { final String index = "test"; final ShardId shardId = new ShardId(index, "_na_", 0); // one replica to make sure replication is attempted - clusterService.setState(state(index, true, + setState(clusterService, state(index, true, ShardRoutingState.STARTED, ShardRoutingState.STARTED)); ShardRouting primaryShard = clusterService.state().routingTable().shardRoutingTable(shardId).primaryShard(); indexShardRouting.set(primaryShard); @@ -842,7 +850,7 @@ public class TransportReplicationActionTests extends ESTestCase { public void testReplicasCounter() throws Exception { final ShardId shardId = new ShardId("test", "_na_", 0); - clusterService.setState(state(shardId.getIndexName(), true, + setState(clusterService, state(shardId.getIndexName(), true, ShardRoutingState.STARTED, ShardRoutingState.STARTED)); action = new ActionWithDelay(Settings.EMPTY, "testActionWithExceptions", transportService, clusterService, threadPool); final Action.ReplicaOperationTransportHandler replicaOperationTransportHandler = action.new ReplicaOperationTransportHandler(); @@ -881,7 +889,7 @@ public class TransportReplicationActionTests extends ESTestCase { action = new ActionWithExceptions(Settings.EMPTY, "testActionWithExceptions", transportService, clusterService, threadPool); final String index = "test"; final ShardId shardId = new ShardId(index, "_na_", 0); - clusterService.setState(state(index, true, + setState(clusterService, state(index, true, ShardRoutingState.STARTED, ShardRoutingState.STARTED)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); Request request = new Request(shardId).timeout("100ms"); @@ 
-901,7 +909,7 @@ public class TransportReplicationActionTests extends ESTestCase { final String index = "test"; final ShardId shardId = new ShardId(index, "_na_", 0); boolean localPrimary = true; - clusterService.setState(state(index, localPrimary, + setState(clusterService, state(index, localPrimary, ShardRoutingState.STARTED, ShardRoutingState.STARTED)); Action action = new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) { @Override @@ -953,8 +961,8 @@ public class TransportReplicationActionTests extends ESTestCase { // publish a new cluster state boolean localPrimaryOnRetry = randomBoolean(); - clusterService.setState(state(index, localPrimaryOnRetry, - ShardRoutingState.STARTED, ShardRoutingState.STARTED)); + setState(clusterService, state(index, localPrimaryOnRetry, + ShardRoutingState.STARTED, ShardRoutingState.STARTED)); CapturingTransport.CapturedRequest[] primaryRetry = transport.getCapturedRequestsAndClear(); // the request should be retried @@ -1066,7 +1074,7 @@ public class TransportReplicationActionTests extends ESTestCase { ThreadPool threadPool) { super(settings, actionName, transportService, clusterService, null, threadPool, new ShardStateAction(settings, clusterService, transportService, null, null, threadPool), - new ActionFilters(new HashSet()), new IndexNameExpressionResolver(Settings.EMPTY), Request::new, Request::new, ThreadPool.Names.SAME); + new ActionFilters(new HashSet()), new IndexNameExpressionResolver(Settings.EMPTY), Request::new, Request::new, ThreadPool.Names.SAME); } @Override @@ -1190,7 +1198,8 @@ public class TransportReplicationActionTests extends ESTestCase { * Transport channel that is needed for replica operation testing. 
*/ public TransportChannel createTransportChannel(final PlainActionFuture listener) { - return createTransportChannel(listener, error -> {}); + return createTransportChannel(listener, error -> { + }); } public TransportChannel createTransportChannel(final PlainActionFuture listener, Consumer consumer) { diff --git a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java index 1bf1188ef53c..2dd31548cb9a 100644 --- a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java @@ -36,17 +36,18 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportService; +import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -59,13 +60,15 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.function.Supplier; +import static 
org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; import static org.hamcrest.core.IsEqual.equalTo; public class TransportInstanceSingleOperationActionTests extends ESTestCase { private static ThreadPool THREAD_POOL; - private TestClusterService clusterService; + private ClusterService clusterService; private CapturingTransport transport; private TransportService transportService; @@ -137,7 +140,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { public void setUp() throws Exception { super.setUp(); transport = new CapturingTransport(); - clusterService = new TestClusterService(THREAD_POOL); + clusterService = createClusterService(THREAD_POOL); transportService = new TransportService(transport, THREAD_POOL); transportService.start(); transportService.acceptIncomingRequests(); @@ -151,6 +154,13 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { ); } + @After + public void tearDown() throws Exception { + super.tearDown(); + clusterService.close(); + transportService.close(); + } + @AfterClass public static void destroyThreadPool() { ThreadPool.terminate(THREAD_POOL, 30, TimeUnit.SECONDS); @@ -163,7 +173,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { PlainActionFuture listener = new PlainActionFuture<>(); ClusterBlocks.Builder block = ClusterBlocks.builder() .addGlobalBlock(new ClusterBlock(1, "", false, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL)); - clusterService.setState(ClusterState.builder(clusterService.state()).blocks(block)); + setState(clusterService, ClusterState.builder(clusterService.state()).blocks(block)); try { action.new AsyncSingleAction(request, listener).start(); listener.get(); @@ -180,7 +190,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { Request request = new Request().index("test"); 
request.shardId = new ShardId("test", "_na_", 0); PlainActionFuture listener = new PlainActionFuture<>(); - clusterService.setState(ClusterStateCreationUtils.state("test", randomBoolean(), ShardRoutingState.STARTED)); + setState(clusterService, ClusterStateCreationUtils.state("test", randomBoolean(), ShardRoutingState.STARTED)); action.new AsyncSingleAction(request, listener).start(); assertThat(transport.capturedRequests().length, equalTo(1)); transport.handleResponse(transport.capturedRequests()[0].requestId, new Response()); @@ -191,7 +201,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { Request request = new Request().index("test"); request.shardId = new ShardId("test", "_na_", 0); PlainActionFuture listener = new PlainActionFuture<>(); - clusterService.setState(ClusterStateCreationUtils.state("test", randomBoolean(), ShardRoutingState.STARTED)); + setState(clusterService, ClusterStateCreationUtils.state("test", randomBoolean(), ShardRoutingState.STARTED)); action.new AsyncSingleAction(request, listener).start(); assertThat(transport.capturedRequests().length, equalTo(1)); @@ -218,11 +228,11 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { request.shardId = new ShardId("test", "_na_", 0); PlainActionFuture listener = new PlainActionFuture<>(); boolean local = randomBoolean(); - clusterService.setState(ClusterStateCreationUtils.state("test", local, ShardRoutingState.INITIALIZING)); + setState(clusterService, ClusterStateCreationUtils.state("test", local, ShardRoutingState.INITIALIZING)); action.new AsyncSingleAction(request, listener).start(); // this should fail because primary not initialized assertThat(transport.capturedRequests().length, equalTo(0)); - clusterService.setState(ClusterStateCreationUtils.state("test", local, ShardRoutingState.STARTED)); + setState(clusterService, ClusterStateCreationUtils.state("test", local, ShardRoutingState.STARTED)); // this time it should work 
assertThat(transport.capturedRequests().length, equalTo(1)); transport.handleResponse(transport.capturedRequests()[0].requestId, new Response()); @@ -234,7 +244,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { request.shardId = new ShardId("test", "_na_", 0); PlainActionFuture listener = new PlainActionFuture<>(); boolean local = randomBoolean(); - clusterService.setState(ClusterStateCreationUtils.state("test", local, ShardRoutingState.STARTED)); + setState(clusterService, ClusterStateCreationUtils.state("test", local, ShardRoutingState.STARTED)); action.new AsyncSingleAction(request, listener).start(); assertThat(transport.capturedRequests().length, equalTo(1)); long requestId = transport.capturedRequests()[0].requestId; @@ -242,7 +252,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { DiscoveryNode node = clusterService.state().getNodes().getLocalNode(); transport.handleLocalError(requestId, new ConnectTransportException(node, "test exception")); // trigger cluster state observer - clusterService.setState(ClusterStateCreationUtils.state("test", local, ShardRoutingState.STARTED)); + setState(clusterService, ClusterStateCreationUtils.state("test", local, ShardRoutingState.STARTED)); assertThat(transport.capturedRequests().length, equalTo(1)); transport.handleResponse(transport.capturedRequests()[0].requestId, new Response()); listener.get(); @@ -252,7 +262,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { Request request = new Request().index("test").timeout(new TimeValue(0, TimeUnit.MILLISECONDS)); request.shardId = new ShardId("test", "_na_", 0); PlainActionFuture listener = new PlainActionFuture<>(); - clusterService.setState(ClusterStateCreationUtils.state("test", randomBoolean(), ShardRoutingState.STARTED)); + setState(clusterService, ClusterStateCreationUtils.state("test", randomBoolean(), ShardRoutingState.STARTED)); action.new AsyncSingleAction(request, 
listener).start(); assertThat(transport.capturedRequests().length, equalTo(1)); long requestId = transport.capturedRequests()[0].requestId; @@ -301,7 +311,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { Request request = new Request().index("test"); request.shardId = new ShardId("test", "_na_", 0); PlainActionFuture listener = new PlainActionFuture<>(); - clusterService.setState(ClusterStateCreationUtils.state("test", randomBoolean(), ShardRoutingState.STARTED)); + setState(clusterService, ClusterStateCreationUtils.state("test", randomBoolean(), ShardRoutingState.STARTED)); action.new AsyncSingleAction(request, listener).start(); assertThat(transport.capturedRequests().length, equalTo(0)); try { diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java index 9a8e8fb72687..28b0b7e18cfe 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionModule; @@ -35,6 +34,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; diff --git a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java index 78128fe30f22..9c004a95f3b5 
100644 --- a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java @@ -22,6 +22,7 @@ package org.elasticsearch.cluster; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java index c4031edc2d69..be8984830e4c 100644 --- a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.cluster.action.shard; import org.apache.lucene.index.CorruptIndexException; import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.NotMasterException; @@ -34,10 +33,10 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.test.transport.CapturingTransport; import 
org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.NodeDisconnectedException; @@ -57,6 +56,8 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.LongConsumer; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -67,7 +68,7 @@ public class ShardStateActionTests extends ESTestCase { private TestShardStateAction shardStateAction; private CapturingTransport transport; private TransportService transportService; - private TestClusterService clusterService; + private ClusterService clusterService; private static class TestShardStateAction extends ShardStateAction { public TestShardStateAction(Settings settings, ClusterService clusterService, TransportService transportService, AllocationService allocationService, RoutingService routingService) { @@ -104,19 +105,22 @@ public class ShardStateActionTests extends ESTestCase { public void setUp() throws Exception { super.setUp(); this.transport = new CapturingTransport(); - clusterService = new TestClusterService(THREAD_POOL); + clusterService = createClusterService(THREAD_POOL); transportService = new TransportService(transport, THREAD_POOL); transportService.start(); transportService.acceptIncomingRequests(); shardStateAction = new TestShardStateAction(Settings.EMPTY, clusterService, transportService, null, null); - shardStateAction.setOnBeforeWaitForNewMasterAndRetry(() -> {}); - shardStateAction.setOnAfterWaitForNewMasterAndRetry(() -> {}); + shardStateAction.setOnBeforeWaitForNewMasterAndRetry(() -> { + }); + shardStateAction.setOnAfterWaitForNewMasterAndRetry(() -> { + }); } @Override @After public void tearDown() throws Exception { - transportService.stop(); + 
clusterService.close(); + transportService.close(); super.tearDown(); } @@ -129,7 +133,7 @@ public class ShardStateActionTests extends ESTestCase { public void testSuccess() throws InterruptedException { final String index = "test"; - clusterService.setState(ClusterStateCreationUtils.stateWithActivePrimary(index, true, randomInt(5))); + setState(clusterService, ClusterStateCreationUtils.stateWithActivePrimary(index, true, randomInt(5))); AtomicBoolean success = new AtomicBoolean(); CountDownLatch latch = new CountDownLatch(1); @@ -154,7 +158,7 @@ public class ShardStateActionTests extends ESTestCase { assertEquals(1, capturedRequests.length); // the request is a shard failed request assertThat(capturedRequests[0].request, is(instanceOf(ShardStateAction.ShardRoutingEntry.class))); - ShardStateAction.ShardRoutingEntry shardRoutingEntry = (ShardStateAction.ShardRoutingEntry)capturedRequests[0].request; + ShardStateAction.ShardRoutingEntry shardRoutingEntry = (ShardStateAction.ShardRoutingEntry) capturedRequests[0].request; // for the right shard assertEquals(shardRouting, shardRoutingEntry.getShardRouting()); // sent to the master @@ -169,17 +173,18 @@ public class ShardStateActionTests extends ESTestCase { public void testNoMaster() throws InterruptedException { final String index = "test"; - clusterService.setState(ClusterStateCreationUtils.stateWithActivePrimary(index, true, randomInt(5))); + setState(clusterService, ClusterStateCreationUtils.stateWithActivePrimary(index, true, randomInt(5))); DiscoveryNodes.Builder noMasterBuilder = DiscoveryNodes.builder(clusterService.state().nodes()); noMasterBuilder.masterNodeId(null); - clusterService.setState(ClusterState.builder(clusterService.state()).nodes(noMasterBuilder)); + setState(clusterService, ClusterState.builder(clusterService.state()).nodes(noMasterBuilder)); CountDownLatch latch = new CountDownLatch(1); AtomicInteger retries = new AtomicInteger(); AtomicBoolean success = new AtomicBoolean(); - 
setUpMasterRetryVerification(1, retries, latch, requestId -> {}); + setUpMasterRetryVerification(1, retries, latch, requestId -> { + }); ShardRouting failedShard = getRandomShardRouting(index); shardStateAction.shardFailed(failedShard, failedShard, "test", getSimulatedFailure(), new ShardStateAction.Listener() { @@ -206,7 +211,7 @@ public class ShardStateActionTests extends ESTestCase { public void testMasterChannelException() throws InterruptedException { final String index = "test"; - clusterService.setState(ClusterStateCreationUtils.stateWithActivePrimary(index, true, randomInt(5))); + setState(clusterService, ClusterStateCreationUtils.stateWithActivePrimary(index, true, randomInt(5))); CountDownLatch latch = new CountDownLatch(1); AtomicInteger retries = new AtomicInteger(); @@ -216,8 +221,8 @@ public class ShardStateActionTests extends ESTestCase { LongConsumer retryLoop = requestId -> { if (randomBoolean()) { transport.handleRemoteError( - requestId, - randomFrom(new NotMasterException("simulated"), new Discovery.FailedToCommitClusterStateException("simulated"))); + requestId, + randomFrom(new NotMasterException("simulated"), new Discovery.FailedToCommitClusterStateException("simulated"))); } else { if (randomBoolean()) { transport.handleLocalError(requestId, new NodeNotConnectedException(null, "simulated")); @@ -262,7 +267,7 @@ public class ShardStateActionTests extends ESTestCase { public void testUnhandledFailure() { final String index = "test"; - clusterService.setState(ClusterStateCreationUtils.stateWithActivePrimary(index, true, randomInt(5))); + setState(clusterService, ClusterStateCreationUtils.stateWithActivePrimary(index, true, randomInt(5))); AtomicBoolean failure = new AtomicBoolean(); @@ -291,14 +296,14 @@ public class ShardStateActionTests extends ESTestCase { public void testShardNotFound() throws InterruptedException { final String index = "test"; - clusterService.setState(ClusterStateCreationUtils.stateWithActivePrimary(index, true, 
randomInt(5))); + setState(clusterService, ClusterStateCreationUtils.stateWithActivePrimary(index, true, randomInt(5))); AtomicBoolean success = new AtomicBoolean(); CountDownLatch latch = new CountDownLatch(1); ShardRouting failedShard = getRandomShardRouting(index); RoutingTable routingTable = RoutingTable.builder(clusterService.state().getRoutingTable()).remove(index).build(); - clusterService.setState(ClusterState.builder(clusterService.state()).routingTable(routingTable)); + setState(clusterService, ClusterState.builder(clusterService.state()).routingTable(routingTable)); shardStateAction.shardFailed(failedShard, failedShard, "test", getSimulatedFailure(), new ShardStateAction.Listener() { @Override public void onSuccess() { @@ -324,7 +329,7 @@ public class ShardStateActionTests extends ESTestCase { public void testNoLongerPrimaryShardException() throws InterruptedException { final String index = "test"; - clusterService.setState(ClusterStateCreationUtils.stateWithActivePrimary(index, true, randomInt(5))); + setState(clusterService, ClusterStateCreationUtils.stateWithActivePrimary(index, true, randomInt(5))); ShardRouting failedShard = getRandomShardRouting(index); @@ -349,7 +354,7 @@ public class ShardStateActionTests extends ESTestCase { }); ShardStateAction.NoLongerPrimaryShardException catastrophicError = - new ShardStateAction.NoLongerPrimaryShardException(failedShard.shardId(), "source shard [" + sourceFailedShard + " is neither the local allocation nor the primary allocation"); + new ShardStateAction.NoLongerPrimaryShardException(failedShard.shardId(), "source shard [" + sourceFailedShard + " is neither the local allocation nor the primary allocation"); CapturingTransport.CapturedRequest[] capturedRequests = transport.getCapturedRequestsAndClear(); transport.handleRemoteError(capturedRequests[0].requestId, catastrophicError); @@ -371,7 +376,7 @@ public class ShardStateActionTests extends ESTestCase { 
shardStateAction.setOnBeforeWaitForNewMasterAndRetry(() -> { DiscoveryNodes.Builder masterBuilder = DiscoveryNodes.builder(clusterService.state().nodes()); masterBuilder.masterNodeId(clusterService.state().nodes().masterNodes().iterator().next().value.id()); - clusterService.setState(ClusterState.builder(clusterService.state()).nodes(masterBuilder)); + setState(clusterService, ClusterState.builder(clusterService.state()).nodes(masterBuilder)); }); shardStateAction.setOnAfterWaitForNewMasterAndRetry(() -> verifyRetry(numberOfRetries, retries, latch, retryLoop)); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java index 5c922f07e462..d35b896f7057 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java @@ -27,10 +27,10 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.allocation.AllocationService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESAllocationTestCase; -import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.Before; @@ -42,6 +42,8 @@ import java.util.concurrent.atomic.AtomicBoolean; import static java.util.Collections.singletonMap; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static 
org.elasticsearch.cluster.service.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; /** @@ -140,7 +142,7 @@ public class RoutingServiceTests extends ESAllocationTestCase { */ public void testDelayedUnassignedScheduleRerouteAfterDelayedReroute() throws Exception { final ThreadPool testThreadPool = new ThreadPool(getTestName()); - + ClusterService clusterService = createClusterService(testThreadPool); try { MockAllocationService allocation = createAllocationService(Settings.EMPTY, new DelayedShardsMockGatewayAllocator()); MetaData metaData = MetaData.builder() @@ -152,8 +154,8 @@ public class RoutingServiceTests extends ESAllocationTestCase { ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metaData(metaData) .routingTable(RoutingTable.builder().addAsNew(metaData.index("short_delay")).addAsNew(metaData.index("long_delay")).build()) .nodes(DiscoveryNodes.builder() - .put(newNode("node0", singletonMap("data", Boolean.FALSE.toString()))).localNodeId("node0").masterNodeId("node0") - .put(newNode("node1")).put(newNode("node2")).put(newNode("node3")).put(newNode("node4"))).build(); + .put(newNode("node0", singletonMap("data", Boolean.FALSE.toString()))).localNodeId("node0").masterNodeId("node0") + .put(newNode("node1")).put(newNode("node2")).put(newNode("node3")).put(newNode("node4"))).build(); // allocate shards clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build(); // start primaries @@ -209,8 +211,7 @@ public class RoutingServiceTests extends ESAllocationTestCase { // manually trigger a clusterChanged event on routingService ClusterState newState = clusterState; - // create fake cluster service - TestClusterService clusterService = new TestClusterService(newState, testThreadPool); + setState(clusterService, newState); // create routing service, also registers listener on cluster service RoutingService routingService = new RoutingService(Settings.EMPTY, 
testThreadPool, clusterService, allocation); routingService.start(); // just so performReroute does not prematurely return @@ -221,11 +222,12 @@ public class RoutingServiceTests extends ESAllocationTestCase { clusterService.addLast(event -> latch.countDown()); // instead of clusterService calling clusterChanged, we call it directly here routingService.clusterChanged(new ClusterChangedEvent("test", newState, prevState)); - // cluster service should have updated state and called routingService with clusterChanged + // cluster service should have updated state and called routingService with clusterChanged latch.await(); // verify the registration has been set to the delay of longDelayReplica/longDelayUnassignedReplica assertThat(routingService.getMinDelaySettingAtLastSchedulingNanos(), equalTo(TimeValue.timeValueSeconds(10).nanos())); } finally { + clusterService.stop(); terminate(testThreadPool); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java similarity index 97% rename from core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java rename to core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java index 351959460b15..cce3a873b7b5 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java @@ -16,12 +16,15 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.cluster; +package org.elasticsearch.cluster.service; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; +import org.elasticsearch.cluster.AckedClusterStateUpdateTask; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.LocalNodeMasterListener; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.service.PendingClusterTask; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractLifecycleComponent; @@ -213,6 +216,7 @@ public class ClusterServiceIT extends ESIntegTestCase { assertThat(processedLatch.await(1, TimeUnit.SECONDS), equalTo(true)); } + public void testAckedUpdateTaskNoAckExpected() throws Exception { Settings settings = settingsBuilder() .put("discovery.type", "local") @@ -425,7 +429,7 @@ public class ClusterServiceIT extends ESIntegTestCase { assertTrue(controlSources.isEmpty()); controlSources = new HashSet<>(Arrays.asList("1", "2", "3", "4", "5", "6", "7", "8", "9", "10")); - PendingClusterTasksResponse response = internalCluster().clientNodeClient().admin().cluster().preparePendingClusterTasks().execute().actionGet(); + PendingClusterTasksResponse response = internalCluster().clientNodeClient().admin().cluster().preparePendingClusterTasks().get(); assertThat(response.pendingTasks().size(), greaterThanOrEqualTo(10)); assertThat(response.pendingTasks().get(0).getSource().string(), equalTo("1")); assertThat(response.pendingTasks().get(0).isExecuting(), equalTo(true)); @@ -509,7 +513,8 @@ public class ClusterServiceIT extends ESIntegTestCase { ClusterService clusterService = internalCluster().getInstance(ClusterService.class); MasterAwareService testService = internalCluster().getInstance(MasterAwareService.class); - 
ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNodes("1").get(); + ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID) + .setWaitForNodes("1").get(); assertThat(clusterHealth.isTimedOut(), equalTo(false)); // the first node should be a master as the minimum required is 1 @@ -549,7 +554,8 @@ public class ClusterServiceIT extends ESIntegTestCase { internalCluster().stopRandomNonMasterNode(); // there should not be any master as the minimum number of required eligible masters is not met - awaitBusy(() -> clusterService1.state().nodes().masterNode() == null && clusterService1.state().status() == ClusterState.ClusterStateStatus.APPLIED); + awaitBusy(() -> clusterService1.state().nodes().masterNode() == null && + clusterService1.state().status() == ClusterState.ClusterStateStatus.APPLIED); assertThat(testService1.master(), is(false)); // bring the node back up @@ -557,9 +563,12 @@ public class ClusterServiceIT extends ESIntegTestCase { ClusterService clusterService2 = internalCluster().getInstance(ClusterService.class, node_2); MasterAwareService testService2 = internalCluster().getInstance(MasterAwareService.class, node_2); - // make sure both nodes see each other otherwise the masternode below could be null if node 2 is master and node 1 did'r receive the updated cluster state... - assertThat(internalCluster().client(node_1).admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setLocal(true).setWaitForNodes("2").get().isTimedOut(), is(false)); - assertThat(internalCluster().client(node_2).admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setLocal(true).setWaitForNodes("2").get().isTimedOut(), is(false)); + // make sure both nodes see each other otherwise the masternode below could be null if node 2 is master and node 1 did'r receive + // the updated cluster state... 
+ assertThat(internalCluster().client(node_1).admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setLocal(true) + .setWaitForNodes("2").get().isTimedOut(), is(false)); + assertThat(internalCluster().client(node_2).admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setLocal(true) + .setWaitForNodes("2").get().isTimedOut(), is(false)); // now that we started node1 again, a new master should be elected assertThat(clusterService2.state().nodes().masterNode(), is(notNullValue())); diff --git a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java index ff55de45649f..a62f99ab4595 100644 --- a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java @@ -23,7 +23,6 @@ import org.apache.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskConfig; import org.elasticsearch.cluster.ClusterStateTaskExecutor; @@ -63,6 +62,7 @@ import java.util.concurrent.Semaphore; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; @@ -89,7 +89,7 @@ public class ClusterServiceTests extends ESTestCase { @Before public void setUp() throws Exception { super.setUp(); - clusterService = createClusterService(true); + clusterService = createTimedClusterService(true); } @After @@ -98,12 +98,12 @@ public class ClusterServiceTests extends 
ESTestCase { super.tearDown(); } - TimedClusterService createClusterService(boolean makeMaster) throws InterruptedException { - TimedClusterService test = new TimedClusterService(Settings.EMPTY, null, + TimedClusterService createTimedClusterService(boolean makeMaster) throws InterruptedException { + TimedClusterService timedClusterService = new TimedClusterService(Settings.EMPTY, null, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), threadPool, new ClusterName("ClusterServiceTests")); - test.setLocalNode(new DiscoveryNode("node1", DummyTransportAddress.INSTANCE, Version.CURRENT)); - test.setNodeConnectionsService(new NodeConnectionsService(Settings.EMPTY, null, null) { + timedClusterService.setLocalNode(new DiscoveryNode("node1", DummyTransportAddress.INSTANCE, Version.CURRENT)); + timedClusterService.setNodeConnectionsService(new NodeConnectionsService(Settings.EMPTY, null, null) { @Override public void connectToAddedNodes(ClusterChangedEvent event) { // skip @@ -114,37 +114,17 @@ public class ClusterServiceTests extends ESTestCase { // skip } }); - test.setClusterStatePublisher((event, ackListener) -> { + timedClusterService.setClusterStatePublisher((event, ackListener) -> { }); - test.start(); - CountDownLatch latch = new CountDownLatch(1); - test.submitStateUpdateTask("making a master", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - final DiscoveryNodes nodes = currentState.nodes(); - final DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(nodes) - .masterNodeId(makeMaster ? 
nodes.localNodeId() : null); - return ClusterState.builder(currentState).blocks(ClusterBlocks.EMPTY_CLUSTER_BLOCK).nodes(nodesBuilder).build(); - } - - @Override - public boolean runOnlyOnMaster() { - return false; - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - latch.countDown(); - } - - @Override - public void onFailure(String source, Throwable t) { - logger.warn("unexpected exception", t); - fail("unexpected exception" + t); - } - }); - latch.await(); - return test; + timedClusterService.start(); + ClusterState state = timedClusterService.state(); + final DiscoveryNodes nodes = state.nodes(); + final DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(nodes) + .masterNodeId(makeMaster ? nodes.localNodeId() : null); + state = ClusterState.builder(state).blocks(ClusterBlocks.EMPTY_CLUSTER_BLOCK) + .nodes(nodesBuilder).build(); + setState(timedClusterService, state); + return timedClusterService; } public void testTimeoutUpdateTask() throws Exception { @@ -212,7 +192,7 @@ public class ClusterServiceTests extends ESTestCase { public void testMasterAwareExecution() throws Exception { - ClusterService nonMaster = createClusterService(false); + ClusterService nonMaster = createTimedClusterService(false); final boolean[] taskFailed = {false}; final CountDownLatch latch1 = new CountDownLatch(1); @@ -804,7 +784,7 @@ public class ClusterServiceTests extends ESTestCase { } } - static class TimedClusterService extends InternalClusterService { + static class TimedClusterService extends ClusterService { public volatile Long currentTimeOverride = null; diff --git a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceUtils.java b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceUtils.java new file mode 100644 index 000000000000..28b921c82da6 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceUtils.java @@ -0,0 +1,102 @@ +/* + * 
Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.cluster.service; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.NodeConnectionsService; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.concurrent.CountDownLatch; + +import static junit.framework.TestCase.fail; + +public class ClusterServiceUtils { + + public static ClusterService createClusterService(ThreadPool threadPool) { + ClusterService clusterService = new ClusterService(Settings.EMPTY, null, + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), + threadPool, new ClusterName("ClusterServiceTests")); + clusterService.setLocalNode(new DiscoveryNode("node", DummyTransportAddress.INSTANCE, 
Version.CURRENT)); + clusterService.setNodeConnectionsService(new NodeConnectionsService(Settings.EMPTY, null, null) { + @Override + public void connectToAddedNodes(ClusterChangedEvent event) { + // skip + } + + @Override + public void disconnectFromRemovedNodes(ClusterChangedEvent event) { + // skip + } + }); + clusterService.setClusterStatePublisher((event, ackListener) -> { + }); + clusterService.start(); + return clusterService; + } + + public static ClusterService createClusterService(ClusterState initialState, ThreadPool threadPool) { + ClusterService clusterService = createClusterService(threadPool); + setState(clusterService, initialState); + return clusterService; + } + + public static void setState(ClusterService clusterService, ClusterState.Builder clusterStateBuilder) { + setState(clusterService, clusterStateBuilder.build()); + } + + public static void setState(ClusterService clusterService, ClusterState clusterState) { + CountDownLatch latch = new CountDownLatch(1); + clusterService.submitStateUpdateTask("test setting state", new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + // make sure we increment versions as listener may depend on it for change + return ClusterState.builder(clusterState).version(currentState.version() + 1).build(); + } + + @Override + public boolean runOnlyOnMaster() { + return false; + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + latch.countDown(); + } + + @Override + public void onFailure(String source, Throwable t) { + fail("unexpected exception" + t); + } + }); + try { + latch.await(); + } catch (InterruptedException e) { + throw new ElasticsearchException("unexpected interruption", e); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index 
29997aec8f64..f588652ac8cc 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -27,7 +27,6 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ClusterStateUpdateTask; @@ -41,6 +40,7 @@ import org.elasticsearch.cluster.routing.Murmur3HashFunction; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; diff --git a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java index 06f2de8317aa..b58354b77d12 100644 --- a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java @@ -24,13 +24,13 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.zen.fd.FaultDetection; import org.elasticsearch.discovery.zen.fd.MasterFaultDetection; 
import org.elasticsearch.discovery.zen.fd.NodesFaultDetection; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.cluster.NoopClusterService; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportConnectionListener; @@ -44,10 +44,13 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import static java.util.Collections.emptyMap; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; public class ZenFaultDetectionTests extends ESTestCase { protected ThreadPool threadPool; + protected ClusterService clusterService; protected static final Version version0 = Version.fromId(/*0*/99); protected DiscoveryNode nodeA; @@ -62,6 +65,7 @@ public class ZenFaultDetectionTests extends ESTestCase { public void setUp() throws Exception { super.setUp(); threadPool = new ThreadPool(getClass().getName()); + clusterService = createClusterService(threadPool); serviceA = build(Settings.builder().put("name", "TS_A").build(), version0); nodeA = new DiscoveryNode("TS_A", "TS_A", serviceA.boundAddress().publishAddress(), emptyMap(), version0); serviceB = build(Settings.builder().put("name", "TS_B").build(), version1); @@ -100,6 +104,7 @@ public class ZenFaultDetectionTests extends ESTestCase { super.tearDown(); serviceA.close(); serviceB.close(); + clusterService.close(); terminate(threadPool); } @@ -186,21 +191,18 @@ public class ZenFaultDetectionTests extends ESTestCase { .put(FaultDetection.PING_INTERVAL_SETTING.getKey(), "5m"); ClusterName clusterName = new ClusterName(randomAsciiOfLengthBetween(3, 20)); final ClusterState state = ClusterState.builder(clusterName).nodes(buildNodesForA(false)).build(); + setState(clusterService, state); MasterFaultDetection masterFD = new 
MasterFaultDetection(settings.build(), threadPool, serviceA, clusterName, - new NoopClusterService(state)); + clusterService); masterFD.start(nodeB, "test"); final String[] failureReason = new String[1]; final DiscoveryNode[] failureNode = new DiscoveryNode[1]; final CountDownLatch notified = new CountDownLatch(1); - masterFD.addListener(new MasterFaultDetection.Listener() { - - @Override - public void onMasterFailure(DiscoveryNode masterNode, Throwable cause, String reason) { - failureNode[0] = masterNode; - failureReason[0] = reason; - notified.countDown(); - } + masterFD.addListener((masterNode, cause, reason) -> { + failureNode[0] = masterNode; + failureReason[0] = reason; + notified.countDown(); }); // will raise a disconnect on A serviceB.stop(); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java index 67501d55a956..96e7a90ece6c 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; @@ -40,9 +41,12 @@ import org.elasticsearch.common.util.concurrent.BaseFuture; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.membership.MembershipAction; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.cluster.TestClusterService; import 
org.elasticsearch.test.junit.annotations.TestLogging; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.AfterClass; import org.junit.Before; +import org.junit.BeforeClass; import java.util.ArrayList; import java.util.Collections; @@ -55,28 +59,52 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @TestLogging("discovery.zen:TRACE") public class NodeJoinControllerTests extends ESTestCase { - private TestClusterService clusterService; + private static ThreadPool threadPool; + + private ClusterService clusterService; private NodeJoinController nodeJoinController; + @BeforeClass + public static void beforeClass() { + threadPool = new ThreadPool("ShardReplicationTests"); + } + + @AfterClass + public static void afterClass() { + ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + threadPool = null; + } + @Before public void setUp() throws Exception { super.setUp(); - clusterService = new TestClusterService(); + clusterService = createClusterService(threadPool); final DiscoveryNodes initialNodes = clusterService.state().nodes(); final DiscoveryNode localNode = initialNodes.localNode(); // make sure we have a master - clusterService.setState(ClusterState.builder(clusterService.state()).nodes(DiscoveryNodes.builder(initialNodes).masterNodeId(localNode.id()))); + setState(clusterService, ClusterState.builder(clusterService.state()).nodes( + DiscoveryNodes.builder(initialNodes).masterNodeId(localNode.id()))); 
nodeJoinController = new NodeJoinController(clusterService, new NoopRoutingService(Settings.EMPTY), - new DiscoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), Settings.EMPTY); + new DiscoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), + Settings.EMPTY); + } + + @After + public void tearDown() throws Exception { + super.tearDown(); + clusterService.close(); } public void testSimpleJoinAccumulation() throws InterruptedException, ExecutionException { @@ -97,21 +125,29 @@ public class NodeJoinControllerTests extends ESTestCase { pendingJoins.add(joinNodeAsync(node)); } nodeJoinController.stopAccumulatingJoins("test"); + boolean hadSyncJoin = false; for (int i = randomInt(5); i > 0; i--) { DiscoveryNode node = newNode(nodeId++); nodes.add(node); joinNode(node); + hadSyncJoin = true; + } + if (hadSyncJoin) { + for (Future joinFuture : pendingJoins) { + assertThat(joinFuture.isDone(), equalTo(true)); + } } - assertNodesInCurrentState(nodes); for (Future joinFuture : pendingJoins) { - assertThat(joinFuture.isDone(), equalTo(true)); + joinFuture.get(); } + + assertNodesInCurrentState(nodes); } public void testFailingJoinsWhenNotMaster() throws ExecutionException, InterruptedException { // remove current master flag DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(clusterService.state().nodes()).masterNodeId(null); - clusterService.setState(ClusterState.builder(clusterService.state()).nodes(nodes)); + setState(clusterService, ClusterState.builder(clusterService.state()).nodes(nodes)); int nodeId = 0; try { joinNode(newNode(nodeId++)); @@ -142,7 +178,7 @@ public class NodeJoinControllerTests extends ESTestCase { public void testSimpleMasterElectionWithoutRequiredJoins() throws InterruptedException, ExecutionException { DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(clusterService.state().nodes()).masterNodeId(null); - 
clusterService.setState(ClusterState.builder(clusterService.state()).nodes(nodes)); + setState(clusterService, ClusterState.builder(clusterService.state()).nodes(nodes)); int nodeId = 0; final int requiredJoins = 0; logger.debug("--> using requiredJoins [{}]", requiredJoins); @@ -184,13 +220,13 @@ public class NodeJoinControllerTests extends ESTestCase { }); masterElection.start(); - logger.debug("--> requiredJoins is set to 0. verifying election finished"); - electionFuture.get(); + logger.debug("--> requiredJoins is set to 0. verifying election finished"); + electionFuture.get(); } public void testSimpleMasterElection() throws InterruptedException, ExecutionException { DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(clusterService.state().nodes()).masterNodeId(null); - clusterService.setState(ClusterState.builder(clusterService.state()).nodes(nodes)); + setState(clusterService, ClusterState.builder(clusterService.state()).nodes(nodes)); int nodeId = 0; final int requiredJoins = 1 + randomInt(5); logger.debug("--> using requiredJoins [{}]", requiredJoins); @@ -301,7 +337,7 @@ public class NodeJoinControllerTests extends ESTestCase { public void testMasterElectionTimeout() throws InterruptedException { DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(clusterService.state().nodes()).masterNodeId(null); - clusterService.setState(ClusterState.builder(clusterService.state()).nodes(nodes)); + setState(clusterService, ClusterState.builder(clusterService.state()).nodes(nodes)); int nodeId = 0; final int requiredJoins = 1 + randomInt(5); logger.debug("--> using requiredJoins [{}]", requiredJoins); @@ -367,7 +403,7 @@ public class NodeJoinControllerTests extends ESTestCase { final DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(state.nodes()); final DiscoveryNode other_node = new DiscoveryNode("other_node", DummyTransportAddress.INSTANCE, Version.CURRENT); nodesBuilder.put(other_node); - 
clusterService.setState(ClusterState.builder(state).nodes(nodesBuilder)); + setState(clusterService, ClusterState.builder(state).nodes(nodesBuilder)); state = clusterService.state(); joinNode(other_node); @@ -413,7 +449,7 @@ public class NodeJoinControllerTests extends ESTestCase { public void testElectionWithConcurrentJoins() throws InterruptedException, BrokenBarrierException { DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(clusterService.state().nodes()).masterNodeId(null); - clusterService.setState(ClusterState.builder(clusterService.state()).nodes(nodesBuilder)); + setState(clusterService, ClusterState.builder(clusterService.state()).nodes(nodesBuilder)); nodeJoinController.startAccumulatingJoins(); @@ -512,11 +548,13 @@ public class NodeJoinControllerTests extends ESTestCase { } protected void assertNodesInCurrentState(List expectedNodes) { - DiscoveryNodes discoveryNodes = clusterService.state().nodes(); - assertThat(discoveryNodes.prettyPrint() + "\nexpected: " + expectedNodes.toString(), discoveryNodes.size(), equalTo(expectedNodes.size())); + final ClusterState state = clusterService.state(); + logger.info("assert for [{}] in:\n{}", expectedNodes, state.prettyPrint()); + DiscoveryNodes discoveryNodes = state.nodes(); for (DiscoveryNode node : expectedNodes) { assertThat("missing " + node + "\n" + discoveryNodes.prettyPrint(), discoveryNodes.get(node.id()), equalTo(node)); } + assertThat(discoveryNodes.size(), equalTo(expectedNodes.size())); } static class SimpleFuture extends BaseFuture { diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java index 9ad10cc38886..b7ce4c305e1f 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java @@ -25,12 +25,12 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import 
org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java b/core/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java index 118f32a6564c..bf9921a2e23e 100644 --- a/core/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java @@ -19,22 +19,28 @@ package org.elasticsearch.gateway; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.NoopDiscovery; -import org.elasticsearch.test.cluster.NoopClusterService; import org.hamcrest.Matchers; import java.io.IOException; public class GatewayServiceTests extends ESTestCase { + private GatewayService createService(Settings.Builder settings) { + ClusterService clusterService = new ClusterService(Settings.EMPTY, null, + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), + null, new ClusterName("ClusterServiceTests")); return new GatewayService(Settings.builder() .put("http.enabled", "false") 
.put("discovery.type", "local") .put(settings.build()).build(), - null, new NoopClusterService(), null, null, null, null, new NoopDiscovery()); + null, clusterService, null, null, null, null, new NoopDiscovery()); } diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java index cecaef6c1e75..2f1454b85022 100644 --- a/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java @@ -21,10 +21,8 @@ package org.elasticsearch.gateway; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.NodeEnvironment; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java index 748dd0a0a1a9..51e88e50edd3 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java @@ -28,27 +28,30 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.local.LocalTransport; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.test.cluster.TestClusterService; +import org.junit.After; import org.junit.AfterClass; import org.junit.BeforeClass; -import static org.hamcrest.CoreMatchers.instanceOf; import java.util.Collections; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static org.hamcrest.CoreMatchers.instanceOf; + public class DynamicMappingDisabledTests extends ESSingleNodeTestCase { private static ThreadPool THREAD_POOL; - private TestClusterService clusterService; + private ClusterService clusterService; private LocalTransport transport; private TransportService transportService; private IndicesService indicesService; @@ -67,9 +70,9 @@ public class DynamicMappingDisabledTests extends ESSingleNodeTestCase { public void setUp() throws Exception { super.setUp(); settings = Settings.builder() - .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING.getKey(), false) - .build(); - clusterService = new TestClusterService(THREAD_POOL); + .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING.getKey(), false) + .build(); + clusterService = createClusterService(THREAD_POOL); transport = new LocalTransport(settings, THREAD_POOL, Version.CURRENT, new NamedWriteableRegistry()); transportService = new TransportService(transport, THREAD_POOL); indicesService = getInstanceFromNode(IndicesService.class); @@ -79,6 +82,14 @@ public class DynamicMappingDisabledTests extends 
ESSingleNodeTestCase { autoCreateIndex = new AutoCreateIndex(settings, indexNameExpressionResolver); } + @After + public void tearDown() throws Exception { + super.tearDown(); + clusterService.close(); + transportService.close(); + } + + @AfterClass public static void destroyThreadPool() { ThreadPool.terminate(THREAD_POOL, 30, TimeUnit.SECONDS); @@ -88,8 +99,8 @@ public class DynamicMappingDisabledTests extends ESSingleNodeTestCase { public void testDynamicDisabled() { TransportIndexAction action = new TransportIndexAction(settings, transportService, clusterService, - indicesService, THREAD_POOL, shardStateAction, null, null, actionFilters, indexNameExpressionResolver, - autoCreateIndex); + indicesService, THREAD_POOL, shardStateAction, null, null, actionFilters, indexNameExpressionResolver, + autoCreateIndex); IndexRequest request = new IndexRequest("index", "type", "1"); request.source("foo", 3); diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index e0f44fd84910..f92ef2d3fbe3 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.query; import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.core.io.JsonStringEncoder; - import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -37,10 +36,10 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterService; import 
org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesArray; @@ -100,7 +99,6 @@ import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.TestSearchContext; import org.elasticsearch.test.VersionUtils; -import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPoolModule; import org.joda.time.DateTime; @@ -123,6 +121,8 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; @@ -142,10 +142,10 @@ public abstract class AbstractQueryTestCase> protected static final String GEO_POINT_FIELD_NAME = "mapped_geo_point"; protected static final String GEO_POINT_FIELD_MAPPING = "type=geo_point,lat_lon=true,geohash=true,geohash_prefix=true"; protected static final String GEO_SHAPE_FIELD_NAME = "mapped_geo_shape"; - protected static final String[] MAPPED_FIELD_NAMES = new String[] { STRING_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME, - BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME, GEO_SHAPE_FIELD_NAME }; - protected static final String[] MAPPED_LEAF_FIELD_NAMES = new String[] { STRING_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME, - BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, GEO_POINT_FIELD_NAME }; + protected static final String[] MAPPED_FIELD_NAMES = 
new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME, + BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME, GEO_SHAPE_FIELD_NAME}; + protected static final String[] MAPPED_LEAF_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME, + BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, GEO_POINT_FIELD_NAME}; private static final int NUMBER_OF_TESTQUERIES = 20; private static Injector injector; @@ -194,11 +194,13 @@ public abstract class AbstractQueryTestCase> .build(); Settings indexSettings = Settings.settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); + final ThreadPool threadPool = new ThreadPool(settings); index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_"); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings); - final TestClusterService clusterService = new TestClusterService(); - clusterService.setState(new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder().put( + ClusterService clusterService = createClusterService(threadPool); + setState(clusterService, new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder().put( new IndexMetaData.Builder(index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0)))); + SettingsModule settingsModule = new SettingsModule(settings); settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); final Client proxy = (Client) Proxy.newProxyInstance( @@ -210,10 +212,10 @@ public abstract class AbstractQueryTestCase> @Override protected void configure() { Settings settings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - // no file watching, so we don't need a ResourceWatcherService - .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false) - .build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + // no file watching, so we don't need a ResourceWatcherService + 
.put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false) + .build(); MockScriptEngine mockScriptEngine = new MockScriptEngine(); Multibinder multibinder = Multibinder.newSetBinder(binder(), ScriptEngineService.class); multibinder.addBinding().toInstance(mockScriptEngine); @@ -229,7 +231,7 @@ public abstract class AbstractQueryTestCase> try { ScriptService scriptService = new ScriptService(settings, new Environment(settings), engines, null, scriptEngineRegistry, scriptContextRegistry, scriptSettings); bind(ScriptService.class).toInstance(scriptService); - } catch(IOException e) { + } catch (IOException e) { throw new IllegalStateException("error while binding ScriptService", e); } } @@ -238,7 +240,7 @@ public abstract class AbstractQueryTestCase> injector = new ModulesBuilder().add( new EnvironmentModule(new Environment(settings)), settingsModule, - new ThreadPoolModule(new ThreadPool(settings)), + new ThreadPoolModule(threadPool), new IndicesModule() { @Override public void configure() { @@ -253,6 +255,7 @@ public abstract class AbstractQueryTestCase> protected void configureSearch() { // Skip me } + @Override protected void configureSuggesters() { // Skip me @@ -306,8 +309,8 @@ public abstract class AbstractQueryTestCase> GEO_SHAPE_FIELD_NAME, "type=geo_shape" ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); // also add mappings for two inner field in the object field - mapperService.merge(type, new CompressedXContent("{\"properties\":{\""+OBJECT_FIELD_NAME+"\":{\"type\":\"object\"," - + "\"properties\":{\""+DATE_FIELD_NAME+"\":{\"type\":\"date\"},\""+INT_FIELD_NAME+"\":{\"type\":\"integer\"}}}}}"), + mapperService.merge(type, new CompressedXContent("{\"properties\":{\"" + OBJECT_FIELD_NAME + "\":{\"type\":\"object\"," + + "\"properties\":{\"" + DATE_FIELD_NAME + "\":{\"type\":\"date\"},\"" + INT_FIELD_NAME + "\":{\"type\":\"integer\"}}}}}"), MapperService.MergeReason.MAPPING_UPDATE, false); currentTypes[i] = type; } @@ -316,6 +319,7 @@ 
public abstract class AbstractQueryTestCase> @AfterClass public static void afterClass() throws Exception { + injector.getInstance(ClusterService.class).close(); terminate(injector.getInstance(ThreadPool.class)); injector = null; index = null; @@ -421,9 +425,9 @@ public abstract class AbstractQueryTestCase> // we'd like to see the offending field name here assertThat(e.getMessage(), containsString("bogusField")); } - } + } - /** + /** * Test that adding additional object into otherwise correct query string * should always trigger some kind of Parsing Exception. */ @@ -692,7 +696,7 @@ public abstract class AbstractQueryTestCase> return new QueryParser() { @Override public String[] names() { - return new String[] {EmptyQueryBuilder.NAME}; + return new String[]{EmptyQueryBuilder.NAME}; } @Override @@ -716,7 +720,7 @@ public abstract class AbstractQueryTestCase> try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { QueryBuilder prototype = queryParser(query.getName()).getBuilderPrototype(); @SuppressWarnings("unchecked") - QB secondQuery = (QB)prototype.readFrom(in); + QB secondQuery = (QB) prototype.readFrom(in); return secondQuery; } } @@ -832,7 +836,7 @@ public abstract class AbstractQueryTestCase> } } else { if (randomBoolean()) { - types = new String[] { MetaData.ALL }; + types = new String[]{MetaData.ALL}; } else { types = new String[0]; } @@ -892,6 +896,7 @@ public abstract class AbstractQueryTestCase> private static class ClientInvocationHandler implements InvocationHandler { AbstractQueryTestCase delegate; + @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { if (method.equals(Client.class.getMethod("get", GetRequest.class))) { @@ -902,12 +907,12 @@ public abstract class AbstractQueryTestCase> } }; } else if (method.equals(Client.class.getMethod("multiTermVectors", MultiTermVectorsRequest.class))) { - return new PlainActionFuture() { - @Override - public 
MultiTermVectorsResponse get() throws InterruptedException, ExecutionException { - return delegate.executeMultiTermVectors((MultiTermVectorsRequest) args[0]); - } - }; + return new PlainActionFuture() { + @Override + public MultiTermVectorsResponse get() throws InterruptedException, ExecutionException { + return delegate.executeMultiTermVectors((MultiTermVectorsRequest) args[0]); + } + }; } else if (method.equals(Object.class.getMethod("toString"))) { return "MockClient"; } @@ -952,8 +957,8 @@ public abstract class AbstractQueryTestCase> source.toXContent(builder, ToXContent.EMPTY_PARAMS); assertEquals( msg(expected, builder.string()), - expected.replaceAll("\\s+",""), - builder.string().replaceAll("\\s+","")); + expected.replaceAll("\\s+", ""), + builder.string().replaceAll("\\s+", "")); } private static String msg(String left, String right) { @@ -966,7 +971,7 @@ public abstract class AbstractQueryTestCase> } else { builder.append(">> ").append("until offset: ").append(i) .append(" [").append(left.charAt(i)).append(" vs.").append(right.charAt(i)) - .append("] [").append((int)left.charAt(i) ).append(" vs.").append((int)right.charAt(i)).append(']'); + .append("] [").append((int) left.charAt(i)).append(" vs.").append((int) right.charAt(i)).append(']'); return builder.toString(); } } @@ -975,7 +980,7 @@ public abstract class AbstractQueryTestCase> int rightEnd = Math.max(size, right.length()) - 1; builder.append(">> ").append("until offset: ").append(size) .append(" [").append(left.charAt(leftEnd)).append(" vs.").append(right.charAt(rightEnd)) - .append("] [").append((int)left.charAt(leftEnd)).append(" vs.").append((int)right.charAt(rightEnd)).append(']'); + .append("] [").append((int) left.charAt(leftEnd)).append(" vs.").append((int) right.charAt(rightEnd)).append(']'); return builder.toString(); } return ""; diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 
c44a08a14376..cf4a0e2e0aa2 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -40,7 +40,6 @@ import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterInfoService; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.InternalClusterInfoService; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -50,9 +49,9 @@ import org.elasticsearch.cluster.routing.AllocationId; import org.elasticsearch.cluster.routing.RestoreSource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingHelper; -import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -99,7 +98,6 @@ import org.elasticsearch.test.FieldMaskingReader; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; -import org.elasticsearch.test.junit.annotations.TestLogging; import java.io.IOException; import java.nio.file.Files; diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java index 336d5a84a8d4..344c40a92ffc 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java @@ -20,14 +20,13 @@ 
package org.elasticsearch.indices; import org.apache.lucene.store.LockObtainFailedException; import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.gateway.GatewayMetaState; -import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.ShardId; diff --git a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java index 936e8ac600a5..da1bb7ae303c 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java +++ b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java @@ -18,10 +18,10 @@ */ package org.elasticsearch.indices.flush; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.index.IndexService; diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 98d4f84c6ef6..140ff153b9f0 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -30,10 +30,10 @@ import 
org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; diff --git a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java index 35624085c940..49819e1180ad 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.cluster.ClusterInfo; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlocks; @@ -40,6 +39,7 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index 9342ab043872..84d86324fee1 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.indices.store; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.health.ClusterHealthStatus; @@ -36,6 +35,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java index e909af62668a..badcbde193b9 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java @@ -30,19 +30,25 @@ import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRoutingState; import 
org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.cluster.TestClusterService; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.junit.After; +import org.junit.AfterClass; import org.junit.Before; +import org.junit.BeforeClass; import java.util.Arrays; import java.util.HashSet; import java.util.Set; +import java.util.concurrent.TimeUnit; import static org.elasticsearch.Version.CURRENT; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.VersionUtils.randomVersion; /** @@ -57,13 +63,35 @@ public class IndicesStoreTests extends ESTestCase { NOT_STARTED_STATES = set.toArray(new ShardRoutingState[set.size()]); } + private static ThreadPool threadPool; + private IndicesStore indicesStore; private DiscoveryNode localNode; + private ClusterService clusterService; + + @BeforeClass + public static void beforeClass() { + threadPool = new ThreadPool("ShardReplicationTests"); + } + + @AfterClass + public static void afterClass() { + ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + threadPool = null; + } + @Before public void before() { localNode = new DiscoveryNode("abc", new LocalTransportAddress("abc"), Version.CURRENT); - indicesStore = new IndicesStore(Settings.EMPTY, null, new TestClusterService(), new TransportService(null, null), null); + clusterService = createClusterService(threadPool); + indicesStore = new IndicesStore(Settings.EMPTY, null, clusterService, new TransportService(null, null), null); + } + + @After + public void tearDown() throws Exception { + super.tearDown(); + 
clusterService.close(); } public void testShardCanBeDeletedNoShardRouting() throws Exception { @@ -87,7 +115,7 @@ public class IndicesStoreTests extends ESTestCase { for (int i = 0; i < numShards; i++) { int unStartedShard = randomInt(numReplicas); - for (int j=0; j <= numReplicas; j++) { + for (int j = 0; j <= numReplicas; j++) { ShardRoutingState state; if (j == unStartedShard) { state = randomFrom(NOT_STARTED_STATES); diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java index a4632079b35e..03b7a258f4f4 100644 --- a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java +++ b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java @@ -21,7 +21,7 @@ package org.elasticsearch.nodesinfo; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.test.ESIntegTestCase; diff --git a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java index 1fd44959a597..7e4559131518 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java @@ -27,13 +27,13 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import 
org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java index bc6393986b24..4cd69ef604b9 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java @@ -20,10 +20,10 @@ package org.elasticsearch.search.aggregations; import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.inject.AbstractModule; @@ -59,7 +59,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; -import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPoolModule; import org.junit.AfterClass; @@ -75,6 +74,8 @@ import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import 
static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.containsString; public class AggregatorParsingTests extends ESTestCase { @@ -110,8 +111,9 @@ public class AggregatorParsingTests extends ESTestCase { namedWriteableRegistry = new NamedWriteableRegistry(); index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_"); Settings indexSettings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - final TestClusterService clusterService = new TestClusterService(); - clusterService.setState(new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder() + final ThreadPool threadPool = new ThreadPool(settings); + final ClusterService clusterService = createClusterService(threadPool); + setState(clusterService, new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder() .put(new IndexMetaData.Builder(index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0)))); SettingsModule settingsModule = new SettingsModule(settings); settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); @@ -146,7 +148,7 @@ public class AggregatorParsingTests extends ESTestCase { }; scriptModule.prepareSettings(settingsModule); injector = new ModulesBuilder().add(new EnvironmentModule(new Environment(settings)), settingsModule, - new ThreadPoolModule(new ThreadPool(settings)), scriptModule, new IndicesModule() { + new ThreadPoolModule(threadPool), scriptModule, new IndicesModule() { @Override protected void configure() { @@ -186,6 +188,7 @@ public class AggregatorParsingTests extends ESTestCase { @AfterClass public static void afterClass() throws Exception { + injector.getInstance(ClusterService.class).close(); terminate(injector.getInstance(ThreadPool.class)); injector = null; index = null; @@ -196,20 +199,20 @@ public class AggregatorParsingTests extends ESTestCase { public void testTwoTypes() throws Exception { String source = 
JsonXContent.contentBuilder() - .startObject() - .startObject("in_stock") - .startObject("filter") - .startObject("range") - .startObject("stock") - .field("gt", 0) - .endObject() - .endObject() - .endObject() - .startObject("terms") - .field("field", "stock") - .endObject() - .endObject() - .endObject().string(); + .startObject() + .startObject("in_stock") + .startObject("filter") + .startObject("range") + .startObject("stock") + .field("gt", 0) + .endObject() + .endObject() + .endObject() + .startObject("terms") + .field("field", "stock") + .endObject() + .endObject() + .endObject().string(); try { XContentParser parser = XContentFactory.xContent(source).createParser(source); QueryParseContext parseContext = new QueryParseContext(queriesRegistry); @@ -225,27 +228,27 @@ public class AggregatorParsingTests extends ESTestCase { public void testTwoAggs() throws Exception { String source = JsonXContent.contentBuilder() - .startObject() - .startObject("by_date") - .startObject("date_histogram") - .field("field", "timestamp") - .field("interval", "month") - .endObject() - .startObject("aggs") - .startObject("tag_count") - .startObject("cardinality") - .field("field", "tag") - .endObject() - .endObject() - .endObject() - .startObject("aggs") // 2nd "aggs": illegal - .startObject("tag_count2") - .startObject("cardinality") - .field("field", "tag") - .endObject() - .endObject() - .endObject() - .endObject().string(); + .startObject() + .startObject("by_date") + .startObject("date_histogram") + .field("field", "timestamp") + .field("interval", "month") + .endObject() + .startObject("aggs") + .startObject("tag_count") + .startObject("cardinality") + .field("field", "tag") + .endObject() + .endObject() + .endObject() + .startObject("aggs") // 2nd "aggs": illegal + .startObject("tag_count2") + .startObject("cardinality") + .field("field", "tag") + .endObject() + .endObject() + .endObject() + .endObject().string(); try { XContentParser parser = 
XContentFactory.xContent(source).createParser(source); QueryParseContext parseContext = new QueryParseContext(queriesRegistry); @@ -276,16 +279,16 @@ public class AggregatorParsingTests extends ESTestCase { } String source = JsonXContent.contentBuilder() - .startObject() - .startObject(name) - .startObject("filter") - .startObject("range") - .startObject("stock") - .field("gt", 0) - .endObject() - .endObject() - .endObject() - .endObject().string(); + .startObject() + .startObject(name) + .startObject("filter") + .startObject("range") + .startObject("stock") + .field("gt", 0) + .endObject() + .endObject() + .endObject() + .endObject().string(); try { XContentParser parser = XContentFactory.xContent(source).createParser(source); QueryParseContext parseContext = new QueryParseContext(queriesRegistry); @@ -302,18 +305,18 @@ public class AggregatorParsingTests extends ESTestCase { public void testSameAggregationName() throws Exception { final String name = randomAsciiOfLengthBetween(1, 10); String source = JsonXContent.contentBuilder() - .startObject() - .startObject(name) - .startObject("terms") - .field("field", "a") - .endObject() - .endObject() - .startObject(name) - .startObject("terms") - .field("field", "b") - .endObject() - .endObject() - .endObject().string(); + .startObject() + .startObject(name) + .startObject("terms") + .field("field", "a") + .endObject() + .endObject() + .startObject(name) + .startObject("terms") + .field("field", "b") + .endObject() + .endObject() + .endObject().string(); try { XContentParser parser = XContentFactory.xContent(source).createParser(source); QueryParseContext parseContext = new QueryParseContext(queriesRegistry); @@ -329,21 +332,21 @@ public class AggregatorParsingTests extends ESTestCase { public void testMissingName() throws Exception { String source = JsonXContent.contentBuilder() - .startObject() - .startObject("by_date") - .startObject("date_histogram") - .field("field", "timestamp") - .field("interval", "month") - 
.endObject() - .startObject("aggs") - // the aggregation name is missing - //.startObject("tag_count") - .startObject("cardinality") - .field("field", "tag") - .endObject() - //.endObject() - .endObject() - .endObject().string(); + .startObject() + .startObject("by_date") + .startObject("date_histogram") + .field("field", "timestamp") + .field("interval", "month") + .endObject() + .startObject("aggs") + // the aggregation name is missing + //.startObject("tag_count") + .startObject("cardinality") + .field("field", "tag") + .endObject() + //.endObject() + .endObject() + .endObject().string(); try { XContentParser parser = XContentFactory.xContent(source).createParser(source); QueryParseContext parseContext = new QueryParseContext(queriesRegistry); @@ -359,21 +362,21 @@ public class AggregatorParsingTests extends ESTestCase { public void testMissingType() throws Exception { String source = JsonXContent.contentBuilder() - .startObject() - .startObject("by_date") - .startObject("date_histogram") - .field("field", "timestamp") - .field("interval", "month") - .endObject() - .startObject("aggs") - .startObject("tag_count") - // the aggregation type is missing - //.startObject("cardinality") - .field("field", "tag") - //.endObject() - .endObject() - .endObject() - .endObject().string(); + .startObject() + .startObject("by_date") + .startObject("date_histogram") + .field("field", "timestamp") + .field("interval", "month") + .endObject() + .startObject("aggs") + .startObject("tag_count") + // the aggregation type is missing + //.startObject("cardinality") + .field("field", "tag") + //.endObject() + .endObject() + .endObject() + .endObject().string(); try { XContentParser parser = XContentFactory.xContent(source).createParser(source); QueryParseContext parseContext = new QueryParseContext(queriesRegistry); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java 
b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java index eeffbb73f8bc..4a12072da440 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java @@ -20,10 +20,10 @@ package org.elasticsearch.search.aggregations; import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Injector; @@ -60,7 +60,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; -import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPoolModule; import org.junit.AfterClass; @@ -73,6 +72,8 @@ import java.util.HashSet; import java.util.List; import java.util.Set; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; public abstract class BaseAggregationTestCase> extends ESTestCase { @@ -83,8 +84,8 @@ public abstract class BaseAggregationTestCase> protected static final String BOOLEAN_FIELD_NAME = "mapped_boolean"; protected static final String DATE_FIELD_NAME = "mapped_date"; protected static final String OBJECT_FIELD_NAME = "mapped_object"; - protected static final String[] mappedFieldNames = new String[] { STRING_FIELD_NAME, INT_FIELD_NAME, - DOUBLE_FIELD_NAME, 
BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, OBJECT_FIELD_NAME }; + protected static final String[] mappedFieldNames = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, + DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, OBJECT_FIELD_NAME}; private static Injector injector; private static Index index; @@ -117,11 +118,12 @@ public abstract class BaseAggregationTestCase> .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false) .build(); - namedWriteableRegistry = new NamedWriteableRegistry(); + namedWriteableRegistry = new NamedWriteableRegistry(); index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_"); Settings indexSettings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - final TestClusterService clusterService = new TestClusterService(); - clusterService.setState(new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder() + final ThreadPool threadPool = new ThreadPool(settings); + final ClusterService clusterService = createClusterService(threadPool); + setState(clusterService, new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder() .put(new IndexMetaData.Builder(index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0)))); SettingsModule settingsModule = new SettingsModule(settings); settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); @@ -158,7 +160,7 @@ public abstract class BaseAggregationTestCase> injector = new ModulesBuilder().add( new EnvironmentModule(new Environment(settings)), settingsModule, - new ThreadPoolModule(new ThreadPool(settings)), + new ThreadPoolModule(threadPool), scriptModule, new IndicesModule() { @@ -171,6 +173,7 @@ public abstract class BaseAggregationTestCase> protected void configureSearch() { // Skip me } + @Override protected void configureSuggesters() { // Skip me @@ -200,6 +203,7 @@ public abstract class BaseAggregationTestCase> @AfterClass public static void afterClass() throws Exception 
{ + injector.getInstance(ClusterService.class).close(); terminate(injector.getInstance(ThreadPool.class)); injector = null; index = null; @@ -249,7 +253,7 @@ public abstract class BaseAggregationTestCase> testAgg.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { AggregatorBuilder prototype = (AggregatorBuilder) namedWriteableRegistry.getPrototype(AggregatorBuilder.class, - testAgg.getWriteableName()); + testAgg.getWriteableName()); AggregatorBuilder deserializedQuery = prototype.readFrom(in); assertEquals(deserializedQuery, testAgg); assertEquals(deserializedQuery.hashCode(), testAgg.hashCode()); @@ -291,7 +295,7 @@ public abstract class BaseAggregationTestCase> agg.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { AggregatorBuilder prototype = (AggregatorBuilder) namedWriteableRegistry.getPrototype(AggregatorBuilder.class, - agg.getWriteableName()); + agg.getWriteableName()); @SuppressWarnings("unchecked") AB secondAgg = (AB) prototype.readFrom(in); return secondAgg; @@ -309,7 +313,7 @@ public abstract class BaseAggregationTestCase> } } else { if (randomBoolean()) { - types = new String[] { MetaData.ALL }; + types = new String[]{MetaData.ALL}; } else { types = new String[0]; } @@ -320,13 +324,13 @@ public abstract class BaseAggregationTestCase> public String randomNumericField() { int randomInt = randomInt(3); switch (randomInt) { - case 0: - return DATE_FIELD_NAME; - case 1: - return DOUBLE_FIELD_NAME; - case 2: - default: - return INT_FIELD_NAME; + case 0: + return DATE_FIELD_NAME; + case 1: + return DOUBLE_FIELD_NAME; + case 2: + default: + return INT_FIELD_NAME; } } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java index 
667cc9008a0f..cac2100ddded 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java @@ -20,10 +20,10 @@ package org.elasticsearch.search.aggregations; import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Injector; @@ -61,7 +61,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; -import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPoolModule; import org.junit.AfterClass; @@ -74,6 +73,8 @@ import java.util.HashSet; import java.util.List; import java.util.Set; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; public abstract class BasePipelineAggregationTestCase extends ESTestCase { @@ -84,8 +85,8 @@ public abstract class BasePipelineAggregationTestCase, Response extends BulkIndexByScrollResponse, TA extends TransportAction> extends BaseRestHandler { protected final IndicesQueriesRegistry indicesQueriesRegistry; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java index 44d0d8fcb30b..f78d4e282d31 100644 --- 
a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java @@ -19,19 +19,11 @@ package org.elasticsearch.index.reindex; -import static org.elasticsearch.common.unit.TimeValue.parseTimeValue; -import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.rest.RestStatus.BAD_REQUEST; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.inject.Inject; @@ -53,6 +45,14 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregatorParsers; import org.elasticsearch.search.suggest.Suggesters; +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.common.unit.TimeValue.parseTimeValue; +import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.rest.RestStatus.BAD_REQUEST; + /** * Expose IndexBySearchRequest over rest. 
*/ diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java index 2c2da6011ce0..8da8b2c904f3 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java @@ -19,15 +19,9 @@ package org.elasticsearch.index.reindex; -import static org.elasticsearch.index.reindex.AbstractBulkByScrollRequest.SIZE_ALL_MATCHES; -import static org.elasticsearch.index.reindex.RestReindexAction.parseCommon; -import static org.elasticsearch.rest.RestRequest.Method.POST; - -import java.util.Map; - import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; @@ -46,6 +40,12 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregatorParsers; import org.elasticsearch.search.suggest.Suggesters; +import java.util.Map; + +import static org.elasticsearch.index.reindex.AbstractBulkByScrollRequest.SIZE_ALL_MATCHES; +import static org.elasticsearch.index.reindex.RestReindexAction.parseCommon; +import static org.elasticsearch.rest.RestRequest.Method.POST; + public class RestUpdateByQueryAction extends AbstractBaseReindexRestHandler { @Inject diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index 87fa15cd9ac7..482f101653d3 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ 
b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -29,9 +29,9 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.uid.Versions; diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java index c52c2d8cbf59..5b973f67abc6 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java @@ -27,6 +27,7 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.TransportIndicesStatsAction; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; diff --git a/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java index 41750b5c25d9..4022366c1d0b 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java +++ 
b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java @@ -20,7 +20,7 @@ package org.elasticsearch.search; import org.elasticsearch.cache.recycler.PageCacheRecycler; -import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index f0df49cf3922..24a7360d9213 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -57,7 +57,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.AdminClient; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -69,6 +68,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; @@ -100,19 +100,19 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.MockEngineFactoryPlugin; import 
org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexWarmer; +import org.elasticsearch.index.MergePolicyConfig; +import org.elasticsearch.index.MergeSchedulerConfig; +import org.elasticsearch.index.MockEngineFactoryPlugin; import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType.Loading; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; -import org.elasticsearch.index.MergePolicyConfig; -import org.elasticsearch.index.MergeSchedulerConfig; import org.elasticsearch.index.translog.Translog; -import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.index.IndexWarmer; import org.elasticsearch.indices.IndicesRequestCache; +import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.node.NodeMocksPlugin; import org.elasticsearch.plugins.Plugin; diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index a9d0f483e5aa..207593725db9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -34,7 +34,6 @@ import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -45,6 +44,7 @@ import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.cluster.routing.ShardRouting; import 
org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.CircuitBreaker; @@ -66,7 +66,6 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.Engine; diff --git a/test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java b/test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java deleted file mode 100644 index ad73a097c1e0..000000000000 --- a/test/framework/src/main/java/org/elasticsearch/test/cluster/NoopClusterService.java +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.test.cluster; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.ClusterStateTaskConfig; -import org.elasticsearch.cluster.ClusterStateTaskExecutor; -import org.elasticsearch.cluster.ClusterStateTaskListener; -import org.elasticsearch.cluster.ClusterStateUpdateTask; -import org.elasticsearch.cluster.LocalNodeMasterListener; -import org.elasticsearch.cluster.TimeoutClusterStateListener; -import org.elasticsearch.cluster.block.ClusterBlock; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.OperationRouting; -import org.elasticsearch.cluster.service.PendingClusterTask; -import org.elasticsearch.common.component.Lifecycle; -import org.elasticsearch.common.component.LifecycleListener; -import org.elasticsearch.common.transport.DummyTransportAddress; -import org.elasticsearch.common.unit.TimeValue; - -import java.util.List; - -public class NoopClusterService implements ClusterService { - - final ClusterState state; - - public NoopClusterService() { - this(ClusterState.builder(new ClusterName("noop")).build()); - } - - public NoopClusterService(ClusterState state) { - if (state.getNodes().size() == 0) { - state = ClusterState.builder(state).nodes( - DiscoveryNodes.builder() - .put(new DiscoveryNode("noop_id", DummyTransportAddress.INSTANCE, Version.CURRENT)) - .localNodeId("noop_id")).build(); - } - - assert state.getNodes().localNode() != null; - this.state = state; - - } - - @Override - public DiscoveryNode localNode() { - return state.getNodes().localNode(); - } - - @Override - public ClusterState state() { - return state; - } - - @Override - public void addInitialStateBlock(ClusterBlock block) throws 
IllegalStateException { - - } - - @Override - public void removeInitialStateBlock(ClusterBlock block) throws IllegalStateException { - - } - - @Override - public void removeInitialStateBlock(int blockId) throws IllegalStateException { - - } - - @Override - public OperationRouting operationRouting() { - return null; - } - - @Override - public void addFirst(ClusterStateListener listener) { - - } - - @Override - public void addLast(ClusterStateListener listener) { - - } - - @Override - public void add(ClusterStateListener listener) { - - } - - @Override - public void remove(ClusterStateListener listener) { - - } - - @Override - public void add(LocalNodeMasterListener listener) { - - } - - @Override - public void remove(LocalNodeMasterListener listener) { - - } - - @Override - public void add(TimeValue timeout, TimeoutClusterStateListener listener) { - - } - - @Override - public void submitStateUpdateTask(String source, ClusterStateUpdateTask updateTask) { - - } - - @Override - public void submitStateUpdateTask(String source, T task, ClusterStateTaskConfig config, ClusterStateTaskExecutor executor, ClusterStateTaskListener listener) { - - } - - @Override - public List pendingTasks() { - return null; - } - - @Override - public int numberOfPendingTasks() { - return 0; - } - - @Override - public TimeValue getMaxTaskWaitTime() { - return TimeValue.timeValueMillis(0); - } - - @Override - public Lifecycle.State lifecycleState() { - return null; - } - - @Override - public void addLifecycleListener(LifecycleListener listener) { - - } - - @Override - public void removeLifecycleListener(LifecycleListener listener) { - - } - - @Override - public ClusterService start() { - return null; - } - - @Override - public ClusterService stop() { - return null; - } - - @Override - public void close() { - - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java b/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java 
deleted file mode 100644 index ebae5cc99474..000000000000 --- a/test/framework/src/main/java/org/elasticsearch/test/cluster/TestClusterService.java +++ /dev/null @@ -1,301 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.test.cluster; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.ClusterStateTaskConfig; -import org.elasticsearch.cluster.ClusterStateTaskExecutor; -import org.elasticsearch.cluster.ClusterStateTaskListener; -import org.elasticsearch.cluster.ClusterStateUpdateTask; -import org.elasticsearch.cluster.LocalNodeMasterListener; -import org.elasticsearch.cluster.TimeoutClusterStateListener; -import org.elasticsearch.cluster.block.ClusterBlock; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.OperationRouting; -import 
org.elasticsearch.cluster.routing.allocation.decider.AwarenessAllocationDecider; -import org.elasticsearch.cluster.service.PendingClusterTask; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.component.Lifecycle; -import org.elasticsearch.common.component.LifecycleListener; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.util.concurrent.FutureUtils; -import org.elasticsearch.threadpool.ThreadPool; - -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; -import java.util.Queue; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.ScheduledFuture; - -/** a class that simulate simple cluster service features, like state storage and listeners */ -public class TestClusterService implements ClusterService { - - volatile ClusterState state; - private final List listeners = new CopyOnWriteArrayList<>(); - private final Queue onGoingTimeouts = ConcurrentCollections.newQueue(); - private final ThreadPool threadPool; - private final ESLogger logger = Loggers.getLogger(getClass(), Settings.EMPTY); - private final OperationRouting operationRouting = new OperationRouting(Settings.Builder.EMPTY_SETTINGS, new AwarenessAllocationDecider()); - - public TestClusterService() { - this(ClusterState.builder(new ClusterName("test")).build()); - } - - public TestClusterService(ThreadPool threadPool) { - this(ClusterState.builder(new ClusterName("test")).build(), threadPool); - } - - public TestClusterService(ClusterState state) { - this(state, null); - } - - public TestClusterService(ClusterState state, @Nullable ThreadPool threadPool) { - if (state.getNodes().size() == 0) { - state = 
ClusterState.builder(state).nodes( - DiscoveryNodes.builder() - .put(new DiscoveryNode("test_node", DummyTransportAddress.INSTANCE, Version.CURRENT)) - .localNodeId("test_node")).build(); - } - - assert state.getNodes().localNode() != null; - this.state = state; - this.threadPool = threadPool; - - } - - - /** set the current state and trigger any registered listeners about the change, mimicking an update task */ - synchronized public ClusterState setState(ClusterState state) { - assert state.getNodes().localNode() != null; - // make sure we have a version increment - state = ClusterState.builder(state).version(this.state.version() + 1).build(); - return setStateAndNotifyListeners(state); - } - - private ClusterState setStateAndNotifyListeners(ClusterState state) { - ClusterChangedEvent event = new ClusterChangedEvent("test", state, this.state); - this.state = state; - for (ClusterStateListener listener : listeners) { - listener.clusterChanged(event); - } - return state; - } - - /** set the current state and trigger any registered listeners about the change */ - public ClusterState setState(ClusterState.Builder state) { - return setState(state.build()); - } - - @Override - public DiscoveryNode localNode() { - return state.getNodes().localNode(); - } - - @Override - public ClusterState state() { - return state; - } - - @Override - public void addInitialStateBlock(ClusterBlock block) throws IllegalStateException { - throw new UnsupportedOperationException(); - - } - - @Override - public void removeInitialStateBlock(ClusterBlock block) throws IllegalStateException { - throw new UnsupportedOperationException(); - } - - @Override - public void removeInitialStateBlock(int blockId) throws IllegalStateException { - throw new UnsupportedOperationException(); - } - - @Override - public OperationRouting operationRouting() { - return operationRouting; - } - - @Override - public void addFirst(ClusterStateListener listener) { - listeners.add(0, listener); - } - - @Override - 
public void addLast(ClusterStateListener listener) { - listeners.add(listener); - } - - @Override - public void add(ClusterStateListener listener) { - listeners.add(listener); - } - - @Override - public void remove(ClusterStateListener listener) { - listeners.remove(listener); - for (Iterator it = onGoingTimeouts.iterator(); it.hasNext(); ) { - NotifyTimeout timeout = it.next(); - if (timeout.listener.equals(listener)) { - timeout.cancel(); - it.remove(); - } - } - } - - @Override - public void add(LocalNodeMasterListener listener) { - throw new UnsupportedOperationException(); - } - - @Override - public void remove(LocalNodeMasterListener listener) { - throw new UnsupportedOperationException(); - } - - @Override - public void add(final TimeValue timeout, final TimeoutClusterStateListener listener) { - if (threadPool == null) { - throw new UnsupportedOperationException("TestClusterService wasn't initialized with a thread pool"); - } - if (timeout != null) { - NotifyTimeout notifyTimeout = new NotifyTimeout(listener, timeout); - notifyTimeout.future = threadPool.schedule(timeout, ThreadPool.Names.GENERIC, notifyTimeout); - onGoingTimeouts.add(notifyTimeout); - } - listeners.add(listener); - listener.postAdded(); - } - - @Override - public void submitStateUpdateTask(String source, ClusterStateUpdateTask updateTask) { - submitStateUpdateTask(source, null, updateTask, updateTask, updateTask); - } - - @Override - synchronized public void submitStateUpdateTask(String source, T task, ClusterStateTaskConfig config, ClusterStateTaskExecutor executor, ClusterStateTaskListener listener) { - logger.debug("processing [{}]", source); - if (state().nodes().localNodeMaster() == false && executor.runOnlyOnMaster()) { - listener.onNoLongerMaster(source); - logger.debug("failed [{}], no longer master", source); - return; - } - ClusterStateTaskExecutor.BatchResult batchResult; - ClusterState previousClusterState = state; - try { - batchResult = executor.execute(previousClusterState, 
Arrays.asList(task)); - } catch (Exception e) { - batchResult = ClusterStateTaskExecutor.BatchResult.builder().failure(task, e).build(previousClusterState); - } - - batchResult.executionResults.get(task).handle( - () -> {}, - ex -> listener.onFailure(source, new ElasticsearchException("failed to process cluster state update task [" + source + "]", ex)) - ); - - setStateAndNotifyListeners(batchResult.resultingState); - listener.clusterStateProcessed(source, previousClusterState, batchResult.resultingState); - logger.debug("finished [{}]", source); - - } - - @Override - public TimeValue getMaxTaskWaitTime() { - throw new UnsupportedOperationException(); - } - - @Override - public List pendingTasks() { - throw new UnsupportedOperationException(); - - } - - @Override - public int numberOfPendingTasks() { - throw new UnsupportedOperationException(); - } - - @Override - public Lifecycle.State lifecycleState() { - throw new UnsupportedOperationException(); - } - - @Override - public void addLifecycleListener(LifecycleListener listener) { - throw new UnsupportedOperationException(); - } - - @Override - public void removeLifecycleListener(LifecycleListener listener) { - throw new UnsupportedOperationException(); - } - - @Override - public ClusterService start() throws ElasticsearchException { - throw new UnsupportedOperationException(); - } - - @Override - public ClusterService stop() throws ElasticsearchException { - throw new UnsupportedOperationException(); - } - - @Override - public void close() throws ElasticsearchException { - throw new UnsupportedOperationException(); - } - - class NotifyTimeout implements Runnable { - final TimeoutClusterStateListener listener; - final TimeValue timeout; - volatile ScheduledFuture future; - - NotifyTimeout(TimeoutClusterStateListener listener, TimeValue timeout) { - this.listener = listener; - this.timeout = timeout; - } - - public void cancel() { - FutureUtils.cancel(future); - } - - @Override - public void run() { - if (future != 
null && future.isCancelled()) { - return; - } - listener.onTimeout(this.timeout); - // note, we rely on the listener to remove itself in case of timeout if needed - } - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java index 881fa43ce47c..cbcb9766943f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java @@ -18,9 +18,9 @@ */ package org.elasticsearch.test.disruption; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.InternalTestCluster; diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java index b9c663686b11..be0b69a8e8b9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java @@ -18,9 +18,9 @@ */ package org.elasticsearch.test.disruption; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.InternalTestCluster; From 000e419795b021a7bcabb86a0f50950ffe57c835 Mon Sep 17 00:00:00 2001 From: Martijn van 
Groningen Date: Mon, 21 Mar 2016 14:09:30 +0100 Subject: [PATCH 319/320] docs: fix link --- docs/java-api/query-dsl/percolator-query.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/java-api/query-dsl/percolator-query.asciidoc b/docs/java-api/query-dsl/percolator-query.asciidoc index 874c46c79860..04f3ba9cb8df 100644 --- a/docs/java-api/query-dsl/percolator-query.asciidoc +++ b/docs/java-api/query-dsl/percolator-query.asciidoc @@ -2,7 +2,7 @@ ==== Percolator query See: - * {ref}/query-percolator-query.html[Percolator Query] + * {ref}/query-dsl-percolator-query.html[Percolator Query] [source,java] -------------------------------------------------- From 4ac4f3c8bc25eb3ca6b20fbba1ec33a54e93dd99 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Mon, 21 Mar 2016 15:38:01 +0100 Subject: [PATCH 320/320] Build: Update ospackage gradle plugin The older version did not support signing of the packages. --- distribution/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distribution/build.gradle b/distribution/build.gradle index 6936f898d957..b9b2784a5b33 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -34,7 +34,7 @@ buildscript { } } dependencies { - classpath 'com.netflix.nebula:gradle-ospackage-plugin:3.1.0' + classpath 'com.netflix.nebula:gradle-ospackage-plugin:3.4.0' } }